Refactor pt1
This commit is contained in:
parent
cf3491e5a7
commit
84e4c17fda
34 changed files with 984 additions and 903 deletions
|
@ -1,57 +0,0 @@
|
|||
use std::cell::Ref;
|
||||
use std::cell::RefMut;
|
||||
use std::collections::HashMap;
|
||||
use std::ops::Range;
|
||||
use std::rc::Rc;
|
||||
|
||||
use ariadne::Report;
|
||||
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::source::Source;
|
||||
use crate::parser::source::Token;
|
||||
|
||||
use super::document::Document;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum CustomStyleToken {
|
||||
Toggle(String),
|
||||
Pair(String, String),
|
||||
}
|
||||
|
||||
pub trait CustomStyle: core::fmt::Debug {
|
||||
/// Name for the custom style
|
||||
fn name(&self) -> &str;
|
||||
/// Gets the begin and end token for a custom style
|
||||
fn tokens(&self) -> &CustomStyleToken;
|
||||
|
||||
fn on_start<'a>(
|
||||
&self,
|
||||
location: Token,
|
||||
parser: &dyn Parser,
|
||||
document: &'a (dyn Document<'a> + 'a),
|
||||
) -> Result<(), Report<(Rc<dyn Source>, Range<usize>)>>;
|
||||
fn on_end<'a>(
|
||||
&self,
|
||||
location: Token,
|
||||
parser: &dyn Parser,
|
||||
document: &'a (dyn Document<'a> + 'a),
|
||||
) -> Result<(), Report<(Rc<dyn Source>, Range<usize>)>>;
|
||||
}
|
||||
|
||||
pub trait CustomStyleHolder {
|
||||
/// gets a reference to all defined custom styles
|
||||
fn custom_styles(&self) -> Ref<'_, HashMap<String, Rc<dyn CustomStyle>>>;
|
||||
|
||||
/// gets a (mutable) reference to all defined custom styles
|
||||
fn custom_styles_mut(&self) -> RefMut<'_, HashMap<String, Rc<dyn CustomStyle>>>;
|
||||
|
||||
fn get_custom_style(&self, style_name: &str) -> Option<Rc<dyn CustomStyle>> {
|
||||
self.custom_styles()
|
||||
.get(style_name)
|
||||
.map(|style| style.clone())
|
||||
}
|
||||
|
||||
fn insert_custom_style(&self, style: Rc<dyn CustomStyle>) {
|
||||
self.custom_styles_mut().insert(style.name().into(), style);
|
||||
}
|
||||
}
|
|
@ -3,6 +3,3 @@ pub mod references;
|
|||
pub mod langdocument;
|
||||
pub mod element;
|
||||
pub mod variable;
|
||||
pub mod style;
|
||||
pub mod layout;
|
||||
pub mod customstyle;
|
||||
|
|
|
@ -1,11 +1,15 @@
|
|||
use std::{path::PathBuf, rc::Rc};
|
||||
use crate::{elements::text::Text, parser::{parser::Parser, source::{Source, Token, VirtualSource}}};
|
||||
use super::document::Document;
|
||||
|
||||
use crate::elements::text::Text;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::source::Source;
|
||||
use crate::parser::source::Token;
|
||||
use crate::parser::source::VirtualSource;
|
||||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
|
||||
// TODO enforce to_string(from_string(to_string())) == to_string()
|
||||
pub trait Variable
|
||||
{
|
||||
pub trait Variable {
|
||||
fn location(&self) -> &Token;
|
||||
|
||||
fn name(&self) -> &str;
|
||||
|
@ -15,90 +19,97 @@ pub trait Variable
|
|||
/// Converts variable to a string
|
||||
fn to_string(&self) -> String;
|
||||
|
||||
fn parse<'a>(&self, location: Token, parser: &dyn Parser, document: &'a dyn Document<'a>);
|
||||
fn parse<'a>(&self, state: &mut ParserState, location: Token, document: &'a dyn Document<'a>);
|
||||
}
|
||||
|
||||
impl core::fmt::Debug for dyn Variable
|
||||
{
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
impl core::fmt::Debug for dyn Variable {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}{{{}}}", self.name(), self.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct BaseVariable
|
||||
{
|
||||
location: Token,
|
||||
name: String,
|
||||
value: String,
|
||||
}
|
||||
|
||||
impl BaseVariable {
|
||||
pub fn new(location: Token, name: String, value: String) -> Self {
|
||||
Self { location, name, value }
|
||||
}
|
||||
}
|
||||
|
||||
impl Variable for BaseVariable
|
||||
{
|
||||
fn location(&self) -> &Token { &self.location }
|
||||
|
||||
fn name(&self) -> &str { self.name.as_str() }
|
||||
|
||||
fn from_string(&mut self, str: &str) -> Option<String> {
|
||||
self.value = str.to_string();
|
||||
None
|
||||
}
|
||||
|
||||
fn to_string(&self) -> String { self.value.clone() }
|
||||
|
||||
fn parse<'a>(&self, _location: Token, parser: &dyn Parser, document: &'a dyn Document<'a>) {
|
||||
let source = Rc::new(VirtualSource::new(
|
||||
self.location().clone(),
|
||||
self.name().to_string(),
|
||||
self.to_string()));
|
||||
|
||||
parser.parse_into(source, document);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct PathVariable
|
||||
{
|
||||
pub struct BaseVariable {
|
||||
location: Token,
|
||||
name: String,
|
||||
path: PathBuf,
|
||||
name: String,
|
||||
value: String,
|
||||
}
|
||||
|
||||
impl PathVariable
|
||||
{
|
||||
pub fn new(location: Token, name: String, path: PathBuf) -> Self {
|
||||
Self { location, name, path }
|
||||
}
|
||||
impl BaseVariable {
|
||||
pub fn new(location: Token, name: String, value: String) -> Self {
|
||||
Self {
|
||||
location,
|
||||
name,
|
||||
value,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Variable for PathVariable
|
||||
{
|
||||
impl Variable for BaseVariable {
|
||||
fn location(&self) -> &Token { &self.location }
|
||||
|
||||
fn name(&self) -> &str { self.name.as_str() }
|
||||
fn name(&self) -> &str { self.name.as_str() }
|
||||
|
||||
fn from_string(&mut self, str: &str) -> Option<String> {
|
||||
self.path = PathBuf::from(std::fs::canonicalize(str).unwrap());
|
||||
None
|
||||
}
|
||||
fn from_string(&mut self, str: &str) -> Option<String> {
|
||||
self.value = str.to_string();
|
||||
None
|
||||
}
|
||||
|
||||
fn to_string(&self) -> String { self.path.to_str().unwrap().to_string() }
|
||||
fn to_string(&self) -> String { self.value.clone() }
|
||||
|
||||
fn parse<'a>(&self, location: Token, parser: &dyn Parser, document: &'a dyn Document) {
|
||||
fn parse<'a>(&self, state: &mut ParserState, _location: Token, document: &'a dyn Document<'a>) {
|
||||
let source = Rc::new(VirtualSource::new(
|
||||
self.location().clone(),
|
||||
self.name().to_string(),
|
||||
self.to_string(),
|
||||
));
|
||||
|
||||
state.with_state(|new_state| new_state.parser.parse_into(new_state, source, document))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct PathVariable {
|
||||
location: Token,
|
||||
name: String,
|
||||
path: PathBuf,
|
||||
}
|
||||
|
||||
impl PathVariable {
|
||||
pub fn new(location: Token, name: String, path: PathBuf) -> Self {
|
||||
Self {
|
||||
location,
|
||||
name,
|
||||
path,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Variable for PathVariable {
|
||||
fn location(&self) -> &Token { &self.location }
|
||||
|
||||
fn name(&self) -> &str { self.name.as_str() }
|
||||
|
||||
fn from_string(&mut self, str: &str) -> Option<String> {
|
||||
self.path = PathBuf::from(std::fs::canonicalize(str).unwrap());
|
||||
None
|
||||
}
|
||||
|
||||
fn to_string(&self) -> String { self.path.to_str().unwrap().to_string() }
|
||||
|
||||
fn parse<'a>(&self, state: &mut ParserState, location: Token, document: &'a dyn Document) {
|
||||
let source = Rc::new(VirtualSource::new(
|
||||
location,
|
||||
self.name().to_string(),
|
||||
self.to_string()));
|
||||
self.to_string(),
|
||||
));
|
||||
|
||||
parser.push(document, Box::new(Text::new(
|
||||
Token::new(0..source.content().len(), source),
|
||||
self.to_string()
|
||||
)));
|
||||
}
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Text::new(
|
||||
Token::new(0..source.content().len(), source),
|
||||
self.to_string(),
|
||||
)),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -26,6 +26,7 @@ use crate::document::element::ElemKind;
|
|||
use crate::document::element::Element;
|
||||
use crate::lua::kernel::CTX;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::rule::RegexRule;
|
||||
use crate::parser::source::Source;
|
||||
use crate::parser::source::Token;
|
||||
|
@ -336,7 +337,7 @@ impl RegexRule for CodeRule {
|
|||
fn on_regex_match<'a>(
|
||||
&self,
|
||||
index: usize,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &'a dyn Document,
|
||||
token: Token,
|
||||
matches: Captures,
|
||||
|
@ -353,7 +354,7 @@ impl RegexRule for CodeRule {
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), token.range.clone()))
|
||||
.with_message(format!("Code is missing properties: {e}"))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -371,7 +372,7 @@ impl RegexRule for CodeRule {
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), props.range()))
|
||||
.with_message(e)
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -393,7 +394,7 @@ impl RegexRule for CodeRule {
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), lang.range()))
|
||||
.with_message("No language specified")
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -411,9 +412,9 @@ impl RegexRule for CodeRule {
|
|||
Label::new((token.source().clone(), lang.range()))
|
||||
.with_message(format!(
|
||||
"Language `{}` cannot be found",
|
||||
code_lang.fg(parser.colors().info)
|
||||
code_lang.fg(state.parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -443,7 +444,7 @@ impl RegexRule for CodeRule {
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), token.range.clone()))
|
||||
.with_message("Code content cannot be empty")
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -475,9 +476,9 @@ impl RegexRule for CodeRule {
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), token.start()+1..token.end()))
|
||||
.with_message(format!("Property `line_offset: {}` cannot be converted: {}",
|
||||
prop.fg(parser.colors().info),
|
||||
err.fg(parser.colors().error)))
|
||||
.with_color(parser.colors().warning))
|
||||
prop.fg(state.parser.colors().info),
|
||||
err.fg(state.parser.colors().error)))
|
||||
.with_color(state.parser.colors().warning))
|
||||
.finish());
|
||||
return reports;
|
||||
}
|
||||
|
@ -492,9 +493,9 @@ impl RegexRule for CodeRule {
|
|||
))
|
||||
.with_message(format!(
|
||||
"Property `{}` doesn't exist",
|
||||
err.fg(parser.colors().info)
|
||||
err.fg(state.parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -504,7 +505,7 @@ impl RegexRule for CodeRule {
|
|||
}
|
||||
};
|
||||
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Code::new(
|
||||
token.clone(),
|
||||
|
@ -525,7 +526,7 @@ impl RegexRule for CodeRule {
|
|||
CodeKind::Inline
|
||||
};
|
||||
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Code::new(
|
||||
token.clone(),
|
||||
|
@ -542,7 +543,7 @@ impl RegexRule for CodeRule {
|
|||
reports
|
||||
}
|
||||
|
||||
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> {
|
||||
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
|
||||
let mut bindings = vec![];
|
||||
bindings.push((
|
||||
"push_inline".to_string(),
|
||||
|
@ -644,7 +645,7 @@ impl RegexRule for CodeRule {
|
|||
.unwrap(),
|
||||
));
|
||||
|
||||
Some(bindings)
|
||||
bindings
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -679,7 +680,7 @@ fn fact(n: usize) -> usize
|
|||
None,
|
||||
));
|
||||
let parser = LangParser::default();
|
||||
let doc = parser.parse(source, None);
|
||||
let doc = parser.parse(ParserState::new(&parser, None), source, None);
|
||||
|
||||
let borrow = doc.content().borrow();
|
||||
let found = borrow
|
||||
|
|
|
@ -3,14 +3,13 @@ use crate::document::document::Document;
|
|||
use crate::document::element::ElemKind;
|
||||
use crate::document::element::Element;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::rule::RegexRule;
|
||||
use crate::parser::source::Source;
|
||||
use crate::parser::source::Token;
|
||||
use ariadne::Label;
|
||||
use ariadne::Report;
|
||||
use ariadne::ReportKind;
|
||||
use mlua::Function;
|
||||
use mlua::Lua;
|
||||
use regex::Captures;
|
||||
use regex::Regex;
|
||||
use std::ops::Range;
|
||||
|
@ -60,7 +59,7 @@ impl RegexRule for CommentRule {
|
|||
fn on_regex_match<'a>(
|
||||
&self,
|
||||
_: usize,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &'a dyn Document,
|
||||
token: Token,
|
||||
matches: Captures,
|
||||
|
@ -78,7 +77,7 @@ impl RegexRule for CommentRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), comment.range()))
|
||||
.with_message("Comment is empty")
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -88,7 +87,7 @@ impl RegexRule for CommentRule {
|
|||
}
|
||||
};
|
||||
|
||||
parser.push(document, Box::new(Comment::new(token.clone(), content)));
|
||||
state.parser.push(document, Box::new(Comment::new(token.clone(), content)));
|
||||
|
||||
return reports;
|
||||
}
|
||||
|
@ -118,7 +117,7 @@ COMMENT ::Test
|
|||
None,
|
||||
));
|
||||
let parser = LangParser::default();
|
||||
let doc = parser.parse(source, None);
|
||||
let doc = parser.parse(ParserState::new(&parser, None), source, None);
|
||||
|
||||
validate_document!(doc.content().borrow(), 0,
|
||||
Paragraph {
|
||||
|
|
|
@ -13,19 +13,20 @@ use mlua::Error::BadArgument;
|
|||
use mlua::Function;
|
||||
use mlua::Lua;
|
||||
|
||||
use crate::document::customstyle::CustomStyle;
|
||||
use crate::document::customstyle::CustomStyleToken;
|
||||
use crate::document::document::Document;
|
||||
use crate::document::document::DocumentAccessors;
|
||||
use crate::lua::kernel::KernelContext;
|
||||
use crate::lua::kernel::CTX;
|
||||
use crate::parser::customstyle::CustomStyle;
|
||||
use crate::parser::customstyle::CustomStyleToken;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::rule::Rule;
|
||||
use crate::parser::source::Cursor;
|
||||
use crate::parser::source::Source;
|
||||
use crate::parser::source::Token;
|
||||
use crate::parser::state::RuleState;
|
||||
use crate::parser::state::Scope;
|
||||
use crate::parser::state::State;
|
||||
|
||||
use lazy_static::lazy_static;
|
||||
|
||||
|
@ -47,13 +48,13 @@ impl CustomStyle for LuaCustomStyle {
|
|||
fn on_start<'a>(
|
||||
&self,
|
||||
location: Token,
|
||||
parser: &dyn Parser,
|
||||
parser_state: &mut ParserState,
|
||||
document: &'a dyn Document<'a>,
|
||||
) -> Result<(), Report<(Rc<dyn Source>, Range<usize>)>> {
|
||||
let kernel = parser.get_kernel("main").unwrap();
|
||||
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>> {
|
||||
let kernel = parser_state.shared.kernels.get("main").unwrap();
|
||||
let ctx = KernelContext {
|
||||
location: location.clone(),
|
||||
parser,
|
||||
parser_state,
|
||||
document,
|
||||
};
|
||||
|
||||
|
@ -67,11 +68,11 @@ impl CustomStyle for LuaCustomStyle {
|
|||
.with_label(
|
||||
Label::new((location.source(), location.range.clone()))
|
||||
.with_message(err.to_string())
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(parser_state.parser.colors().error),
|
||||
)
|
||||
.with_note(format!(
|
||||
"When trying to start custom style {}",
|
||||
self.name().fg(parser.colors().info)
|
||||
self.name().fg(parser_state.parser.colors().info)
|
||||
))
|
||||
.finish(),
|
||||
);
|
||||
|
@ -84,13 +85,13 @@ impl CustomStyle for LuaCustomStyle {
|
|||
fn on_end<'a>(
|
||||
&self,
|
||||
location: Token,
|
||||
parser: &dyn Parser,
|
||||
document: &'a dyn Document<'a>,
|
||||
) -> Result<(), Report<(Rc<dyn Source>, Range<usize>)>> {
|
||||
let kernel = parser.get_kernel("main").unwrap();
|
||||
parser_state: &mut ParserState,
|
||||
document: &'a dyn Document<'a>
|
||||
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>> {
|
||||
let kernel = parser_state.shared.kernels.get("main").unwrap();
|
||||
let ctx = KernelContext {
|
||||
location: location.clone(),
|
||||
parser,
|
||||
parser_state,
|
||||
document,
|
||||
};
|
||||
|
||||
|
@ -104,11 +105,11 @@ impl CustomStyle for LuaCustomStyle {
|
|||
.with_label(
|
||||
Label::new((location.source(), location.range.clone()))
|
||||
.with_message(err.to_string())
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(parser_state.colors().error),
|
||||
)
|
||||
.with_note(format!(
|
||||
"When trying to end custom style {}",
|
||||
self.name().fg(parser.colors().info)
|
||||
self.name().fg(parser_state.colors().info)
|
||||
))
|
||||
.finish(),
|
||||
);
|
||||
|
@ -123,13 +124,13 @@ struct CustomStyleState {
|
|||
toggled: HashMap<String, Token>,
|
||||
}
|
||||
|
||||
impl State for CustomStyleState {
|
||||
impl RuleState for CustomStyleState {
|
||||
fn scope(&self) -> Scope { Scope::PARAGRAPH }
|
||||
|
||||
fn on_remove<'a>(
|
||||
&self,
|
||||
parser: &dyn Parser,
|
||||
document: &dyn Document,
|
||||
state: &mut ParserState,
|
||||
document: &dyn Document
|
||||
) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>> {
|
||||
let mut reports = vec![];
|
||||
|
||||
|
@ -154,15 +155,15 @@ impl State for CustomStyleState {
|
|||
.with_order(1)
|
||||
.with_message(format!(
|
||||
"Style {} starts here",
|
||||
style.fg(parser.colors().info)
|
||||
style.fg(state.parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.with_label(
|
||||
Label::new(paragraph_end)
|
||||
.with_order(1)
|
||||
.with_message(format!("Paragraph ends here"))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.with_note("Styles cannot span multiple documents (i.e @import)")
|
||||
.finish(),
|
||||
|
@ -173,22 +174,21 @@ impl State for CustomStyleState {
|
|||
}
|
||||
}
|
||||
|
||||
pub struct CustomStyleRule;
|
||||
static STATE_NAME: &'static str = "elements.custom_style";
|
||||
|
||||
lazy_static! {
|
||||
static ref STATE_NAME: String = "elements.custom_style".to_string();
|
||||
}
|
||||
pub struct CustomStyleRule;
|
||||
|
||||
impl Rule for CustomStyleRule {
|
||||
fn name(&self) -> &'static str { "Custom Style" }
|
||||
|
||||
fn next_match(&self, parser: &dyn Parser, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
|
||||
fn next_match(&self, state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
|
||||
let content = cursor.source.content();
|
||||
|
||||
let mut closest_match = usize::MAX;
|
||||
let mut matched_style = (None, false);
|
||||
parser
|
||||
.custom_styles()
|
||||
state
|
||||
.shared
|
||||
.custom_styles
|
||||
.iter()
|
||||
.for_each(|(_name, style)| match style.tokens() {
|
||||
CustomStyleToken::Toggle(s) => {
|
||||
|
@ -228,7 +228,7 @@ impl Rule for CustomStyleRule {
|
|||
|
||||
fn on_match<'a>(
|
||||
&self,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &'a dyn Document<'a>,
|
||||
cursor: Cursor,
|
||||
match_data: Option<Box<dyn Any>>,
|
||||
|
@ -239,13 +239,13 @@ impl Rule for CustomStyleRule {
|
|||
.downcast_ref::<(Rc<dyn CustomStyle>, bool)>()
|
||||
.unwrap();
|
||||
|
||||
let query = parser.state().query(&STATE_NAME);
|
||||
let state = match query {
|
||||
let query = state.shared.rule_state.get(STATE_NAME);
|
||||
let rule_state = match query {
|
||||
Some(state) => state,
|
||||
None => {
|
||||
// Insert as a new state
|
||||
match parser.state_mut().insert(
|
||||
STATE_NAME.clone(),
|
||||
match state.shared.rule_state.insert(
|
||||
STATE_NAME.into(),
|
||||
Rc::new(RefCell::new(CustomStyleState {
|
||||
toggled: HashMap::new(),
|
||||
})),
|
||||
|
@ -258,7 +258,7 @@ impl Rule for CustomStyleRule {
|
|||
|
||||
let (close, token) = match style.tokens() {
|
||||
CustomStyleToken::Toggle(s) => {
|
||||
let mut borrow = state.borrow_mut();
|
||||
let mut borrow = rule_state.borrow_mut();
|
||||
let state = borrow.downcast_mut::<CustomStyleState>().unwrap();
|
||||
|
||||
match state.toggled.get(style.name()) {
|
||||
|
@ -281,7 +281,7 @@ impl Rule for CustomStyleRule {
|
|||
}
|
||||
}
|
||||
CustomStyleToken::Pair(s_begin, s_end) => {
|
||||
let mut borrow = state.borrow_mut();
|
||||
let mut borrow = rule_state.borrow_mut();
|
||||
let state = borrow.downcast_mut::<CustomStyleState>().unwrap();
|
||||
|
||||
if *end {
|
||||
|
@ -299,9 +299,9 @@ impl Rule for CustomStyleRule {
|
|||
.with_order(1)
|
||||
.with_message(format!(
|
||||
"Cannot end style {} here, is it not started anywhere",
|
||||
style.name().fg(parser.colors().info)
|
||||
style.name().fg(state.parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
],
|
||||
|
@ -330,18 +330,18 @@ impl Rule for CustomStyleRule {
|
|||
.with_order(1)
|
||||
.with_message(format!(
|
||||
"Style cannot {} starts here",
|
||||
style.name().fg(parser.colors().info)
|
||||
style.name().fg(state.parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.with_label(
|
||||
Label::new((start_token.source(), start_token.range.clone()))
|
||||
.with_order(2)
|
||||
.with_message(format!(
|
||||
"Style {} starts previously here",
|
||||
style.name().fg(parser.colors().info)
|
||||
style.name().fg(state.parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish()],
|
||||
);
|
||||
|
@ -354,9 +354,9 @@ impl Rule for CustomStyleRule {
|
|||
};
|
||||
|
||||
if let Err(rep) = if close {
|
||||
style.on_end(token.clone(), parser, document)
|
||||
style.on_end(token.clone(), state, document)
|
||||
} else {
|
||||
style.on_start(token.clone(), parser, document)
|
||||
style.on_start(token.clone(), state, document)
|
||||
} {
|
||||
return (
|
||||
cursor.at(token.end()),
|
||||
|
@ -370,7 +370,7 @@ impl Rule for CustomStyleRule {
|
|||
}
|
||||
}
|
||||
|
||||
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> {
|
||||
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
|
||||
let mut bindings = vec![];
|
||||
|
||||
bindings.push((
|
||||
|
@ -388,7 +388,7 @@ impl Rule for CustomStyleRule {
|
|||
|
||||
CTX.with_borrow(|ctx| {
|
||||
ctx.as_ref().map(|ctx| {
|
||||
if let Some(_) = ctx.parser.get_custom_style(name.as_str()) {
|
||||
if let Some(_) = ctx.state.shared.custom_styles.get(name.as_str()) {
|
||||
result = Err(BadArgument {
|
||||
to: Some("define_toggled".to_string()),
|
||||
pos: 1,
|
||||
|
@ -399,7 +399,7 @@ impl Rule for CustomStyleRule {
|
|||
});
|
||||
return;
|
||||
}
|
||||
ctx.parser.insert_custom_style(Rc::new(style));
|
||||
ctx.state.shared.custom_styles.insert(Rc::new(style));
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -443,7 +443,7 @@ impl Rule for CustomStyleRule {
|
|||
|
||||
CTX.with_borrow(|ctx| {
|
||||
ctx.as_ref().map(|ctx| {
|
||||
if let Some(_) = ctx.parser.get_custom_style(name.as_str()) {
|
||||
if let Some(_) = ctx.state.shared.custom_styles.get(name.as_str()) {
|
||||
result = Err(BadArgument {
|
||||
to: Some("define_paired".to_string()),
|
||||
pos: 1,
|
||||
|
@ -454,7 +454,7 @@ impl Rule for CustomStyleRule {
|
|||
});
|
||||
return;
|
||||
}
|
||||
ctx.parser.insert_custom_style(Rc::new(style));
|
||||
ctx.state.shared.custom_styles.insert(Rc::new(style));
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -464,7 +464,7 @@ impl Rule for CustomStyleRule {
|
|||
.unwrap(),
|
||||
));
|
||||
|
||||
Some(bindings)
|
||||
bindings
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -16,6 +16,7 @@ use regex::Regex;
|
|||
use crate::document::document::Document;
|
||||
use crate::lua::kernel::CTX;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::rule::Rule;
|
||||
use crate::parser::source::Cursor;
|
||||
use crate::parser::source::Source;
|
||||
|
@ -58,7 +59,7 @@ impl ElemStyleRule {
|
|||
impl Rule for ElemStyleRule {
|
||||
fn name(&self) -> &'static str { "Element Style" }
|
||||
|
||||
fn next_match(&self, _parser: &dyn Parser, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
|
||||
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
|
||||
self.start_re
|
||||
.find_at(cursor.source.content(), cursor.pos)
|
||||
.map_or(None, |m| {
|
||||
|
@ -68,7 +69,7 @@ impl Rule for ElemStyleRule {
|
|||
|
||||
fn on_match<'a>(
|
||||
&self,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
_document: &'a (dyn Document<'a> + 'a),
|
||||
cursor: Cursor,
|
||||
_match_data: Option<Box<dyn Any>>,
|
||||
|
@ -91,7 +92,7 @@ impl Rule for ElemStyleRule {
|
|||
.with_label(
|
||||
Label::new((cursor.source.clone(), key.range()))
|
||||
.with_message(format!("Expected a non-empty style key",))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -99,7 +100,7 @@ impl Rule for ElemStyleRule {
|
|||
}
|
||||
|
||||
// Check if key exists
|
||||
if !parser.is_style_registered(trimmed) {
|
||||
if !state.shared.style.is_registered(trimmed) {
|
||||
reports.push(
|
||||
Report::build(ReportKind::Error, cursor.source.clone(), key.start())
|
||||
.with_message("Unknown Style Key")
|
||||
|
@ -107,9 +108,9 @@ impl Rule for ElemStyleRule {
|
|||
Label::new((cursor.source.clone(), key.range()))
|
||||
.with_message(format!(
|
||||
"Could not find a style with key: {}",
|
||||
trimmed.fg(parser.colors().info)
|
||||
trimmed.fg(state.parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -117,7 +118,7 @@ impl Rule for ElemStyleRule {
|
|||
return (cursor, reports);
|
||||
}
|
||||
|
||||
parser.current_style(trimmed)
|
||||
state.shared.style.current_style(trimmed)
|
||||
} else {
|
||||
panic!("Unknown error")
|
||||
};
|
||||
|
@ -135,7 +136,7 @@ impl Rule for ElemStyleRule {
|
|||
.with_message(format!(
|
||||
"Unable to parse json string after style key",
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -157,10 +158,10 @@ impl Rule for ElemStyleRule {
|
|||
))
|
||||
.with_message(format!(
|
||||
"Failed to serialize `{}` into style with key `{}`: {err}",
|
||||
json.fg(parser.colors().highlight),
|
||||
style.key().fg(parser.colors().info)
|
||||
json.fg(state.parser.colors().highlight),
|
||||
style.key().fg(state.parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -171,12 +172,12 @@ impl Rule for ElemStyleRule {
|
|||
}
|
||||
};
|
||||
|
||||
parser.set_current_style(new_style);
|
||||
state.shared.styles.set_current(new_style);
|
||||
|
||||
(cursor, reports)
|
||||
}
|
||||
|
||||
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> {
|
||||
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
|
||||
let mut bindings = vec![];
|
||||
|
||||
bindings.push((
|
||||
|
@ -215,6 +216,6 @@ impl Rule for ElemStyleRule {
|
|||
.unwrap(),
|
||||
));
|
||||
|
||||
Some(bindings)
|
||||
bindings
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,6 +3,7 @@ use std::ops::Range;
|
|||
use std::rc::Rc;
|
||||
use std::sync::Once;
|
||||
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::util::Property;
|
||||
use crate::parser::util::PropertyMapError;
|
||||
use crate::parser::util::PropertyParser;
|
||||
|
@ -190,7 +191,7 @@ impl RegexRule for GraphRule {
|
|||
fn on_regex_match(
|
||||
&self,
|
||||
_: usize,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &dyn Document,
|
||||
token: Token,
|
||||
matches: Captures,
|
||||
|
@ -207,10 +208,10 @@ impl RegexRule for GraphRule {
|
|||
Label::new((token.source().clone(), token.range.clone()))
|
||||
.with_message(format!(
|
||||
"Missing terminating `{}` after first `{}`",
|
||||
"[/graph]".fg(parser.colors().info),
|
||||
"[graph]".fg(parser.colors().info)
|
||||
"[/graph]".fg(state.parser.colors().info),
|
||||
"[graph]".fg(state.parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -230,7 +231,7 @@ impl RegexRule for GraphRule {
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), content.range()))
|
||||
.with_message("Graph code is empty")
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -251,7 +252,7 @@ impl RegexRule for GraphRule {
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), token.range.clone()))
|
||||
.with_message(format!("Graph is missing property: {e}"))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -269,7 +270,7 @@ impl RegexRule for GraphRule {
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), props.range()))
|
||||
.with_message(e)
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -294,10 +295,10 @@ impl RegexRule for GraphRule {
|
|||
Label::new((token.source().clone(), token.range.clone()))
|
||||
.with_message(format!(
|
||||
"Property `layout: {}` cannot be converted: {}",
|
||||
prop.fg(parser.colors().info),
|
||||
err.fg(parser.colors().error)
|
||||
prop.fg(state.parser.colors().info),
|
||||
err.fg(state.parser.colors().error)
|
||||
))
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -313,7 +314,7 @@ impl RegexRule for GraphRule {
|
|||
token.start() + 1..token.end(),
|
||||
))
|
||||
.with_message(err)
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -340,9 +341,9 @@ impl RegexRule for GraphRule {
|
|||
))
|
||||
.with_message(format!(
|
||||
"Property `{}` is missing",
|
||||
err.fg(parser.colors().info)
|
||||
err.fg(state.parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -352,7 +353,7 @@ impl RegexRule for GraphRule {
|
|||
},
|
||||
};
|
||||
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Graphviz {
|
||||
location: token,
|
||||
|
@ -364,7 +365,4 @@ impl RegexRule for GraphRule {
|
|||
|
||||
reports
|
||||
}
|
||||
|
||||
// TODO
|
||||
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }
|
||||
}
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
use crate::document::document::Document;
|
||||
use crate::document::document::DocumentAccessors;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::parser::ReportColors;
|
||||
use crate::parser::rule::RegexRule;
|
||||
use crate::parser::source::Source;
|
||||
|
@ -10,8 +11,6 @@ use ariadne::Fmt;
|
|||
use ariadne::Label;
|
||||
use ariadne::Report;
|
||||
use ariadne::ReportKind;
|
||||
use mlua::Function;
|
||||
use mlua::Lua;
|
||||
use regex::Captures;
|
||||
use regex::Regex;
|
||||
use std::ops::Range;
|
||||
|
@ -48,7 +47,7 @@ impl RegexRule for ImportRule {
|
|||
fn on_regex_match<'a>(
|
||||
&self,
|
||||
_: usize,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &'a dyn Document<'a>,
|
||||
token: Token,
|
||||
matches: Captures,
|
||||
|
@ -57,7 +56,7 @@ impl RegexRule for ImportRule {
|
|||
|
||||
// Path
|
||||
let import_file = match matches.get(2) {
|
||||
Some(name) => match ImportRule::validate_name(parser.colors(), name.as_str()) {
|
||||
Some(name) => match ImportRule::validate_name(state.parser.colors(), name.as_str()) {
|
||||
Err(msg) => {
|
||||
result.push(
|
||||
Report::build(ReportKind::Error, token.source(), name.start())
|
||||
|
@ -66,9 +65,9 @@ impl RegexRule for ImportRule {
|
|||
Label::new((token.source(), name.range()))
|
||||
.with_message(format!(
|
||||
"Import name `{}` is invalid. {msg}",
|
||||
name.as_str().fg(parser.colors().highlight)
|
||||
name.as_str().fg(state.parser.colors().highlight)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -85,9 +84,9 @@ impl RegexRule for ImportRule {
|
|||
Label::new((token.source(), name.range()))
|
||||
.with_message(format!(
|
||||
"Unable to access file `{}`",
|
||||
filename.fg(parser.colors().highlight)
|
||||
filename.fg(state.parser.colors().highlight)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -104,9 +103,9 @@ impl RegexRule for ImportRule {
|
|||
Label::new((token.source(), name.range()))
|
||||
.with_message(format!(
|
||||
"Path `{}` is not a file!",
|
||||
filename.fg(parser.colors().highlight)
|
||||
filename.fg(state.parser.colors().highlight)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -121,7 +120,7 @@ impl RegexRule for ImportRule {
|
|||
|
||||
// [Optional] import as
|
||||
let import_as = match matches.get(1) {
|
||||
Some(as_name) => match ImportRule::validate_as(parser.colors(), as_name.as_str()) {
|
||||
Some(as_name) => match ImportRule::validate_as(state.parser.colors(), as_name.as_str()) {
|
||||
Ok(as_name) => as_name,
|
||||
Err(msg) => {
|
||||
result.push(
|
||||
|
@ -131,9 +130,9 @@ impl RegexRule for ImportRule {
|
|||
Label::new((token.source(), as_name.range()))
|
||||
.with_message(format!(
|
||||
"Canot import `{import_file}` as `{}`. {msg}",
|
||||
as_name.as_str().fg(parser.colors().highlight)
|
||||
as_name.as_str().fg(state.parser.colors().highlight)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -153,7 +152,7 @@ impl RegexRule for ImportRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), token.range))
|
||||
.with_message(format!("Failed to read content from path `{path}`"))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -161,12 +160,15 @@ impl RegexRule for ImportRule {
|
|||
}
|
||||
};
|
||||
|
||||
let import_doc = parser.parse(import, Some(document));
|
||||
document.merge(import_doc.content(), import_doc.scope(), Some(&import_as));
|
||||
state.with_state(|new_state| {
|
||||
let import_doc = new_state.parser.parse(new_state, import, Some(document));
|
||||
document.merge(import_doc.content(), import_doc.scope(), Some(&import_as));
|
||||
});
|
||||
|
||||
// Close paragraph
|
||||
if document.last_element::<Paragraph>().is_some() {
|
||||
parser.push(
|
||||
// TODO2: Check if this is safe to remove
|
||||
if document.last_element::<Paragraph>().is_none() {
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Paragraph {
|
||||
location: Token::new(token.end()..token.end(), token.source()),
|
||||
|
@ -177,6 +179,4 @@ impl RegexRule for ImportRule {
|
|||
|
||||
return result;
|
||||
}
|
||||
|
||||
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }
|
||||
}
|
||||
|
|
|
@ -3,21 +3,22 @@ use crate::compiler::compiler::Target;
|
|||
use crate::document::document::Document;
|
||||
use crate::document::element::ElemKind;
|
||||
use crate::document::element::Element;
|
||||
use crate::document::layout::LayoutType;
|
||||
use crate::lua::kernel::CTX;
|
||||
use crate::parser::layout::LayoutHolder;
|
||||
use crate::parser::layout::LayoutType;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::parser::ReportColors;
|
||||
use crate::parser::rule::RegexRule;
|
||||
use crate::parser::source::Source;
|
||||
use crate::parser::source::Token;
|
||||
use crate::parser::state::RuleState;
|
||||
use crate::parser::state::Scope;
|
||||
use crate::parser::state::State;
|
||||
use crate::parser::util::process_escaped;
|
||||
use ariadne::Fmt;
|
||||
use ariadne::Label;
|
||||
use ariadne::Report;
|
||||
use ariadne::ReportKind;
|
||||
use lazy_static::lazy_static;
|
||||
use mlua::Error::BadArgument;
|
||||
use mlua::Function;
|
||||
use mlua::Lua;
|
||||
|
@ -54,7 +55,8 @@ impl FromStr for LayoutToken {
|
|||
}
|
||||
|
||||
mod default_layouts {
|
||||
use crate::parser::util::Property;
|
||||
use crate::parser::layout::LayoutType;
|
||||
use crate::parser::util::Property;
|
||||
use crate::parser::util::PropertyParser;
|
||||
|
||||
use super::*;
|
||||
|
@ -242,12 +244,12 @@ struct LayoutState {
|
|||
pub(self) stack: Vec<(Vec<Token>, Rc<dyn LayoutType>)>,
|
||||
}
|
||||
|
||||
impl State for LayoutState {
|
||||
impl RuleState for LayoutState {
|
||||
fn scope(&self) -> Scope { Scope::DOCUMENT }
|
||||
|
||||
fn on_remove<'a>(
|
||||
&self,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &dyn Document,
|
||||
) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>> {
|
||||
let mut reports = vec![];
|
||||
|
@ -265,15 +267,15 @@ impl State for LayoutState {
|
|||
.with_order(1)
|
||||
.with_message(format!(
|
||||
"Layout {} stars here",
|
||||
layout_type.name().fg(parser.colors().info)
|
||||
layout_type.name().fg(state.parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.with_label(
|
||||
Label::new((at.source(), at.range.clone()))
|
||||
.with_order(2)
|
||||
.with_message("Document ends here".to_string())
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -313,15 +315,15 @@ impl LayoutRule {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn initialize_state(parser: &dyn Parser) -> Rc<RefCell<dyn State>> {
|
||||
let query = parser.state().query(&STATE_NAME);
|
||||
pub fn initialize_state(state: &mut ParserState) -> Rc<RefCell<dyn RuleState>> {
|
||||
let query = state.shared.rule_state.get(STATE_NAME);
|
||||
match query {
|
||||
Some(state) => state,
|
||||
None => {
|
||||
// Insert as a new state
|
||||
match parser.state_mut().insert(
|
||||
STATE_NAME.clone(),
|
||||
Rc::new(RefCell::new(LayoutState { stack: vec![] })),
|
||||
match state.shared.rule_state.insert(
|
||||
STATE_NAME.into(),
|
||||
Rc::new(LayoutState { stack: vec![] }),
|
||||
) {
|
||||
Err(_) => panic!("Unknown error"),
|
||||
Ok(state) => state,
|
||||
|
@ -373,9 +375,7 @@ impl LayoutRule {
|
|||
}
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
static ref STATE_NAME: String = "elements.layout".to_string();
|
||||
}
|
||||
static STATE_NAME: &'static str = "elements.layout";
|
||||
|
||||
impl RegexRule for LayoutRule {
|
||||
fn name(&self) -> &'static str { "Layout" }
|
||||
|
@ -385,14 +385,14 @@ impl RegexRule for LayoutRule {
|
|||
fn on_regex_match(
|
||||
&self,
|
||||
index: usize,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &dyn Document,
|
||||
token: Token,
|
||||
matches: Captures,
|
||||
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>> {
|
||||
let mut reports = vec![];
|
||||
|
||||
let state = LayoutRule::initialize_state(parser);
|
||||
let rule_state = LayoutRule::initialize_state(state);
|
||||
|
||||
if index == 0
|
||||
// BEGIN_LAYOUT
|
||||
|
@ -406,9 +406,9 @@ impl RegexRule for LayoutRule {
|
|||
Label::new((token.source(), token.range.clone()))
|
||||
.with_message(format!(
|
||||
"Missing layout name after `{}`",
|
||||
"#+BEGIN_LAYOUT".fg(parser.colors().highlight)
|
||||
"#+BEGIN_LAYOUT".fg(state.parser.colors().highlight)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -426,9 +426,9 @@ impl RegexRule for LayoutRule {
|
|||
Label::new((token.source(), token.range.clone()))
|
||||
.with_message(format!(
|
||||
"Empty layout name after `{}`",
|
||||
"#+BEGIN_LAYOUT".fg(parser.colors().highlight)
|
||||
"#+BEGIN_LAYOUT".fg(state.parser.colors().highlight)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -443,9 +443,9 @@ impl RegexRule for LayoutRule {
|
|||
Label::new((token.source(), name.range()))
|
||||
.with_message(format!(
|
||||
"Missing a space before layout name `{}`",
|
||||
name.as_str().fg(parser.colors().highlight)
|
||||
name.as_str().fg(state.parser.colors().highlight)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -453,7 +453,7 @@ impl RegexRule for LayoutRule {
|
|||
}
|
||||
|
||||
// Get layout
|
||||
let layout_type = match parser.get_layout(trimmed) {
|
||||
let layout_type = match state.shared.layouts.get(trimmed) {
|
||||
None => {
|
||||
reports.push(
|
||||
Report::build(ReportKind::Error, token.source(), name.start())
|
||||
|
@ -462,9 +462,9 @@ impl RegexRule for LayoutRule {
|
|||
Label::new((token.source(), name.range()))
|
||||
.with_message(format!(
|
||||
"Cannot find layout `{}`",
|
||||
trimmed.fg(parser.colors().highlight)
|
||||
trimmed.fg(state.parser.colors().highlight)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -475,7 +475,7 @@ impl RegexRule for LayoutRule {
|
|||
|
||||
// Parse properties
|
||||
let properties = match LayoutRule::parse_properties(
|
||||
parser.colors(),
|
||||
state.parser.colors(),
|
||||
&token,
|
||||
layout_type.clone(),
|
||||
matches.get(1),
|
||||
|
@ -487,7 +487,7 @@ impl RegexRule for LayoutRule {
|
|||
}
|
||||
};
|
||||
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Layout {
|
||||
location: token.clone(),
|
||||
|
@ -524,7 +524,7 @@ impl RegexRule for LayoutRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), token.range.clone()))
|
||||
.with_message("No active layout found".to_string())
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -543,10 +543,10 @@ impl RegexRule for LayoutRule {
|
|||
Label::new((token.source(), token.range.clone()))
|
||||
.with_message(format!(
|
||||
"Layout expects a maximum of {} blocks, currently at {}",
|
||||
layout_type.expects().end.fg(parser.colors().info),
|
||||
tokens.len().fg(parser.colors().info),
|
||||
layout_type.expects().end.fg(state.parser.colors().info),
|
||||
tokens.len().fg(state.parser.colors().info),
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -555,7 +555,7 @@ impl RegexRule for LayoutRule {
|
|||
|
||||
// Parse properties
|
||||
let properties = match LayoutRule::parse_properties(
|
||||
parser.colors(),
|
||||
state.parser.colors(),
|
||||
&token,
|
||||
layout_type.clone(),
|
||||
matches.get(1),
|
||||
|
@ -587,7 +587,7 @@ impl RegexRule for LayoutRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), token.range.clone()))
|
||||
.with_message("No active layout found".to_string())
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -606,10 +606,10 @@ impl RegexRule for LayoutRule {
|
|||
Label::new((token.source(), token.range.clone()))
|
||||
.with_message(format!(
|
||||
"Layout expects a minimum of {} blocks, currently at {}",
|
||||
layout_type.expects().start.fg(parser.colors().info),
|
||||
tokens.len().fg(parser.colors().info),
|
||||
layout_type.expects().start.fg(state.parser.colors().info),
|
||||
tokens.len().fg(state.parser.colors().info),
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -618,7 +618,7 @@ impl RegexRule for LayoutRule {
|
|||
|
||||
// Parse properties
|
||||
let properties = match LayoutRule::parse_properties(
|
||||
parser.colors(),
|
||||
state.parser.colors(),
|
||||
&token,
|
||||
layout_type.clone(),
|
||||
matches.get(1),
|
||||
|
@ -636,7 +636,7 @@ impl RegexRule for LayoutRule {
|
|||
(id, LayoutToken::End, layout_type, properties)
|
||||
};
|
||||
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Layout {
|
||||
location: token,
|
||||
|
@ -650,8 +650,8 @@ impl RegexRule for LayoutRule {
|
|||
return reports;
|
||||
}
|
||||
|
||||
// TODO
|
||||
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> {
|
||||
// TODO: Add method to create new layouts
|
||||
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
|
||||
let mut bindings = vec![];
|
||||
|
||||
bindings.push((
|
||||
|
@ -831,7 +831,7 @@ impl RegexRule for LayoutRule {
|
|||
}
|
||||
};
|
||||
|
||||
ctx.parser.push(
|
||||
ctx.state.parser.push(
|
||||
ctx.document,
|
||||
Box::new(Layout {
|
||||
location: ctx.location.clone(),
|
||||
|
@ -850,12 +850,12 @@ impl RegexRule for LayoutRule {
|
|||
.unwrap(),
|
||||
));
|
||||
|
||||
Some(bindings)
|
||||
bindings
|
||||
}
|
||||
|
||||
fn register_layouts(&self, parser: &dyn Parser) {
|
||||
parser.insert_layout(Rc::new(default_layouts::Centered::default()));
|
||||
parser.insert_layout(Rc::new(default_layouts::Split::default()));
|
||||
fn register_layouts(&self, holder: &mut LayoutHolder) {
|
||||
holder.insert(Rc::new(default_layouts::Centered::default()));
|
||||
holder.insert(Rc::new(default_layouts::Split::default()));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -892,7 +892,7 @@ mod tests {
|
|||
None,
|
||||
));
|
||||
let parser = LangParser::default();
|
||||
let doc = parser.parse(source, None);
|
||||
let doc = parser.parse(ParserState::new(&parser, None), source, None);
|
||||
|
||||
validate_document!(doc.content().borrow(), 0,
|
||||
Layout { token == LayoutToken::Begin, id == 0 };
|
||||
|
@ -944,7 +944,7 @@ mod tests {
|
|||
None,
|
||||
));
|
||||
let parser = LangParser::default();
|
||||
let doc = parser.parse(source, None);
|
||||
let doc = parser.parse(ParserState::new(&parser, None), source, None);
|
||||
|
||||
validate_document!(doc.content().borrow(), 0,
|
||||
Layout { token == LayoutToken::Begin, id == 0 };
|
||||
|
|
|
@ -6,6 +6,7 @@ use crate::document::element::ElemKind;
|
|||
use crate::document::element::Element;
|
||||
use crate::lua::kernel::CTX;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::rule::RegexRule;
|
||||
use crate::parser::source::Source;
|
||||
use crate::parser::source::Token;
|
||||
|
@ -91,7 +92,7 @@ impl RegexRule for LinkRule {
|
|||
fn on_regex_match<'a>(
|
||||
&self,
|
||||
_: usize,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &'a (dyn Document<'a> + 'a),
|
||||
token: Token,
|
||||
matches: Captures,
|
||||
|
@ -107,7 +108,7 @@ impl RegexRule for LinkRule {
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), display.range()))
|
||||
.with_message("Link name is empty")
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -122,10 +123,10 @@ impl RegexRule for LinkRule {
|
|||
Label::new((token.source(), display.range()))
|
||||
.with_message(format!(
|
||||
"Link name is empty. Once processed, `{}` yields `{}`",
|
||||
display.as_str().fg(parser.colors().highlight),
|
||||
processed.fg(parser.colors().highlight),
|
||||
display.as_str().fg(state.parser.colors().highlight),
|
||||
processed.fg(state.parser.colors().highlight),
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -137,7 +138,7 @@ impl RegexRule for LinkRule {
|
|||
"Link Display".to_string(),
|
||||
processed,
|
||||
));
|
||||
match util::parse_paragraph(parser, source, document) {
|
||||
match util::parse_paragraph(state, source, document) {
|
||||
Err(err) => {
|
||||
reports.push(
|
||||
Report::build(ReportKind::Error, token.source(), display.start())
|
||||
|
@ -145,7 +146,7 @@ impl RegexRule for LinkRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), display.range()))
|
||||
.with_message(err.to_string())
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -166,7 +167,7 @@ impl RegexRule for LinkRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), url.range()))
|
||||
.with_message("Link url is empty")
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -182,10 +183,10 @@ impl RegexRule for LinkRule {
|
|||
Label::new((token.source(), url.range()))
|
||||
.with_message(format!(
|
||||
"Link url is empty. Once processed, `{}` yields `{}`",
|
||||
url.as_str().fg(parser.colors().highlight),
|
||||
text_content.as_str().fg(parser.colors().highlight),
|
||||
url.as_str().fg(state.parser.colors().highlight),
|
||||
text_content.as_str().fg(state.parser.colors().highlight),
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -196,7 +197,7 @@ impl RegexRule for LinkRule {
|
|||
_ => panic!("Empty link url"),
|
||||
};
|
||||
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Link {
|
||||
location: token,
|
||||
|
@ -208,7 +209,7 @@ impl RegexRule for LinkRule {
|
|||
return reports;
|
||||
}
|
||||
|
||||
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> {
|
||||
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
|
||||
let mut bindings = vec![];
|
||||
|
||||
bindings.push((
|
||||
|
@ -256,7 +257,7 @@ impl RegexRule for LinkRule {
|
|||
.unwrap(),
|
||||
));
|
||||
|
||||
Some(bindings)
|
||||
bindings
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -11,7 +11,7 @@ use crate::document::document::DocumentAccessors;
|
|||
use crate::document::element::ContainerElement;
|
||||
use crate::document::element::ElemKind;
|
||||
use crate::document::element::Element;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::rule::Rule;
|
||||
use crate::parser::source::Cursor;
|
||||
use crate::parser::source::Source;
|
||||
|
@ -25,8 +25,6 @@ use crate::parser::util::PropertyParser;
|
|||
use ariadne::Label;
|
||||
use ariadne::Report;
|
||||
use ariadne::ReportKind;
|
||||
use mlua::Function;
|
||||
use mlua::Lua;
|
||||
use regex::Match;
|
||||
use regex::Regex;
|
||||
|
||||
|
@ -136,7 +134,7 @@ impl ListRule {
|
|||
|
||||
fn push_markers(
|
||||
token: &Token,
|
||||
parser: &dyn Parser,
|
||||
state: &ParserState,
|
||||
document: &dyn Document,
|
||||
current: &Vec<(bool, usize)>,
|
||||
target: &Vec<(bool, usize)>,
|
||||
|
@ -152,7 +150,7 @@ impl ListRule {
|
|||
|
||||
// Close
|
||||
for i in start_pos..current.len() {
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(ListMarker {
|
||||
location: token.clone(),
|
||||
|
@ -164,7 +162,7 @@ impl ListRule {
|
|||
|
||||
// Open
|
||||
for i in start_pos..target.len() {
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(ListMarker {
|
||||
location: token.clone(),
|
||||
|
@ -252,7 +250,7 @@ impl ListRule {
|
|||
impl Rule for ListRule {
|
||||
fn name(&self) -> &'static str { "List" }
|
||||
|
||||
fn next_match(&self, _parser: &dyn Parser, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
|
||||
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
|
||||
self.start_re
|
||||
.find_at(cursor.source.content(), cursor.pos)
|
||||
.map_or(None, |m| {
|
||||
|
@ -262,10 +260,10 @@ impl Rule for ListRule {
|
|||
|
||||
fn on_match<'a>(
|
||||
&self,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &'a dyn Document<'a>,
|
||||
cursor: Cursor,
|
||||
_match_data: Option<Box<dyn Any>>,
|
||||
_match_data: Box<dyn Any>,
|
||||
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
|
||||
let mut reports = vec![];
|
||||
|
||||
|
@ -295,7 +293,7 @@ impl Rule for ListRule {
|
|||
.with_label(
|
||||
Label::new((cursor.source.clone(), properties.range()))
|
||||
.with_message(err)
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -354,12 +352,12 @@ impl Rule for ListRule {
|
|||
captures.get(1).unwrap().range(),
|
||||
))
|
||||
.with_message("Spacing for list entries do not match")
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.with_label(
|
||||
Label::new((cursor.source.clone(), spacing.0.clone()))
|
||||
.with_message("Previous spacing")
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -379,7 +377,7 @@ impl Rule for ListRule {
|
|||
"List Entry".to_string(),
|
||||
entry_content,
|
||||
));
|
||||
let parsed_content = match util::parse_paragraph(parser, entry_src, document) {
|
||||
let parsed_content = match util::parse_paragraph(state, entry_src, document) {
|
||||
Err(err) => {
|
||||
reports.push(
|
||||
Report::build(ReportKind::Warning, token.source(), token.range.start)
|
||||
|
@ -387,7 +385,7 @@ impl Rule for ListRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), token.range.clone()))
|
||||
.with_message(err)
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -400,12 +398,12 @@ impl Rule for ListRule {
|
|||
.last_element::<ListEntry>()
|
||||
.map(|ent| ent.numbering.clone())
|
||||
{
|
||||
ListRule::push_markers(&token, parser, document, &previous_depth, &depth);
|
||||
ListRule::push_markers(&token, state, document, &previous_depth, &depth);
|
||||
} else {
|
||||
ListRule::push_markers(&token, parser, document, &vec![], &depth);
|
||||
ListRule::push_markers(&token, state, document, &vec![], &depth);
|
||||
}
|
||||
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(ListEntry {
|
||||
location: Token::new(
|
||||
|
@ -428,13 +426,10 @@ impl Rule for ListRule {
|
|||
.map(|ent| ent.numbering.clone())
|
||||
.unwrap();
|
||||
let token = Token::new(end_cursor.pos..end_cursor.pos, end_cursor.source.clone());
|
||||
ListRule::push_markers(&token, parser, document, ¤t, &Vec::new());
|
||||
ListRule::push_markers(&token, state, document, ¤t, &Vec::new());
|
||||
|
||||
(end_cursor, reports)
|
||||
}
|
||||
|
||||
// TODO
|
||||
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -443,6 +438,7 @@ mod tests {
|
|||
use crate::elements::paragraph::Paragraph;
|
||||
use crate::elements::text::Text;
|
||||
use crate::parser::langparser::LangParser;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::source::SourceFile;
|
||||
use crate::validate_document;
|
||||
|
||||
|
@ -466,7 +462,8 @@ mod tests {
|
|||
None,
|
||||
));
|
||||
let parser = LangParser::default();
|
||||
let doc = parser.parse(source, None);
|
||||
let state = ParserState::new(&parser, None);
|
||||
let doc = parser.parse(state, source, None);
|
||||
|
||||
validate_document!(doc.content().borrow(), 0,
|
||||
ListMarker { numbered == false, kind == MarkerKind::Open };
|
||||
|
|
|
@ -24,6 +24,7 @@ use crate::document::element::Element;
|
|||
use crate::document::element::ReferenceableElement;
|
||||
use crate::document::references::validate_refname;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::parser::ReportColors;
|
||||
use crate::parser::rule::RegexRule;
|
||||
use crate::parser::source::Source;
|
||||
|
@ -333,7 +334,7 @@ impl RegexRule for MediaRule {
|
|||
fn on_regex_match<'a>(
|
||||
&self,
|
||||
_: usize,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &'a (dyn Document<'a> + 'a),
|
||||
token: Token,
|
||||
matches: Captures,
|
||||
|
@ -377,7 +378,7 @@ impl RegexRule for MediaRule {
|
|||
};
|
||||
|
||||
// Properties
|
||||
let properties = match self.parse_properties(parser.colors(), &token, &matches.get(3)) {
|
||||
let properties = match self.parse_properties(state.parser.colors(), &token, &matches.get(3)) {
|
||||
Ok(pm) => pm,
|
||||
Err(report) => {
|
||||
reports.push(report);
|
||||
|
@ -401,10 +402,10 @@ impl RegexRule for MediaRule {
|
|||
Label::new((token.source().clone(), token.range.clone()))
|
||||
.with_message(format!(
|
||||
"Property `type: {}` cannot be converted: {}",
|
||||
prop.fg(parser.colors().info),
|
||||
err.fg(parser.colors().error)
|
||||
prop.fg(state.parser.colors().info),
|
||||
err.fg(state.parser.colors().error)
|
||||
))
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -420,7 +421,7 @@ impl RegexRule for MediaRule {
|
|||
token.start() + 1..token.end(),
|
||||
))
|
||||
.with_message(format!("{err}. Required because mediatype could not be detected"))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -454,7 +455,7 @@ impl RegexRule for MediaRule {
|
|||
if source.content().is_empty() {
|
||||
None
|
||||
} else {
|
||||
match parse_paragraph(parser, source, document) {
|
||||
match parse_paragraph(state, source, document) {
|
||||
Ok(paragraph) => Some(*paragraph),
|
||||
Err(err) => {
|
||||
reports.push(
|
||||
|
@ -465,7 +466,7 @@ impl RegexRule for MediaRule {
|
|||
.with_message(format!(
|
||||
"Could not parse description: {err}"
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -480,7 +481,7 @@ impl RegexRule for MediaRule {
|
|||
let mut group = match document.last_element_mut::<Media>() {
|
||||
Some(group) => group,
|
||||
None => {
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Media {
|
||||
location: token.clone(),
|
||||
|
@ -507,7 +508,7 @@ impl RegexRule for MediaRule {
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), token.range.clone()))
|
||||
.with_message(err)
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -515,8 +516,6 @@ impl RegexRule for MediaRule {
|
|||
|
||||
reports
|
||||
}
|
||||
|
||||
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -550,7 +549,7 @@ mod tests {
|
|||
None,
|
||||
));
|
||||
let parser = LangParser::default();
|
||||
let doc = parser.parse(source, None);
|
||||
let doc = parser.parse(ParserState::new(&parser, None), source, None);
|
||||
|
||||
let borrow = doc.content().borrow();
|
||||
let group = borrow.first().as_ref().unwrap().as_container().unwrap();
|
||||
|
|
|
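Both the list and media hunks above now hand the parser state to `util::parse_paragraph` instead of the parser itself. A hedged sketch of the call site, assuming the helper keeps the `(state, source, document)` argument order shown here:

// Sketch: error handling condensed; names follow the hunks above.
let parsed_content = match util::parse_paragraph(state, entry_src, document) {
    Ok(paragraph) => Some(*paragraph),
    Err(err) => {
        // build a warning via state.parser.colors() and push it into `reports`
        None
    }
};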
@ -3,8 +3,6 @@ use std::ops::Range;
|
|||
use std::rc::Rc;
|
||||
|
||||
use ariadne::Report;
|
||||
use mlua::Function;
|
||||
use mlua::Lua;
|
||||
use regex::Regex;
|
||||
|
||||
use crate::compiler::compiler::Compiler;
|
||||
|
@ -14,6 +12,7 @@ use crate::document::element::ContainerElement;
|
|||
use crate::document::element::ElemKind;
|
||||
use crate::document::element::Element;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::rule::Rule;
|
||||
use crate::parser::source::Cursor;
|
||||
use crate::parser::source::Source;
|
||||
|
@ -108,7 +107,7 @@ impl ParagraphRule {
|
|||
impl Rule for ParagraphRule {
|
||||
fn name(&self) -> &'static str { "Paragraphing" }
|
||||
|
||||
fn next_match(&self, _parser: &dyn Parser, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
|
||||
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
|
||||
self.re
|
||||
.find_at(cursor.source.content(), cursor.pos)
|
||||
.and_then(|m| Some((m.start(), Box::new([false; 0]) as Box<dyn Any>)))
|
||||
|
@ -116,7 +115,7 @@ impl Rule for ParagraphRule {
|
|||
|
||||
fn on_match(
|
||||
&self,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &dyn Document,
|
||||
cursor: Cursor,
|
||||
_match_data: Option<Box<dyn Any>>,
|
||||
|
@ -126,7 +125,7 @@ impl Rule for ParagraphRule {
|
|||
Some(capture) => cursor.at(capture.get(0).unwrap().end() - 1),
|
||||
};
|
||||
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Paragraph {
|
||||
location: Token::new(cursor.pos..end_cursor.pos, cursor.source.clone()),
|
||||
|
@ -136,9 +135,6 @@ impl Rule for ParagraphRule {
|
|||
|
||||
(end_cursor, Vec::new())
|
||||
}
|
||||
|
||||
// TODO
|
||||
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -169,7 +165,7 @@ Last paragraph
|
|||
None,
|
||||
));
|
||||
let parser = LangParser::default();
|
||||
let doc = parser.parse(source, None);
|
||||
let doc = parser.parse(ParserState::new(&parser, None), source, None);
|
||||
|
||||
validate_document!(doc.content().borrow(), 0,
|
||||
Paragraph {
|
||||
|
|
|
@ -4,6 +4,7 @@ use crate::document::element::ElemKind;
|
|||
use crate::document::element::Element;
|
||||
use crate::lua::kernel::CTX;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::rule::RegexRule;
|
||||
use crate::parser::source::Source;
|
||||
use crate::parser::source::Token;
|
||||
|
@ -78,7 +79,7 @@ impl RegexRule for RawRule {
|
|||
fn on_regex_match(
|
||||
&self,
|
||||
_index: usize,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &dyn Document,
|
||||
token: Token,
|
||||
matches: Captures,
|
||||
|
@ -95,10 +96,10 @@ impl RegexRule for RawRule {
|
|||
Label::new((token.source().clone(), token.range.clone()))
|
||||
.with_message(format!(
|
||||
"Missing terminating `{}` after first `{}`",
|
||||
"?}".fg(parser.colors().info),
|
||||
"{?".fg(parser.colors().info)
|
||||
"?}".fg(state.parser.colors().info),
|
||||
"{?".fg(state.parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -115,7 +116,7 @@ impl RegexRule for RawRule {
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), content.range()))
|
||||
.with_message("Raw code is empty")
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -134,7 +135,7 @@ impl RegexRule for RawRule {
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), token.range.clone()))
|
||||
.with_message(format!("Raw code is missing properties: {e}"))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -152,7 +153,7 @@ impl RegexRule for RawRule {
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), props.range()))
|
||||
.with_message(e)
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -176,10 +177,10 @@ impl RegexRule for RawRule {
|
|||
Label::new((token.source().clone(), token.range.clone()))
|
||||
.with_message(format!(
|
||||
"Property `kind: {}` cannot be converted: {}",
|
||||
prop.fg(parser.colors().info),
|
||||
err.fg(parser.colors().error)
|
||||
prop.fg(state.parser.colors().info),
|
||||
err.fg(state.parser.colors().error)
|
||||
))
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -196,9 +197,9 @@ impl RegexRule for RawRule {
|
|||
))
|
||||
.with_message(format!(
|
||||
"Property `{}` is missing",
|
||||
err.fg(parser.colors().info)
|
||||
err.fg(state.parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -207,7 +208,7 @@ impl RegexRule for RawRule {
|
|||
},
|
||||
};
|
||||
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Raw {
|
||||
location: token.clone(),
|
||||
|
@ -219,7 +220,7 @@ impl RegexRule for RawRule {
|
|||
reports
|
||||
}
|
||||
|
||||
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> {
|
||||
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
|
||||
let mut bindings = vec![];
|
||||
|
||||
bindings.push((
|
||||
|
@ -242,7 +243,7 @@ impl RegexRule for RawRule {
|
|||
|
||||
CTX.with_borrow(|ctx| {
|
||||
ctx.as_ref().map(|ctx| {
|
||||
ctx.parser.push(
|
||||
ctx.state.parser.push(
|
||||
ctx.document,
|
||||
Box::new(Raw {
|
||||
location: ctx.location.clone(),
|
||||
|
@ -258,7 +259,7 @@ impl RegexRule for RawRule {
|
|||
.unwrap(),
|
||||
));
|
||||
|
||||
Some(bindings)
|
||||
bindings
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -282,7 +283,7 @@ Break{?[kind=block] Raw?}NewParagraph{?<b>?}
|
|||
None,
|
||||
));
|
||||
let parser = LangParser::default();
|
||||
let doc = parser.parse(source, None);
|
||||
let doc = parser.parse(ParserState::new(&parser, None), source, None);
|
||||
|
||||
validate_document!(doc.content().borrow(), 0,
|
||||
Paragraph;
|
||||
|
@ -297,15 +298,15 @@ Break{?[kind=block] Raw?}NewParagraph{?<b>?}
|
|||
#[test]
|
||||
fn lua() {
|
||||
let source = Rc::new(SourceFile::with_content(
|
||||
"".to_string(),
|
||||
r#"
|
||||
"".to_string(),
|
||||
r#"
|
||||
Break%<nml.raw.push("block", "Raw")>%NewParagraph%<nml.raw.push("inline", "<b>")>%
|
||||
"#
|
||||
.to_string(),
|
||||
None,
|
||||
.to_string(),
|
||||
None,
|
||||
));
|
||||
let parser = LangParser::default();
|
||||
let doc = parser.parse(source, None);
|
||||
let doc = parser.parse(ParserState::new(&parser, None), source, None);
|
||||
|
||||
validate_document!(doc.content().borrow(), 0,
|
||||
Paragraph;
|
||||
|
|
|
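The raw rule also swaps `lua_bindings` (returning `Option<Vec<…>>`) for `register_bindings`, which returns the vector directly. A sketch of the new shape, assuming the Lua closure takes the same `(kind, content)` pair that the Lua test above pushes:

// Sketch of the updated binding registration for a rule like RawRule.
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
    let mut bindings = vec![];
    bindings.push((
        "push".to_string(),
        lua.create_function(|_, (_kind, _content): (String, String)| {
            // inside the closure the element is pushed through ctx.state.parser.push(..)
            Ok(())
        })
        .unwrap(),
    ));
    bindings
}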
@ -19,6 +19,7 @@ use crate::document::element::ElemKind;
|
|||
use crate::document::element::Element;
|
||||
use crate::document::references::validate_refname;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::parser::ReportColors;
|
||||
use crate::parser::rule::RegexRule;
|
||||
use crate::parser::source::Source;
|
||||
|
@ -135,7 +136,7 @@ impl RegexRule for ReferenceRule {
|
|||
fn on_regex_match<'a>(
|
||||
&self,
|
||||
_: usize,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &'a (dyn Document<'a> + 'a),
|
||||
token: Token,
|
||||
matches: Captures,
|
||||
|
@ -155,7 +156,7 @@ impl RegexRule for ReferenceRule {
|
|||
Label::new((token.source().clone(), m.range())).with_message(
|
||||
format!(
|
||||
"Could not find element with reference: `{}`",
|
||||
refname.fg(parser.colors().info)
|
||||
refname.fg(state.parser.colors().info)
|
||||
),
|
||||
),
|
||||
)
|
||||
|
@ -178,7 +179,7 @@ impl RegexRule for ReferenceRule {
|
|||
}
|
||||
};
|
||||
// Properties
|
||||
let properties = match self.parse_properties(parser.colors(), &token, &matches.get(3)) {
|
||||
let properties = match self.parse_properties(state.parser.colors(), &token, &matches.get(3)) {
|
||||
Ok(pm) => pm,
|
||||
Err(report) => {
|
||||
reports.push(report);
|
||||
|
@ -193,7 +194,7 @@ impl RegexRule for ReferenceRule {
|
|||
.ok()
|
||||
.and_then(|(_, s)| Some(s));
|
||||
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Reference {
|
||||
location: token,
|
||||
|
@ -204,6 +205,4 @@ impl RegexRule for ReferenceRule {
|
|||
|
||||
reports
|
||||
}
|
||||
|
||||
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }
|
||||
}
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserStrategy;
|
||||
|
||||
use super::code::CodeRule;
|
||||
use super::comment::CommentRule;
|
||||
|
|
|
@ -2,6 +2,7 @@ use crate::document::document::Document;
|
|||
use crate::lua::kernel::Kernel;
|
||||
use crate::lua::kernel::KernelContext;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::parser::ReportColors;
|
||||
use crate::parser::rule::RegexRule;
|
||||
use crate::parser::source::Source;
|
||||
|
@ -84,7 +85,7 @@ impl RegexRule for ScriptRule {
|
|||
fn on_regex_match<'a>(
|
||||
&self,
|
||||
index: usize,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &'a dyn Document<'a>,
|
||||
token: Token,
|
||||
matches: Captures,
|
||||
|
@ -93,7 +94,7 @@ impl RegexRule for ScriptRule {
|
|||
|
||||
let kernel_name = match matches.get(1) {
|
||||
None => "main".to_string(),
|
||||
Some(name) => match ScriptRule::validate_kernel_name(parser.colors(), name.as_str()) {
|
||||
Some(name) => match ScriptRule::validate_kernel_name(state.parser.colors(), name.as_str()) {
|
||||
Ok(name) => name,
|
||||
Err(e) => {
|
||||
reports.push(
|
||||
|
@ -102,7 +103,7 @@ impl RegexRule for ScriptRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), name.range()))
|
||||
.with_message(e)
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -110,9 +111,12 @@ impl RegexRule for ScriptRule {
|
|||
}
|
||||
},
|
||||
};
|
||||
let kernel = parser
|
||||
.get_kernel(kernel_name.as_str())
|
||||
.unwrap_or_else(|| parser.insert_kernel(kernel_name.to_string(), Kernel::new(parser)));
|
||||
let kernel = state.shared.kernels
|
||||
.get(kernel_name.as_str())
|
||||
.unwrap_or_else(|| {
|
||||
state.shared.kernels.insert(kernel_name.to_string(), Kernel::new(state));
|
||||
state.shared.kernels.get(kernel_name.as_str()).unwrap()
|
||||
});
|
||||
|
||||
let kernel_data = matches
|
||||
.get(if index == 0 { 2 } else { 3 })
|
||||
|
@ -127,7 +131,7 @@ impl RegexRule for ScriptRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), token.start() + 1..token.end()))
|
||||
.with_message("Kernel code is empty")
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -166,7 +170,7 @@ impl RegexRule for ScriptRule {
|
|||
"Kernel execution failed:\n{}",
|
||||
e.to_string()
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -178,7 +182,7 @@ impl RegexRule for ScriptRule {
|
|||
// Validate kind
|
||||
let kind = match matches.get(2) {
|
||||
None => 0,
|
||||
Some(kind) => match self.validate_kind(parser.colors(), kind.as_str()) {
|
||||
Some(kind) => match self.validate_kind(state.parser.colors(), kind.as_str()) {
|
||||
Ok(kind) => kind,
|
||||
Err(msg) => {
|
||||
reports.push(
|
||||
|
@ -187,7 +191,7 @@ impl RegexRule for ScriptRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), kind.range()))
|
||||
.with_message(msg)
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -209,7 +213,7 @@ impl RegexRule for ScriptRule {
|
|||
"Kernel evaluation failed:\n{}",
|
||||
e.to_string()
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -223,7 +227,7 @@ impl RegexRule for ScriptRule {
|
|||
// Eval to text
|
||||
{
|
||||
if !result.is_empty() {
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Text::new(
|
||||
Token::new(1..source.content().len(), source.clone()),
|
||||
|
@ -240,7 +244,9 @@ impl RegexRule for ScriptRule {
|
|||
result,
|
||||
)) as Rc<dyn Source>;
|
||||
|
||||
parser.parse_into(parse_source, document);
|
||||
state.with_state(|new_state| {
|
||||
new_state.parser.parse_into(new_state, parse_source, document);
|
||||
})
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
|
@ -253,7 +259,7 @@ impl RegexRule for ScriptRule {
|
|||
"Kernel evaluation failed:\n{}",
|
||||
e.to_string()
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -267,15 +273,12 @@ impl RegexRule for ScriptRule {
|
|||
|
||||
let ctx = KernelContext {
|
||||
location: Token::new(0..source.content().len(), source.clone()),
|
||||
parser,
|
||||
state,
|
||||
document,
|
||||
};
|
||||
|
||||
kernel.run_with_context(ctx, execute)
|
||||
}
|
||||
|
||||
// TODO
|
||||
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
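With this refactor the script rule builds its `KernelContext` around the parser state. A sketch of the construction, using the field names from the struct definition in kernel.rs further down (lifetimes elided):

// Sketch: how ScriptRule hands context to the kernel after this change.
let ctx = KernelContext {
    location: Token::new(0..source.content().len(), source.clone()),
    state,    // &ParserState replaces the old `parser` field
    document,
};
kernel.run_with_context(ctx, execute);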
@ -6,9 +6,11 @@ use crate::document::element::Element;
|
|||
use crate::document::element::ReferenceableElement;
|
||||
use crate::lua::kernel::CTX;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::rule::RegexRule;
|
||||
use crate::parser::source::Source;
|
||||
use crate::parser::source::Token;
|
||||
use crate::parser::style::StyleHolder;
|
||||
use ariadne::Fmt;
|
||||
use ariadne::Label;
|
||||
use ariadne::Report;
|
||||
|
@ -160,7 +162,7 @@ impl RegexRule for SectionRule {
|
|||
fn on_regex_match(
|
||||
&self,
|
||||
_: usize,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &dyn Document,
|
||||
token: Token,
|
||||
matches: regex::Captures,
|
||||
|
@ -175,9 +177,9 @@ impl RegexRule for SectionRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), depth.range()))
|
||||
.with_message(format!("Section is of depth {}, which is greather than {} (maximum depth allowed)",
|
||||
depth.len().fg(parser.colors().info),
|
||||
6.fg(parser.colors().info)))
|
||||
.with_color(parser.colors().error))
|
||||
depth.len().fg(state.parser.colors().info),
|
||||
6.fg(state.parser.colors().info)))
|
||||
.with_color(state.parser.colors().error))
|
||||
.finish());
|
||||
return result;
|
||||
}
|
||||
|
@ -202,17 +204,17 @@ impl RegexRule for SectionRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), refname.range()))
|
||||
.with_message(format!("Reference with name `{}` is already defined in `{}`",
|
||||
refname.as_str().fg(parser.colors().highlight),
|
||||
elem.location().source().name().as_str().fg(parser.colors().highlight)))
|
||||
refname.as_str().fg(state.parser.colors().highlight),
|
||||
elem.location().source().name().as_str().fg(state.parser.colors().highlight)))
|
||||
.with_message(format!("`{}` conflicts with previously defined reference to {}",
|
||||
refname.as_str().fg(parser.colors().highlight),
|
||||
elem.element_name().fg(parser.colors().highlight)))
|
||||
.with_color(parser.colors().warning))
|
||||
refname.as_str().fg(state.parser.colors().highlight),
|
||||
elem.element_name().fg(state.parser.colors().highlight)))
|
||||
.with_color(state.parser.colors().warning))
|
||||
.with_label(
|
||||
Label::new((elem.location().source(), elem.location().start()..elem.location().end() ))
|
||||
.with_message(format!("`{}` previously defined here",
|
||||
refname.as_str().fg(parser.colors().highlight)))
|
||||
.with_color(parser.colors().warning))
|
||||
refname.as_str().fg(state.parser.colors().highlight)))
|
||||
.with_color(state.parser.colors().warning))
|
||||
.with_note(format!("Previous reference was overwritten"))
|
||||
.finish());
|
||||
}
|
||||
|
@ -234,10 +236,10 @@ impl RegexRule for SectionRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), kind.range()))
|
||||
.with_message(format!("Section numbering kind must be a combination of `{}` for unnumbered, and `{}` for non-listing; got `{}`",
|
||||
"*".fg(parser.colors().info),
|
||||
"+".fg(parser.colors().info),
|
||||
kind.as_str().fg(parser.colors().highlight)))
|
||||
.with_color(parser.colors().error))
|
||||
"*".fg(state.parser.colors().info),
|
||||
"+".fg(state.parser.colors().info),
|
||||
kind.as_str().fg(state.parser.colors().highlight)))
|
||||
.with_color(state.parser.colors().error))
|
||||
.with_help(format!("Leave empty for a numbered listed section"))
|
||||
.finish());
|
||||
return result;
|
||||
|
@ -265,7 +267,7 @@ impl RegexRule for SectionRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), name.range()))
|
||||
.with_message("Sections require a name before line end")
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -280,8 +282,8 @@ impl RegexRule for SectionRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), name.range()))
|
||||
.with_message("Sections require at least one whitespace before the section's name")
|
||||
.with_color(parser.colors().warning))
|
||||
.with_help(format!("Add a space before `{}`", section_name.fg(parser.colors().highlight)))
|
||||
.with_color(state.parser.colors().warning))
|
||||
.with_help(format!("Add a space before `{}`", section_name.fg(state.parser.colors().highlight)))
|
||||
.finish());
|
||||
return result;
|
||||
}
|
||||
|
@ -292,12 +294,12 @@ impl RegexRule for SectionRule {
|
|||
};
|
||||
|
||||
// Get style
|
||||
let style = parser
|
||||
let style = state.shared.styles
|
||||
.current_style(section_style::STYLE_KEY)
|
||||
.downcast_rc::<SectionStyle>()
|
||||
.unwrap();
|
||||
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Section {
|
||||
location: token.clone(),
|
||||
|
@ -312,7 +314,7 @@ impl RegexRule for SectionRule {
|
|||
return result;
|
||||
}
|
||||
|
||||
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> {
|
||||
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
|
||||
let mut bindings = vec![];
|
||||
|
||||
bindings.push((
|
||||
|
@ -365,11 +367,11 @@ impl RegexRule for SectionRule {
|
|||
.unwrap(),
|
||||
));
|
||||
|
||||
Some(bindings)
|
||||
bindings
|
||||
}
|
||||
|
||||
fn register_styles(&self, parser: &dyn Parser) {
|
||||
parser.set_current_style(Rc::new(SectionStyle::default()));
|
||||
fn register_styles(&self, holder: &mut StyleHolder) {
|
||||
holder.set_current_style(Rc::new(SectionStyle::default()));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -377,7 +379,6 @@ mod section_style {
|
|||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::document::style::ElementStyle;
|
||||
use crate::impl_elementstyle;
|
||||
|
||||
pub static STYLE_KEY: &'static str = "style.section";
|
||||
|
@ -409,7 +410,6 @@ mod section_style {
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::document::style::StyleHolder;
|
||||
use crate::parser::langparser::LangParser;
|
||||
use crate::parser::source::SourceFile;
|
||||
use crate::validate_document;
|
||||
|
@ -432,7 +432,7 @@ mod tests {
|
|||
None,
|
||||
));
|
||||
let parser = LangParser::default();
|
||||
let doc = parser.parse(source, None);
|
||||
let doc = parser.parse(ParserState::new(&parser, None), source, None);
|
||||
|
||||
validate_document!(doc.content().borrow(), 0,
|
||||
Section { depth == 1, title == "1" };
|
||||
|
@ -462,7 +462,7 @@ nml.section.push("6", 6, "", "refname")
|
|||
None,
|
||||
));
|
||||
let parser = LangParser::default();
|
||||
let doc = parser.parse(source, None);
|
||||
let doc = parser.parse(ParserState::new(&parser, None), source, None);
|
||||
|
||||
validate_document!(doc.content().borrow(), 0,
|
||||
Section { depth == 1, title == "1" };
|
||||
|
@ -488,9 +488,11 @@ nml.section.push("6", 6, "", "refname")
|
|||
None,
|
||||
));
|
||||
let parser = LangParser::default();
|
||||
let _ = parser.parse(source, None);
|
||||
let state = ParserState::new(&parser, None);
|
||||
let _ = parser.parse(state, source, None);
|
||||
|
||||
let style = parser
|
||||
let style = state.shared
|
||||
.styles
|
||||
.current_style(section_style::STYLE_KEY)
|
||||
.downcast_rc::<SectionStyle>()
|
||||
.unwrap();
|
||||
|
|
|
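Style registration now goes through a dedicated holder rather than the parser: rules receive a `&mut StyleHolder` to register into, and consumers read styles back from `state.shared.styles`. A sketch of both sides, using the names visible in the section hunks above:

// Registration side (sketch).
fn register_styles(&self, holder: &mut StyleHolder) {
    holder.set_current_style(Rc::new(SectionStyle::default()));
}

// Lookup side (sketch), as the updated test above does.
let style = state
    .shared
    .styles
    .current_style(section_style::STYLE_KEY)
    .downcast_rc::<SectionStyle>()
    .unwrap();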
@ -5,18 +5,16 @@ use crate::document::document::DocumentAccessors;
|
|||
use crate::document::element::ElemKind;
|
||||
use crate::document::element::Element;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::rule::RegexRule;
|
||||
use crate::parser::source::Source;
|
||||
use crate::parser::source::Token;
|
||||
use crate::parser::state::RuleState;
|
||||
use crate::parser::state::Scope;
|
||||
use crate::parser::state::State;
|
||||
use ariadne::Fmt;
|
||||
use ariadne::Label;
|
||||
use ariadne::Report;
|
||||
use ariadne::ReportKind;
|
||||
use lazy_static::lazy_static;
|
||||
use mlua::Function;
|
||||
use mlua::Lua;
|
||||
use regex::Captures;
|
||||
use regex::Regex;
|
||||
use std::cell::RefCell;
|
||||
|
@ -77,12 +75,12 @@ impl StyleState {
|
|||
}
|
||||
}
|
||||
|
||||
impl State for StyleState {
|
||||
impl RuleState for StyleState {
|
||||
fn scope(&self) -> Scope { Scope::PARAGRAPH }
|
||||
|
||||
fn on_remove<'a>(
|
||||
&self,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &dyn Document,
|
||||
) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>> {
|
||||
let mut reports = vec![];
|
||||
|
@ -116,15 +114,15 @@ impl State for StyleState {
|
|||
.with_order(1)
|
||||
.with_message(format!(
|
||||
"Style {} starts here",
|
||||
name.fg(parser.colors().info)
|
||||
name.fg(state.parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.with_label(
|
||||
Label::new(paragraph_end)
|
||||
.with_order(1)
|
||||
.with_message(format!("Paragraph ends here"))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.with_note("Styles cannot span multiple documents (i.e @import)")
|
||||
.finish(),
|
||||
|
@ -156,9 +154,7 @@ impl StyleRule {
|
|||
}
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
static ref STATE_NAME: String = "elements.style".to_string();
|
||||
}
|
||||
static STATE_NAME : &'static str = "elements.style";
|
||||
|
||||
impl RegexRule for StyleRule {
|
||||
fn name(&self) -> &'static str { "Style" }
|
||||
|
@ -168,19 +164,18 @@ impl RegexRule for StyleRule {
|
|||
fn on_regex_match(
|
||||
&self,
|
||||
index: usize,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &dyn Document,
|
||||
token: Token,
|
||||
_matches: Captures,
|
||||
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>> {
|
||||
let query = parser.state().query(&STATE_NAME);
|
||||
let query = state.shared.rule_state.get(&STATE_NAME);
|
||||
let state = match query {
|
||||
Some(state) => state,
|
||||
None => {
|
||||
// Insert as a new state
|
||||
match parser
|
||||
.state_mut()
|
||||
.insert(STATE_NAME.clone(), Rc::new(RefCell::new(StyleState::new())))
|
||||
match state.shared.rule_state
|
||||
.insert(STATE_NAME.into(), Rc::new(RefCell::new(StyleState::new())))
|
||||
{
|
||||
Err(_) => panic!("Unknown error"),
|
||||
Ok(state) => state,
|
||||
|
@ -192,7 +187,7 @@ impl RegexRule for StyleRule {
|
|||
style_state.toggled[index] = style_state.toggled[index]
|
||||
.clone()
|
||||
.map_or(Some(token.clone()), |_| None);
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Style::new(
|
||||
token.clone(),
|
||||
|
@ -206,9 +201,6 @@ impl RegexRule for StyleRule {
|
|||
|
||||
return vec![];
|
||||
}
|
||||
|
||||
// TODO
|
||||
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
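Per-rule state moves into a shared map as well: the style rule asks `state.shared.rule_state` for its entry and inserts a fresh one on first use. A hedged sketch of that get-or-insert, assuming `insert` returns a `Result` as the match arms above suggest:

// Sketch of the rule-state lookup used by StyleRule after this change.
let style_state = match state.shared.rule_state.get(&STATE_NAME) {
    Some(existing) => existing,
    None => match state
        .shared
        .rule_state
        .insert(STATE_NAME.into(), Rc::new(RefCell::new(StyleState::new())))
    {
        Ok(inserted) => inserted,
        Err(_) => panic!("Unknown error"),
    },
};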
@ -28,6 +28,7 @@ use crate::document::document::Document;
|
|||
use crate::document::element::ElemKind;
|
||||
use crate::document::element::Element;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::parser::ReportColors;
|
||||
use crate::parser::rule::RegexRule;
|
||||
use crate::parser::source::Source;
|
||||
|
@ -304,7 +305,7 @@ impl RegexRule for TexRule {
|
|||
fn on_regex_match(
|
||||
&self,
|
||||
index: usize,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &dyn Document,
|
||||
token: Token,
|
||||
matches: Captures,
|
||||
|
@ -321,10 +322,10 @@ impl RegexRule for TexRule {
|
|||
Label::new((token.source().clone(), token.range.clone()))
|
||||
.with_message(format!(
|
||||
"Missing terminating `{}` after first `{}`",
|
||||
["|$", "$"][index].fg(parser.colors().info),
|
||||
["$|", "$"][index].fg(parser.colors().info)
|
||||
["|$", "$"][index].fg(state.parser.colors().info),
|
||||
["$|", "$"][index].fg(state.parser.colors().info)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -344,7 +345,7 @@ impl RegexRule for TexRule {
|
|||
.with_label(
|
||||
Label::new((token.source().clone(), content.range()))
|
||||
.with_message("Tex code is empty")
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -354,7 +355,7 @@ impl RegexRule for TexRule {
|
|||
};
|
||||
|
||||
// Properties
|
||||
let properties = match self.parse_properties(parser.colors(), &token, &matches.get(1)) {
|
||||
let properties = match self.parse_properties(state.parser.colors(), &token, &matches.get(1)) {
|
||||
Ok(pm) => pm,
|
||||
Err(report) => {
|
||||
reports.push(report);
|
||||
|
@ -376,10 +377,10 @@ impl RegexRule for TexRule {
|
|||
Label::new((token.source().clone(), token.range.clone()))
|
||||
.with_message(format!(
|
||||
"Property `kind: {}` cannot be converted: {}",
|
||||
prop.fg(parser.colors().info),
|
||||
err.fg(parser.colors().error)
|
||||
prop.fg(state.parser.colors().info),
|
||||
err.fg(state.parser.colors().error)
|
||||
))
|
||||
.with_color(parser.colors().warning),
|
||||
.with_color(state.parser.colors().warning),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -412,7 +413,7 @@ impl RegexRule for TexRule {
|
|||
.and_then(|(_, value)| Some(value))
|
||||
.unwrap();
|
||||
|
||||
parser.push(
|
||||
state.parser.push(
|
||||
document,
|
||||
Box::new(Tex {
|
||||
mathmode: index == 1,
|
||||
|
@ -426,9 +427,6 @@ impl RegexRule for TexRule {
|
|||
|
||||
reports
|
||||
}
|
||||
|
||||
// TODO
|
||||
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
@ -12,6 +12,7 @@ use crate::document::element::ElemKind;
|
|||
use crate::document::element::Element;
|
||||
use crate::lua::kernel::CTX;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::rule::Rule;
|
||||
use crate::parser::source::Cursor;
|
||||
use crate::parser::source::Source;
|
||||
|
@ -48,11 +49,11 @@ pub struct TextRule;
|
|||
impl Rule for TextRule {
|
||||
fn name(&self) -> &'static str { "Text" }
|
||||
|
||||
fn next_match(&self, _parser: &dyn Parser, _cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> { None }
|
||||
fn next_match(&self, _state: &ParserState, _cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> { None }
|
||||
|
||||
fn on_match(
|
||||
&self,
|
||||
_parser: &dyn Parser,
|
||||
_state: &mut ParserState,
|
||||
_document: &dyn Document,
|
||||
_cursor: Cursor,
|
||||
_match_data: Option<Box<dyn Any>>,
|
||||
|
@ -60,14 +61,14 @@ impl Rule for TextRule {
|
|||
panic!("Text cannot match");
|
||||
}
|
||||
|
||||
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> {
|
||||
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
|
||||
let mut bindings = vec![];
|
||||
bindings.push((
|
||||
"push".to_string(),
|
||||
lua.create_function(|_, content: String| {
|
||||
CTX.with_borrow(|ctx| {
|
||||
ctx.as_ref().map(|ctx| {
|
||||
ctx.parser.push(
|
||||
ctx.state.parser.push(
|
||||
ctx.document,
|
||||
Box::new(Text {
|
||||
location: ctx.location.clone(),
|
||||
|
@ -82,6 +83,6 @@ impl Rule for TextRule {
|
|||
.unwrap(),
|
||||
));
|
||||
|
||||
Some(bindings)
|
||||
bindings
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,6 +4,7 @@ use crate::document::variable::PathVariable;
|
|||
use crate::document::variable::Variable;
|
||||
use crate::lua::kernel::CTX;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::parser::ReportColors;
|
||||
use crate::parser::rule::RegexRule;
|
||||
use crate::parser::source::Source;
|
||||
|
@ -123,7 +124,7 @@ impl RegexRule for VariableRule {
|
|||
fn on_regex_match<'a>(
|
||||
&self,
|
||||
_: usize,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &'a dyn Document,
|
||||
token: Token,
|
||||
matches: regex::Captures,
|
||||
|
@ -148,9 +149,9 @@ impl RegexRule for VariableRule {
|
|||
Label::new((token.source(), kind.range()))
|
||||
.with_message(format!(
|
||||
"Variable kind `{}` is unknown",
|
||||
kind.as_str().fg(parser.colors().highlight)
|
||||
kind.as_str().fg(state.parser.colors().highlight)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.with_help(format!(
|
||||
"Leave empty for regular variables. Available variable kinds:{}",
|
||||
|
@ -159,8 +160,8 @@ impl RegexRule for VariableRule {
|
|||
|acc, (char, name)| {
|
||||
acc + format!(
|
||||
"\n - `{}` : {}",
|
||||
char.fg(parser.colors().highlight),
|
||||
name.fg(parser.colors().info)
|
||||
char.fg(state.parser.colors().highlight),
|
||||
name.fg(state.parser.colors().info)
|
||||
)
|
||||
.as_str()
|
||||
}
|
||||
|
@ -178,7 +179,7 @@ impl RegexRule for VariableRule {
|
|||
};
|
||||
|
||||
let var_name = match matches.get(2) {
|
||||
Some(name) => match VariableRule::validate_name(&parser.colors(), name.as_str()) {
|
||||
Some(name) => match VariableRule::validate_name(&state.parser.colors(), name.as_str()) {
|
||||
Ok(var_name) => var_name,
|
||||
Err(msg) => {
|
||||
result.push(
|
||||
|
@ -188,9 +189,9 @@ impl RegexRule for VariableRule {
|
|||
Label::new((token.source(), name.range()))
|
||||
.with_message(format!(
|
||||
"Variable name `{}` is not allowed. {msg}",
|
||||
name.as_str().fg(parser.colors().highlight)
|
||||
name.as_str().fg(state.parser.colors().highlight)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -212,9 +213,9 @@ impl RegexRule for VariableRule {
|
|||
Label::new((token.source(), value.range()))
|
||||
.with_message(format!(
|
||||
"Variable value `{}` is not allowed. {msg}",
|
||||
value.as_str().fg(parser.colors().highlight)
|
||||
value.as_str().fg(state.parser.colors().highlight)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -226,7 +227,7 @@ impl RegexRule for VariableRule {
|
|||
};
|
||||
|
||||
match self.make_variable(
|
||||
&parser.colors(),
|
||||
&state.parser.colors(),
|
||||
token.clone(),
|
||||
var_kind,
|
||||
var_name.to_string(),
|
||||
|
@ -242,10 +243,10 @@ impl RegexRule for VariableRule {
|
|||
Label::new((token.source(), m.start() + 1..m.end()))
|
||||
.with_message(format!(
|
||||
"Unable to create variable `{}`. {}",
|
||||
var_name.fg(parser.colors().highlight),
|
||||
var_name.fg(state.parser.colors().highlight),
|
||||
msg
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -257,7 +258,7 @@ impl RegexRule for VariableRule {
|
|||
return result;
|
||||
}
|
||||
|
||||
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> {
|
||||
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
|
||||
let mut bindings = vec![];
|
||||
bindings.push((
|
||||
"insert".to_string(),
|
||||
|
@ -291,7 +292,7 @@ impl RegexRule for VariableRule {
|
|||
.unwrap(),
|
||||
));
|
||||
|
||||
Some(bindings)
|
||||
bindings
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -315,7 +316,7 @@ impl RegexRule for VariableSubstitutionRule {
|
|||
fn on_regex_match<'a>(
|
||||
&self,
|
||||
_index: usize,
|
||||
parser: &dyn Parser,
|
||||
state: &mut ParserState,
|
||||
document: &'a dyn Document<'a>,
|
||||
token: Token,
|
||||
matches: regex::Captures,
|
||||
|
@ -332,7 +333,7 @@ impl RegexRule for VariableSubstitutionRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), matches.get(0).unwrap().range()))
|
||||
.with_message(format!("Missing variable name for substitution"))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -347,7 +348,7 @@ impl RegexRule for VariableSubstitutionRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), name.range()))
|
||||
.with_message(format!("Variable names contains leading spaces"))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.with_help("Remove leading spaces")
|
||||
.finish(),
|
||||
|
@ -365,7 +366,7 @@ impl RegexRule for VariableSubstitutionRule {
|
|||
.with_message(format!(
|
||||
"Variable names contains trailing spaces"
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.with_help("Remove trailing spaces")
|
||||
.finish(),
|
||||
|
@ -374,7 +375,7 @@ impl RegexRule for VariableSubstitutionRule {
|
|||
return result;
|
||||
}
|
||||
// Invalid name
|
||||
match VariableRule::validate_name(&parser.colors(), name.as_str()) {
|
||||
match VariableRule::validate_name(&state.parser.colors(), name.as_str()) {
|
||||
Err(msg) => {
|
||||
result.push(
|
||||
Report::build(ReportKind::Error, token.source(), name.start())
|
||||
|
@ -382,7 +383,7 @@ impl RegexRule for VariableSubstitutionRule {
|
|||
.with_label(
|
||||
Label::new((token.source(), name.range()))
|
||||
.with_message(msg)
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -402,9 +403,9 @@ impl RegexRule for VariableSubstitutionRule {
|
|||
Label::new((token.source(), name.range()))
|
||||
.with_message(format!(
|
||||
"Unable to find variable with name: `{}`",
|
||||
name.as_str().fg(parser.colors().highlight)
|
||||
name.as_str().fg(state.parser.colors().highlight)
|
||||
))
|
||||
.with_color(parser.colors().error),
|
||||
.with_color(state.parser.colors().error),
|
||||
)
|
||||
.finish(),
|
||||
);
|
||||
|
@ -416,10 +417,8 @@ impl RegexRule for VariableSubstitutionRule {
|
|||
_ => panic!("Unknown error"),
|
||||
};
|
||||
|
||||
variable.parse(token, parser, document);
|
||||
variable.parse(token, state, document);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }
|
||||
}
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
use std::cell::RefCell;
|
||||
use std::cell::RefMut;
|
||||
use std::collections::HashMap;
|
||||
|
||||
use mlua::Error;
|
||||
use mlua::FromLuaMulti;
|
||||
|
@ -9,17 +10,18 @@ use mlua::Lua;
|
|||
|
||||
use crate::document::document::Document;
|
||||
use crate::parser::parser::Parser;
|
||||
use crate::parser::parser::ParserState;
|
||||
use crate::parser::source::Token;
|
||||
|
||||
pub struct KernelContext<'a, 'b> {
|
||||
pub struct KernelContext<'a, 'b, 'c> {
|
||||
pub location: Token,
|
||||
pub parser: &'a dyn Parser,
|
||||
pub document: &'b dyn Document<'b>,
|
||||
pub state: &'a ParserState<'a, 'b>,
|
||||
pub document: &'c dyn Document<'c>,
|
||||
//pub parser: &'a dyn Parser,
|
||||
}
|
||||
|
||||
thread_local! {
|
||||
pub static CTX: RefCell<Option<KernelContext<'static, 'static>>> = RefCell::new(None);
|
||||
pub static CTX: RefCell<Option<KernelContext<'static, 'static, 'static>>> = RefCell::new(None);
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
|
@ -28,9 +30,6 @@ pub struct Kernel {
|
|||
}
|
||||
|
||||
impl Kernel {
|
||||
// TODO: Take parser as arg and
|
||||
// iterate over the rules
|
||||
// to find export the bindings (if some)
|
||||
pub fn new(parser: &dyn Parser) -> Self {
|
||||
let lua = Lua::new();
|
||||
|
||||
|
@ -38,16 +37,13 @@ impl Kernel {
|
|||
let nml_table = lua.create_table().unwrap();
|
||||
|
||||
for rule in parser.rules() {
|
||||
if let Some(bindings) = rule.lua_bindings(&lua) {
|
||||
let table = lua.create_table().unwrap();
|
||||
let name = rule.name().to_lowercase().replace(' ', "_");
|
||||
|
||||
for (fun_name, fun) in bindings {
|
||||
table.set(fun_name, fun).unwrap();
|
||||
}
|
||||
|
||||
nml_table.set(name, table).unwrap();
|
||||
let table = lua.create_table().unwrap();
|
||||
// TODO: Export this so we can check for duplicate rules based on this name
|
||||
let name = rule.name().to_lowercase().replace(' ', "_");
|
||||
for (fun_name, fun) in rule.lua_bindings(&lua) {
|
||||
table.set(fun_name, fun).unwrap();
|
||||
}
|
||||
nml_table.set(name, table).unwrap();
|
||||
}
|
||||
lua.globals().set("nml", nml_table).unwrap();
|
||||
}
|
||||
|
@ -71,8 +67,17 @@ impl Kernel {
|
|||
}
|
||||
}
|
||||
|
||||
pub trait KernelHolder {
|
||||
fn get_kernel(&self, name: &str) -> Option<RefMut<'_, Kernel>>;
|
||||
|
||||
fn insert_kernel(&self, name: String, kernel: Kernel) -> RefMut<'_, Kernel>;
|
||||
#[derive(Default)]
|
||||
pub struct KernelHolder {
|
||||
kernels: HashMap<String, Kernel>,
|
||||
}
|
||||
|
||||
impl KernelHolder {
|
||||
pub fn get(&self, kernel_name: &str) -> Option<&Kernel> {
|
||||
self.kernels.get(kernel_name)
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, kernel_name: String, kernel: Kernel) {
|
||||
self.kernels.insert(kernel_name, kernel);
|
||||
}
|
||||
}
|
||||
|
|
|
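`KernelHolder` turns from a trait on the parser into a plain struct that owns the kernels. A minimal usage sketch, assuming `insert` takes `&mut self` (which the underlying map mutation requires):

// Sketch: the holder is expected to live in the shared parser state.
let mut kernels = KernelHolder::default();
kernels.insert("main".to_string(), kernel); // `kernel` built elsewhere via Kernel::new(..)
if let Some(main) = kernels.get("main") {
    // evaluate Lua through `main`, e.g. main.run_with_context(ctx, ..)
}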
@ -21,6 +21,7 @@ use document::document::Document;
|
|||
use getopts::Options;
|
||||
use parser::langparser::LangParser;
|
||||
use parser::parser::Parser;
|
||||
use parser::parser::ParserState;
|
||||
use rusqlite::Connection;
|
||||
use walkdir::WalkDir;
|
||||
|
||||
|
@ -46,13 +47,13 @@ NML version: 0.4\n"
|
|||
);
|
||||
}
|
||||
|
||||
fn parse(input: &str, debug_opts: &Vec<String>) -> Result<Box<dyn Document<'static>>, String> {
|
||||
fn parse(parser: &LangParser, input: &str, debug_opts: &Vec<String>) -> Result<Box<dyn Document<'static>>, String> {
|
||||
println!("Parsing {input}...");
|
||||
let parser = LangParser::default();
|
||||
|
||||
// Parse
|
||||
let source = SourceFile::new(input.to_string(), None).unwrap();
|
||||
let doc = parser.parse(Rc::new(source), None);
|
||||
let doc = parser.parse(ParserState::new(&parser, None), Rc::new(source), None);
|
||||
|
||||
if debug_opts.contains(&"ast".to_string()) {
|
||||
println!("-- BEGIN AST DEBUGGING --");
|
||||
|
@ -106,6 +107,7 @@ fn process(
|
|||
CompiledDocument::init_cache(&con)
|
||||
.map_err(|err| format!("Failed to initialize cached document table: {err}"))?;
|
||||
|
||||
let parser = LangParser::default();
|
||||
for file in files {
|
||||
let meta = std::fs::metadata(&file)
|
||||
.map_err(|err| format!("Failed to get metadata for `{file:#?}`: {err}"))?;
|
||||
|
@ -123,7 +125,7 @@ fn process(
|
|||
|
||||
let parse_and_compile = || -> Result<CompiledDocument, String> {
|
||||
// Parse
|
||||
let doc = parse(file.to_str().unwrap(), debug_opts)?;
|
||||
let doc = parse(&parser, file.to_str().unwrap(), debug_opts)?;
|
||||
|
||||
// Compile
|
||||
let compiler = Compiler::new(target, db_path.clone());
|
||||
|
|
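The driver now constructs a single `LangParser` up front and reuses it for every input file, creating a fresh `ParserState` per document. A sketch of the per-file flow in `process` (paths and options are placeholders):

// Sketch only; mirrors the updated parse() helper above.
let parser = LangParser::default();
for file in files {
    let source = SourceFile::new(file.to_str().unwrap().to_string(), None).unwrap();
    let doc = parser.parse(ParserState::new(&parser, None), Rc::new(source), None);
    // hand `doc` to Compiler::new(target, db_path.clone()) as before
}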
54
src/parser/customstyle.rs
Normal file
|
@ -0,0 +1,54 @@
|
|||
use std::collections::HashMap;
|
||||
use std::ops::Range;
|
||||
use std::rc::Rc;
|
||||
|
||||
use ariadne::Report;
|
||||
|
||||
use crate::document::document::Document;
|
||||
use crate::parser::source::Source;
|
||||
use crate::parser::source::Token;
|
||||
|
||||
use super::parser::ParserState;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum CustomStyleToken {
|
||||
Toggle(String),
|
||||
Pair(String, String),
|
||||
}
|
||||
|
||||
pub trait CustomStyle: core::fmt::Debug {
|
||||
/// Name for the custom style
|
||||
fn name(&self) -> &str;
|
||||
/// Gets the begin and end token for a custom style
|
||||
fn tokens(&self) -> &CustomStyleToken;
|
||||
|
||||
fn on_start<'a>(
|
||||
&self,
|
||||
location: Token,
|
||||
state: &mut ParserState,
|
||||
document: &'a (dyn Document<'a> + 'a),
|
||||
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>>;
|
||||
fn on_end<'a>(
|
||||
&self,
|
||||
location: Token,
|
||||
state: &mut ParserState,
|
||||
document: &'a (dyn Document<'a> + 'a),
|
||||
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>>;
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct CustomStyleHolder {
|
||||
custom_styles: HashMap<String, Rc<dyn CustomStyle>>,
|
||||
}
|
||||
|
||||
impl CustomStyleHolder {
|
||||
pub fn get(&self, style_name: &str) -> Option<Rc<dyn CustomStyle>> {
|
||||
self.custom_styles
|
||||
.get(style_name)
|
||||
.map(|style| style.clone())
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, style: Rc<dyn CustomStyle>) {
|
||||
self.custom_styles.insert(style.name().into(), style);
|
||||
}
|
||||
}
|
|
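The new `CustomStyleHolder` follows the same trait-to-struct move as the other holders. A short usage sketch, assuming `get` and `insert` are exposed like their `LayoutHolder` counterparts (the style type is hypothetical):

// Sketch: registering and resolving a custom style.
let mut holder = CustomStyleHolder::default();
holder.insert(Rc::new(MyCustomStyle::default())); // MyCustomStyle: hypothetical CustomStyle impl
if let Some(style) = holder.get("my_style") {
    // style.on_start(..) / style.on_end(..) now return Vec<Report<..>> directly
}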
@ -1,8 +1,4 @@
|
|||
use std::any::Any;
|
||||
use std::cell::Ref;
|
||||
use std::cell::RefCell;
|
||||
use std::cell::RefMut;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::ops::Range;
|
||||
use std::rc::Rc;
|
||||
|
@ -10,8 +6,6 @@ use std::rc::Rc;
|
|||
use ariadne::Label;
|
||||
use ariadne::Report;
|
||||
|
||||
use crate::document::customstyle::CustomStyle;
|
||||
use crate::document::customstyle::CustomStyleHolder;
|
||||
use crate::document::document::Document;
|
||||
use crate::document::document::DocumentAccessors;
|
||||
use crate::document::element::ContainerElement;
|
||||
|
@ -19,26 +13,19 @@ use crate::document::element::DocumentEnd;
|
|||
use crate::document::element::ElemKind;
|
||||
use crate::document::element::Element;
|
||||
use crate::document::langdocument::LangDocument;
|
||||
use crate::document::layout::LayoutHolder;
|
||||
use crate::document::layout::LayoutType;
|
||||
use crate::document::style::ElementStyle;
|
||||
use crate::document::style::StyleHolder;
|
||||
use crate::elements::paragraph::Paragraph;
|
||||
use crate::elements::registrar::register;
|
||||
use crate::elements::text::Text;
|
||||
use crate::lua::kernel::Kernel;
|
||||
use crate::lua::kernel::KernelHolder;
|
||||
use crate::parser::source::SourceFile;
|
||||
use crate::parser::source::VirtualSource;
|
||||
|
||||
use super::parser::Parser;
|
||||
use super::parser::ParserStrategy;
|
||||
use super::parser::ParserState;
|
||||
use super::parser::ReportColors;
|
||||
use super::rule::Rule;
|
||||
use super::source::Cursor;
|
||||
use super::source::Source;
|
||||
use super::source::Token;
|
||||
use super::state::StateHolder;
|
||||
use super::util;
|
||||
|
||||
/// Parser for the language
|
||||
|
@ -49,13 +36,6 @@ pub struct LangParser {
|
|||
|
||||
// Parser state
|
||||
pub err_flag: RefCell<bool>,
|
||||
pub matches: RefCell<Vec<(usize, Option<Box<dyn Any>>)>>,
|
||||
|
||||
pub state: RefCell<StateHolder>,
|
||||
pub kernels: RefCell<HashMap<String, Kernel>>,
|
||||
pub styles: RefCell<HashMap<String, Rc<dyn ElementStyle>>>,
|
||||
pub layouts: RefCell<HashMap<String, Rc<dyn LayoutType>>>,
|
||||
pub custom_styles: RefCell<HashMap<String, Rc<dyn CustomStyle>>>,
|
||||
}
|
||||
|
||||
impl LangParser {
|
||||
|
@ -64,142 +44,36 @@ impl LangParser {
|
|||
rules: vec![],
|
||||
colors: ReportColors::with_colors(),
|
||||
err_flag: RefCell::new(false),
|
||||
matches: RefCell::new(Vec::new()),
|
||||
state: RefCell::new(StateHolder::new()),
|
||||
kernels: RefCell::new(HashMap::new()),
|
||||
styles: RefCell::new(HashMap::new()),
|
||||
layouts: RefCell::new(HashMap::new()),
|
||||
custom_styles: RefCell::new(HashMap::new()),
|
||||
//matches: RefCell::new(Vec::new()),
|
||||
//state: RefCell::new(StateHolder::new()),
|
||||
//kernels: RefCell::new(HashMap::new()),
|
||||
//styles: RefCell::new(HashMap::new()),
|
||||
//layouts: RefCell::new(HashMap::new()),
|
||||
//custom_styles: RefCell::new(HashMap::new()),
|
||||
};
|
||||
// Register rules
|
||||
// TODO2: use https://docs.rs/inventory/latest/inventory/
|
||||
register(&mut s);
|
||||
|
||||
// Register default kernel
|
||||
s.kernels
|
||||
.borrow_mut()
|
||||
.insert("main".to_string(), Kernel::new(&s));
|
||||
|
||||
// Register default styles
|
||||
for rule in &s.rules {
|
||||
rule.register_styles(&s);
|
||||
}
|
||||
|
||||
// Register default layouts
|
||||
for rule in &s.rules {
|
||||
rule.register_layouts(&s);
|
||||
}
|
||||
|
||||
s
|
||||
}
|
||||
|
||||
fn handle_reports<'a>(
|
||||
&self,
|
||||
_source: Rc<dyn Source>,
|
||||
reports: Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>>,
|
||||
) {
|
||||
for mut report in reports {
|
||||
let mut sources: HashSet<Rc<dyn Source>> = HashSet::new();
|
||||
fn recurse_source(sources: &mut HashSet<Rc<dyn Source>>, source: Rc<dyn Source>) {
|
||||
sources.insert(source.clone());
|
||||
match source.location() {
|
||||
Some(parent) => {
|
||||
let parent_source = parent.source();
|
||||
if sources.get(&parent_source).is_none() {
|
||||
recurse_source(sources, parent_source);
|
||||
}
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
}
|
||||
|
||||
report.labels.iter().for_each(|label| {
|
||||
recurse_source(&mut sources, label.span.0.clone());
|
||||
});
|
||||
|
||||
let cache = sources
|
||||
.iter()
|
||||
.map(|source| (source.clone(), source.content().clone()))
|
||||
.collect::<Vec<(Rc<dyn Source>, String)>>();
|
||||
|
||||
cache.iter().for_each(|(source, _)| {
|
||||
if let Some(location) = source.location() {
|
||||
if let Some(_s) = source.downcast_ref::<SourceFile>() {
|
||||
report.labels.push(
|
||||
Label::new((location.source(), location.start() + 1..location.end()))
|
||||
.with_message("In file included from here")
|
||||
.with_order(-1),
|
||||
);
|
||||
};
|
||||
|
||||
if let Some(_s) = source.downcast_ref::<VirtualSource>() {
|
||||
let start = location.start()
|
||||
+ (location.source().content().as_bytes()[location.start()]
|
||||
== '\n' as u8)
|
||||
.then_some(1)
|
||||
.unwrap_or(0);
|
||||
report.labels.push(
|
||||
Label::new((location.source(), start..location.end()))
|
||||
.with_message("In evaluation of")
|
||||
.with_order(-1),
|
||||
);
|
||||
};
|
||||
}
|
||||
});
|
||||
report.eprint(ariadne::sources(cache)).unwrap()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Parser for LangParser {
|
||||
fn colors(&self) -> &ReportColors { &self.colors }
|
||||
|
||||
fn rules(&self) -> &Vec<Box<dyn Rule>> { &self.rules }
|
||||
fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>> { &mut self.rules }
|
||||
|
||||
fn state(&self) -> std::cell::Ref<'_, StateHolder> { self.state.borrow() }
|
||||
fn state_mut(&self) -> std::cell::RefMut<'_, StateHolder> { self.state.borrow_mut() }
|
||||
|
||||
fn has_error(&self) -> bool { *self.err_flag.borrow() }
|
||||
|
||||
/// Add an [`Element`] to the [`Document`]
|
||||
fn push<'a>(&self, doc: &dyn Document, elem: Box<dyn Element>) {
|
||||
if elem.kind() == ElemKind::Inline || elem.kind() == ElemKind::Invisible {
|
||||
let mut paragraph = doc
|
||||
.last_element_mut::<Paragraph>()
|
||||
.or_else(|| {
|
||||
doc.push(Box::new(Paragraph {
|
||||
location: elem.location().clone(),
|
||||
content: Vec::new(),
|
||||
}));
|
||||
doc.last_element_mut::<Paragraph>()
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
paragraph.push(elem).unwrap();
|
||||
} else {
|
||||
// Process paragraph events
|
||||
if doc.last_element::<Paragraph>().is_some_and(|_| true) {
|
||||
self.handle_reports(
|
||||
doc.source(),
|
||||
self.state_mut()
|
||||
.on_scope_end(self, doc, super::state::Scope::PARAGRAPH),
|
||||
);
|
||||
}
|
||||
|
||||
doc.push(elem);
|
||||
}
|
||||
}
|
||||
|
||||
fn parse<'a>(
|
||||
&self,
|
||||
state: ParserState,
|
||||
source: Rc<dyn Source>,
|
||||
parent: Option<&'a dyn Document<'a>>,
|
||||
) -> Box<dyn Document<'a> + 'a> {
|
||||
let doc = LangDocument::new(source.clone(), parent);
|
||||
let mut matches = Vec::new();
|
||||
for _ in 0..self.rules.len() {
|
||||
matches.push((0usize, None));
|
||||
}
|
||||
|
||||
let content = source.content();
|
||||
let mut cursor = Cursor::new(0usize, doc.source()); // Cursor in file
|
||||
|
@ -207,15 +81,15 @@ impl Parser for LangParser {
|
|||
if let Some(parent) = parent
|
||||
// Terminate parent's paragraph state
|
||||
{
|
||||
self.handle_reports(
|
||||
Parser::handle_reports(&self,
|
||||
parent.source(),
|
||||
self.state_mut()
|
||||
state.shared.rule_state
|
||||
.on_scope_end(self, parent, super::state::Scope::PARAGRAPH),
|
||||
);
|
||||
}
|
||||
|
||||
loop {
|
||||
let (rule_pos, rule, match_data) = self.update_matches(&cursor, &mut matches);
|
||||
let (rule_pos, mut result) = state.update_matches(&cursor);
|
||||
|
||||
// Unmatched content
|
||||
let text_content =
|
||||
|
@ -230,10 +104,10 @@ impl Parser for LangParser {
|
|||
);
|
||||
}
|
||||
|
||||
if let Some(rule) = rule {
|
||||
if let Some((rule_index, match_data)) = result.take() {
|
||||
// Rule callback
|
||||
let dd: &'a dyn Document = unsafe { std::mem::transmute(&doc as &dyn Document) };
|
||||
let (new_cursor, reports) = rule.on_match(self, dd, rule_pos, match_data);
|
||||
let (new_cursor, reports) = self.rules[rule_index].on_match(self, dd, rule_pos, match_data);
|
||||
|
||||
self.handle_reports(doc.source(), reports);
|
||||
|
||||
|
@ -249,8 +123,8 @@ impl Parser for LangParser {
|
|||
// State
|
||||
self.handle_reports(
|
||||
doc.source(),
|
||||
self.state_mut()
|
||||
.on_scope_end(self, &doc, super::state::Scope::DOCUMENT),
|
||||
state.shared.rule_state
|
||||
.on_scope_end(&mut state, &doc, super::state::Scope::DOCUMENT),
|
||||
);
|
||||
|
||||
self.push(
|
||||
|
@ -264,17 +138,12 @@ impl Parser for LangParser {
|
|||
return Box::new(doc);
|
||||
}
|
||||
|
||||
fn parse_into<'a>(&self, source: Rc<dyn Source>, document: &'a dyn Document<'a>) {
|
||||
let mut matches = Vec::new();
|
||||
for _ in 0..self.rules.len() {
|
||||
matches.push((0usize, None));
|
||||
}
|
||||
|
||||
fn parse_into<'a>(&self, mut state: ParserState, source: Rc<dyn Source>, document: &'a dyn Document<'a>) {
|
||||
let content = source.content();
|
||||
let mut cursor = Cursor::new(0usize, source.clone());
|
||||
|
||||
loop {
|
||||
let (rule_pos, rule, match_data) = self.update_matches(&cursor, &mut matches);
|
||||
let (rule_pos, mut result) = state.update_matches(&cursor);
|
||||
|
||||
// Unmatched content
|
||||
let text_content =
|
||||
|
@ -289,9 +158,9 @@ impl Parser for LangParser {
|
|||
);
|
||||
}
|
||||
|
||||
if let Some(rule) = rule {
|
||||
if let Some((rule_index, match_data)) = result.take() {
|
||||
// Rule callback
|
||||
let (new_cursor, reports) = (*rule).on_match(self, document, rule_pos, match_data);
|
||||
let (new_cursor, reports) = self.rules[rule_index].on_match(&mut state, document, rule_pos, match_data);
|
||||
|
||||
self.handle_reports(document.source(), reports);
|
||||
|
||||
|
@ -311,43 +180,3 @@ impl Parser for LangParser {
|
|||
//return doc;
|
||||
}
|
||||
}
|
||||
|
||||
impl KernelHolder for LangParser {
|
||||
fn get_kernel(&self, name: &str) -> Option<RefMut<'_, Kernel>> {
|
||||
RefMut::filter_map(self.kernels.borrow_mut(), |map| map.get_mut(name)).ok()
|
||||
}
|
||||
|
||||
fn insert_kernel(&self, name: String, kernel: Kernel) -> RefMut<'_, Kernel> {
|
||||
//TODO do not get
|
||||
self.kernels.borrow_mut().insert(name.clone(), kernel);
|
||||
self.get_kernel(name.as_str()).unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl StyleHolder for LangParser {
|
||||
fn element_styles(&self) -> Ref<'_, HashMap<String, Rc<dyn ElementStyle>>> {
|
||||
self.styles.borrow()
|
||||
}
|
||||
|
||||
fn element_styles_mut(&self) -> RefMut<'_, HashMap<String, Rc<dyn ElementStyle>>> {
|
||||
self.styles.borrow_mut()
|
||||
}
|
||||
}
|
||||
|
||||
impl LayoutHolder for LangParser {
|
||||
fn layouts(&self) -> Ref<'_, HashMap<String, Rc<dyn LayoutType>>> { self.layouts.borrow() }
|
||||
|
||||
fn layouts_mut(&self) -> RefMut<'_, HashMap<String, Rc<dyn LayoutType>>> {
|
||||
self.layouts.borrow_mut()
|
||||
}
|
||||
}
|
||||
|
||||
impl CustomStyleHolder for LangParser {
|
||||
fn custom_styles(&self) -> Ref<'_, HashMap<String, Rc<dyn CustomStyle>>> {
|
||||
self.custom_styles.borrow()
|
||||
}
|
||||
|
||||
fn custom_styles_mut(&self) -> RefMut<'_, HashMap<String, Rc<dyn CustomStyle>>> {
|
||||
self.custom_styles.borrow_mut()
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -6,10 +6,9 @@ use std::ops::Range;
use std::rc::Rc;

use crate::compiler::compiler::Compiler;
use crate::document::document::Document;
use crate::elements::layout::LayoutToken;

use super::document::Document;

/// Represents the type of a layout
pub trait LayoutType: core::fmt::Debug {
/// Name of the layout

@@ -32,18 +31,17 @@ pub trait LayoutType: core::fmt::Debug {
) -> Result<String, String>;
}

pub trait LayoutHolder {
/// gets a reference to all defined layouts
fn layouts(&self) -> Ref<'_, HashMap<String, Rc<dyn LayoutType>>>;
#[derive(Default)]
pub struct LayoutHolder {
layouts: HashMap<String, Rc<dyn LayoutType>>,
}

/// gets a (mutable) reference to all defined layouts
fn layouts_mut(&self) -> RefMut<'_, HashMap<String, Rc<dyn LayoutType>>>;

fn get_layout(&self, layout_name: &str) -> Option<Rc<dyn LayoutType>> {
self.layouts().get(layout_name).map(|layout| layout.clone())
impl LayoutHolder {
pub fn get(&self, layout_name: &str) -> Option<Rc<dyn LayoutType>> {
self.layouts.get(layout_name).map(|layout| layout.clone())
}

fn insert_layout(&self, layout: Rc<dyn LayoutType>) {
self.layouts_mut().insert(layout.name().into(), layout);
pub fn insert(&self, layout: Rc<dyn LayoutType>) {
self.layouts.insert(layout.name().into(), layout);
}
}
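Illustrative sketch (not part of this commit): how the new struct-based LayoutHolder is meant to be used, with a stand-in trait so the snippet is self-contained. The real LayoutType has more methods than shown here, and the real insert likely needs &mut self to compile.

// Minimal sketch only; `Centered` and `LayoutHolderDemo` are hypothetical stand-ins.
use std::collections::HashMap;
use std::rc::Rc;

trait LayoutType: std::fmt::Debug {
    fn name(&self) -> &'static str;
}

#[derive(Debug)]
struct Centered;
impl LayoutType for Centered {
    fn name(&self) -> &'static str { "Centered" }
}

#[derive(Default)]
struct LayoutHolderDemo {
    layouts: HashMap<String, Rc<dyn LayoutType>>,
}

impl LayoutHolderDemo {
    fn get(&self, name: &str) -> Option<Rc<dyn LayoutType>> { self.layouts.get(name).cloned() }
    fn insert(&mut self, layout: Rc<dyn LayoutType>) {
        self.layouts.insert(layout.name().into(), layout);
    }
}

fn main() {
    let mut holder = LayoutHolderDemo::default();
    holder.insert(Rc::new(Centered));
    assert!(holder.get("Centered").is_some());
}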
@@ -4,3 +4,6 @@ pub mod rule;
pub mod source;
pub mod state;
pub mod util;
pub mod style;
pub mod layout;
pub mod customstyle;
@@ -1,20 +1,28 @@
use std::any::Any;
use std::cell::Ref;
use std::cell::RefMut;
use std::cell::RefCell;
use std::collections::HashSet;
use std::ops::Range;
use std::rc::Rc;
use ariadne::Report;
use unicode_segmentation::UnicodeSegmentation;

use super::customstyle::CustomStyleHolder;
use super::layout::LayoutHolder;
use super::rule::Rule;
use super::source::Cursor;
use super::source::Source;
use super::state::StateHolder;
use crate::document::customstyle::CustomStyleHolder;
use super::state::RuleStateHolder;
use super::style::StyleHolder;
use crate::document::document::Document;
use crate::document::document::DocumentAccessors;
use crate::document::element::ContainerElement;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::document::layout::LayoutHolder;
use crate::document::style::StyleHolder;
use crate::elements::customstyle::CustomStyleRule;
use crate::elements::paragraph::Paragraph;
use crate::lua::kernel::Kernel;
use crate::lua::kernel::KernelHolder;
use crate::parser::source::SourceFile;
use ariadne::Color;

#[derive(Debug)]

@@ -45,91 +53,239 @@ impl ReportColors {
}
}

pub trait Parser: KernelHolder + StyleHolder + LayoutHolder + CustomStyleHolder {
/// Gets the colors for formatting errors
/// The state that is shared with the state's children
pub struct SharedState {
pub rule_state: RuleStateHolder,

/// The lua [`Kernel`]s
pub kernels: KernelHolder,

/// The styles
pub styles: StyleHolder,

/// The layouts
pub layouts: LayoutHolder,

/// The custom styles
pub custom_styles: CustomStyleHolder,
}

impl SharedState {
/// Construct a new empty shared state
pub(self) fn new(parser: &dyn Parser) -> Self {
let mut s = Self {
rule_state: RuleStateHolder::default(),
kernels: KernelHolder::default(),
styles: StyleHolder::default(),
layouts: LayoutHolder::default(),
custom_styles: CustomStyleHolder::default(),
};

// Register default kernel
s.kernels
.insert("main".to_string(), Kernel::new(parser));

parser.rules().iter().for_each(|rule| {
rule.register_styles(&mut s.styles);
rule.register_layouts(&mut s.layouts);
});

s
}
}
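Illustrative sketch (not part of this commit): how a SharedState behind Rc<RefCell<..>> is shared between a root state and the states derived from it, modelled with simplified stand-in types so it runs on its own.

// Stand-in types only; the real `SharedState` holds the rule/kernel/style/layout holders.
use std::cell::RefCell;
use std::rc::Rc;

#[derive(Default)]
struct SharedStateDemo {
    styles: Vec<String>, // stand-in for the real holders
}

struct StateDemo {
    shared: Rc<RefCell<SharedStateDemo>>,
}

impl StateDemo {
    fn new(parent: Option<&StateDemo>) -> Self {
        let shared = match parent {
            Some(p) => p.shared.clone(), // child reuses the parent's shared state
            None => Rc::new(RefCell::new(SharedStateDemo::default())),
        };
        Self { shared }
    }
}

fn main() {
    let root = StateDemo::new(None);
    let child = StateDemo::new(Some(&root));
    child.shared.borrow_mut().styles.push("bold".into());
    assert_eq!(root.shared.borrow().styles.len(), 1); // visible from the root
}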
/// The state of the parser
pub struct ParserState<'a, 'b> {
/// The parser for which this state exists
pub parser: &'a dyn Parser,

/// The (optional) parent state
parent: Option<&'b ParserState<'a, 'b>>,

/// The position of the matches in the current state
matches: RefCell<Vec<(usize, Option<Box<dyn Any>>)>>,

/// State shared among all states
pub shared: Rc<RefCell<SharedState>>,
}

impl<'a, 'b> ParserState<'a, 'b> {
/// Constructs a new state for a given parser with an optional parent
///
/// When colors are disabled, all colors should resolve to empty string
fn colors(&self) -> &ReportColors;
/// Parent should be None when parsing a brand new document.
/// If you have to set the parent to Some(..) (e.g. for imports or sub-documents),
/// be sure to use the [`ParserState::with_state`] method instead; this creates an
/// RAII-lived state for use within a bounded lifetime.
pub fn new(parser: &'a dyn Parser, parent: Option<&'a ParserState<'a, 'b>>) -> Self {
let matches = parser.rules().iter().map(|_| (0, None)).collect::<Vec<_>>();
let shared = if let Some(parent) = &parent {
parent.shared.clone()
} else {
Rc::new(RefCell::new(SharedState::new(parser)))
};

/// Gets a reference to all the [`Rule`]s defined for the parser
fn rules(&self) -> &Vec<Box<dyn Rule>>;
/// Gets a mutable reference to all the [`Rule`]s defined for the parser
fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>>;
Self {
parser,
parent,
matches: RefCell::new(matches),
shared,
}
}

fn state(&self) -> Ref<'_, StateHolder>;
fn state_mut(&self) -> RefMut<'_, StateHolder>;

fn has_error(&self) -> bool;

/// Add an [`Element`] to the [`Document`]
fn push<'a>(&self, doc: &dyn Document, elem: Box<dyn Element>);

/// Parse [`Source`] into a new [`Document`]
fn parse<'a>(
&self,
source: Rc<dyn Source>,
parent: Option<&'a dyn Document<'a>>,
) -> Box<dyn Document<'a> + 'a>;

/// Parse [`Source`] into an already existing [`Document`]
fn parse_into<'a>(&self, source: Rc<dyn Source>, document: &'a dyn Document<'a>);
}

pub trait ParserStrategy {
fn add_rule(&mut self, rule: Box<dyn Rule>, after: Option<&'static str>) -> Result<(), String>;

fn update_matches(
&self,
cursor: &Cursor,
matches: &mut Vec<(usize, Option<Box<dyn Any>>)>,
) -> (Cursor, Option<&Box<dyn Rule>>, Option<Box<dyn Any>>);
}

impl<T: Parser> ParserStrategy for T {
fn add_rule(&mut self, rule: Box<dyn Rule>, after: Option<&'static str>) -> Result<(), String> {
let rule_name = (*rule).name();
/// Adds a new rule to the current state
///
/// This method will recursively modify the parent states' matches
///
/// # Errors
///
/// Will fail if:
/// * The name for the new rule clashes with an already existing rule
/// * `after` is Some(..) and no rule with that name exists to insert after
/// On failure, it is safe to continue using this state; however, the added rule won't exist.
/*
pub fn add_rule(
&mut self,
rule: Box<dyn Rule>,
after: Option<&'static str>,
) -> Result<(), String> {
// FIXME: This method should not modify the parser
// Instead we should have some sort of list of references to rules
// Also need to add a sorting key for rules, so they can be automatically registered, then sorted

// TODO2: Should also check for duplicate rules name when creating bindings...
// Error on duplicate rule
if let Some(_) = self.rules().iter().find(|rule| rule.name() == rule_name)
if let Some(_) = self
.parser
.rules()
.iter()
.find(|other_rule| other_rule.name() == rule.name())
{
return Err(format!(
"Attempted to introduce duplicate rule: `{rule_name}`"
"Attempted to introduce duplicate rule: `{}`",
rule.name()
));
}

match after {
Some(name) => {
let before = self
.rules()
// Try to insert after
if let Some(after) = after {
let index =
self.parser.rules()
.iter()
.enumerate()
.find(|(_pos, r)| (r).name() == name);
.find(|(_, rule)| rule.name() == after)
.map(|(idx, _)| idx);

match before {
Some((pos, _)) => self.rules_mut().insert(pos + 1, rule),
_ => {
return Err(format!(
"Unable to find rule named `{name}`, to insert rule `{}` after it",
rule.name()
))
}
}
if let Some(index) = index {
self.parser.rules_mut().insert(index, rule);
} else {
return Err(format!("Unable to find rule `{after}` to insert after"));
}
_ => self.rules_mut().push(rule),
} else {
self.parser.rules_mut().push(rule);
}

Ok(())
}
// Carry out the `matches` modification
fn carry(state: &ParserState) {
state.matches.borrow_mut().push((0, None));

fn update_matches(
&self,
cursor: &Cursor,
matches: &mut Vec<(usize, Option<Box<dyn Any>>)>,
) -> (Cursor, Option<&Box<dyn Rule>>, Option<Box<dyn Any>>) {
// Update matches
// TODO: Trivially parallelizable
self.rules()
if let Some(parent) = state.parent {
carry(parent);
}
}
carry(self);

// TODO2: Carry on bindings, style, layouts registration... into self.shared
Ok(())
}
*/

/// Runs a procedure with a new state that inherits its [`SharedState`] from self
///
/// Note: When parsing a new document, create a default state, then the parsing process
/// creates states using this method
pub fn with_state<F, R>(&self, f: F) -> R
where
F: FnOnce(ParserState) -> R,
{
let new_state = ParserState::new(self.parser, Some(self));
f(new_state)
}
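Illustrative sketch (not part of this commit): parsing a sub-source through with_state, so the child state shares the parent's SharedState and is dropped when the closure returns. source and document are assumed to be in scope with the types used elsewhere in this diff.

// Hedged fragment only; mirrors the `parse_paragraph` change later in this commit.
let parsed = state.with_state(|child| {
    child.parser.parse(child, source.clone(), Some(document))
});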
fn handle_reports(
&self,
source: Rc<dyn Source>,
reports: Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>,
) {
for mut report in reports {
let mut sources: HashSet<Rc<dyn Source>> = HashSet::new();
fn recurse_source(sources: &mut HashSet<Rc<dyn Source>>, source: Rc<dyn Source>) {
sources.insert(source.clone());
match source.location() {
Some(parent) => {
let parent_source = parent.source();
if sources.get(&parent_source).is_none() {
recurse_source(sources, parent_source);
}
}
None => {}
}
}

report.labels.iter().for_each(|label| {
recurse_source(&mut sources, label.span.0.clone());
});

let cache = sources
.iter()
.map(|source| (source.clone(), source.content().clone()))
.collect::<Vec<(Rc<dyn Source>, String)>>();

cache.iter().for_each(|(source, _)| {
if let Some(location) = source.location() {
if let Some(_s) = source.downcast_ref::<SourceFile>() {
report.labels.push(
Label::new((location.source(), location.start() + 1..location.end()))
.with_message("In file included from here")
.with_order(-1),
);
};

if let Some(_s) = source.downcast_ref::<VirtualSource>() {
let start = location.start()
+ (location.source().content().as_bytes()[location.start()]
== '\n' as u8)
.then_some(1)
.unwrap_or(0);
report.labels.push(
Label::new((location.source(), start..location.end()))
.with_message("In evaluation of")
.with_order(-1),
);
};
}
});
report.eprint(ariadne::sources(cache)).unwrap()
}
}

/// Updates matches from a given start position, e.g. a [`Cursor`]
///
/// # Return
/// 1. The cursor position after updating the matches
/// 2. (Optional) The winning match with its match data
///
/// If the winning match is None, it means that the document has no more rule to match,
/// i.e. the rest of the content should be added as a [`Text`] element.
/// The match data should be passed to the [`Rule::on_match`] method
pub fn update_matches(
&self,
cursor: &Cursor,
) -> (Cursor, Option<(usize, Box<dyn Any>)>) {
let mut matches_borrow = self.matches.borrow_mut();

self.parser.rules()
.iter()
.zip(matches.iter_mut())
.zip(matches_borrow.iter_mut())
.for_each(|(rule, (matched_at, match_data))| {
// Don't update if not stepped over yet
if *matched_at > cursor.pos && rule.downcast_ref::<CustomStyleRule>().is_none() {

@@ -170,23 +326,131 @@ impl<T: Parser> ParserStrategy for T {
});

// Get winning match
let (winner, (next_pos, _match_data)) = matches
let (winner, next_pos) = matches_borrow
.iter()
.enumerate()
.min_by_key(|(_, (pos, _match_data))| pos)
.min_by_key(|(_, (pos, _))| pos)
.map(|(winner, (pos, _))| (winner, *pos))
.unwrap();
if *next_pos == usize::MAX
// No rule has matched

if next_pos == usize::MAX // No rule has matched
{
let content = cursor.source.content();
// No winners, i.e. no matches left
return (cursor.at(content.len()), None, None);
return (cursor.at(content.len()), None);
}

(
cursor.at(*next_pos),
Some(&self.rules()[winner]),
std::mem::replace(&mut matches[winner].1, None),
)
}
return (cursor.at(next_pos),
Some((winner, matches_borrow[0].1.take().unwrap())))

}
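Illustrative sketch (not part of this commit): the shape of the parse loop in LangParser::parse_into above. Unmatched text before the winning rule's position becomes a Text element; the winning rule then consumes its match data through on_match. Assumes mut state, mut cursor and document are in scope with the types used elsewhere in this diff.

// Hedged fragment only; error reporting and the Text element construction are elided.
let parser = state.parser;
loop {
    let (rule_pos, mut result) = state.update_matches(&cursor);

    // ...push the text between `cursor.pos` and `rule_pos.pos` as a `Text` element...

    if let Some((rule_index, match_data)) = result.take() {
        // Rule callback: the winning rule advances the cursor and may emit reports.
        let (new_cursor, _reports) =
            parser.rules()[rule_index].on_match(&mut state, document, rule_pos, match_data);
        cursor = new_cursor;
    } else {
        // No rule matched: everything up to the end of the source was plain text.
        break;
    }
}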
/// Add an [`Element`] to the [`Document`]
fn push(&mut self, doc: &dyn Document, elem: Box<dyn Element>) {
if elem.kind() == ElemKind::Inline || elem.kind() == ElemKind::Invisible {
let mut paragraph = doc
.last_element_mut::<Paragraph>()
.or_else(|| {
doc.push(Box::new(Paragraph {
location: elem.location().clone(),
content: Vec::new(),
}));
doc.last_element_mut::<Paragraph>()
})
.unwrap();

paragraph.push(elem).unwrap();
} else {
// Process paragraph events
if doc.last_element::<Paragraph>().is_some_and(|_| true) {
self.handle_reports(
doc.source(),
self.shared.rule_state
.on_scope_end(&mut self, doc, super::state::Scope::PARAGRAPH),
);
}

doc.push(elem);
}
}
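Illustrative sketch (not part of this commit): the paragraphing behaviour of ParserState::push above, modelled with simplified stand-in types so it runs on its own. Inline elements are appended to the trailing paragraph (created on demand); block elements end it.

// Stand-in types only; the real code also fires the PARAGRAPH scope end.
enum Kind { Inline, Block }

enum Node { Paragraph(Vec<&'static str>), Block(&'static str) }

fn push(doc: &mut Vec<Node>, kind: Kind, name: &'static str) {
    match kind {
        Kind::Inline => {
            if !matches!(doc.last(), Some(Node::Paragraph(_))) {
                doc.push(Node::Paragraph(Vec::new()));
            }
            if let Some(Node::Paragraph(content)) = doc.last_mut() {
                content.push(name);
            }
        }
        Kind::Block => doc.push(Node::Block(name)),
    }
}

fn main() {
    let mut doc = Vec::new();
    push(&mut doc, Kind::Inline, "text");
    push(&mut doc, Kind::Inline, "style");
    push(&mut doc, Kind::Block, "code");
    assert!(matches!(&doc[0], Node::Paragraph(c) if c.len() == 2));
}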
pub trait Parser {
/// Gets the colors for formatting errors
///
/// When colors are disabled, all colors should resolve to empty string
fn colors(&self) -> &ReportColors;

/// Gets a reference to all the [`Rule`]s defined for the parser
fn rules(&self) -> &Vec<Box<dyn Rule>>;
/// Gets a mutable reference to all the [`Rule`]s defined for the parser
fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>>;

/// Whether the parser emitted an error during its parsing process
fn has_error(&self) -> bool;

/// Add an [`Element`] to the [`Document`]
fn push<'a>(&self, doc: &dyn Document, elem: Box<dyn Element>);

/// Parse [`Source`] into a new [`Document`]
///
/// # Errors
///
/// This method will not fail because we try to optimistically recover from parsing errors.
/// However the resulting document should not get compiled if an error has happened;
/// see [`Parser::has_error()`] for reference
fn parse<'a>(
&self,
state: ParserState,
source: Rc<dyn Source>,
parent: Option<&'a dyn Document<'a>>,
) -> Box<dyn Document<'a> + 'a>;

/// Parse [`Source`] into an already existing [`Document`]
///
/// # Errors
///
/// This method will not fail because we try to optimistically recover from parsing errors.
/// However the resulting document should not get compiled if an error has happened;
/// see [`Parser::has_error()`] for reference
fn parse_into<'a>(&self,
state: ParserState,
source: Rc<dyn Source>, document: &'a dyn Document<'a>);

fn add_rule(
&mut self,
rule: Box<dyn Rule>,
after: Option<&'static str>,
) -> Result<(), String> {
if let Some(_) = self
.rules()
.iter()
.find(|other_rule| other_rule.name() == rule.name())
{
return Err(format!(
"Attempted to introduce duplicate rule: `{}`",
rule.name()
));
}

// Try to insert after
if let Some(after) = after {
let index =
self.rules()
.iter()
.enumerate()
.find(|(_, rule)| rule.name() == after)
.map(|(idx, _)| idx);

if let Some(index) = index {
self.rules_mut().insert(index, rule);
} else {
return Err(format!("Unable to find rule `{after}` to insert after"));
}
} else {
self.rules_mut().push(rule);
}

Ok(())
}
}
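Illustrative sketch (not part of this commit): the error behaviour of the Parser::add_rule default method above. MyParser, CommentRule and CodeRule are hypothetical stand-ins; only the two failure cases come from the code above.

// Hedged fragment only; the stand-in types do not exist in this crate.
let mut parser = MyParser::default();
parser.add_rule(Box::new(CommentRule::new()), None).unwrap();

// Same name again -> "Attempted to introduce duplicate rule: `Comment`"
assert!(parser.add_rule(Box::new(CommentRule::new()), None).is_err());

// Unknown anchor -> "Unable to find rule `DoesNotExist` to insert after"
assert!(parser
    .add_rule(Box::new(CodeRule::new()), Some("DoesNotExist"))
    .is_err());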
@@ -1,7 +1,10 @@
use super::layout::LayoutHolder;
use super::parser::Parser;
use super::parser::ParserState;
use super::source::Cursor;
use super::source::Source;
use super::source::Token;
use super::style::StyleHolder;
use crate::document::document::Document;
use ariadne::Report;
use downcast_rs::impl_downcast;

@@ -17,23 +20,24 @@ pub trait Rule: Downcast {
/// Returns rule's name
fn name(&self) -> &'static str;
/// Finds the next match starting from [`cursor`]
fn next_match(&self, parser: &dyn Parser, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)>;
fn next_match(&self, state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)>;
/// Callback when rule matches
fn on_match<'a>(
&self,
parser: &dyn Parser,
state: &mut ParserState,
document: &'a (dyn Document<'a> + 'a),
cursor: Cursor,
match_data: Option<Box<dyn Any>>,
match_data: Box<dyn Any>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>);
/// Export bindings to lua
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }

/// Registers lua bindings
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }

/// Registers default styles
fn register_styles(&self, _parser: &dyn Parser) {}
fn register_styles(&self, holder: &mut StyleHolder) {}

/// Registers default layouts
fn register_layouts(&self, _parser: &dyn Parser) {}
fn register_layouts(&self, holder: &mut LayoutHolder) {}
}
impl_downcast!(Rule);
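Illustrative sketch (not part of this commit): the smallest possible Rule under the new signatures above; it never matches, so on_match is effectively unreachable. Crate and std/ariadne imports are omitted; real rules return Some((position, match_data)) from next_match and consume that boxed data again in on_match.

// Hedged sketch, assuming the trait signatures shown above.
#[derive(Debug)]
struct NoOpRule;

impl Rule for NoOpRule {
    fn name(&self) -> &'static str { "NoOp" }

    fn next_match(&self, _state: &ParserState, _cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
        None // never claims any position
    }

    fn on_match<'a>(
        &self,
        _state: &mut ParserState,
        _document: &'a (dyn Document<'a> + 'a),
        cursor: Cursor,
        _match_data: Box<dyn Any>,
    ) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
        (cursor, vec![]) // nothing matched, nothing to report
    }
}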
@@ -53,24 +57,22 @@ pub trait RegexRule {
fn on_regex_match<'a>(
&self,
index: usize,
parser: &dyn Parser,
state: &mut ParserState,
document: &'a (dyn Document<'a> + 'a),
token: Token,
matches: regex::Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>;

fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> { None }
fn register_styles(&self, _parser: &dyn Parser) {}
fn register_layouts(&self, _parser: &dyn Parser) {}
fn register_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
fn register_styles(&self, _holder: &mut StyleHolder) {}
fn register_layouts(&self, _holder: &mut LayoutHolder) {}
}

impl<T: RegexRule + 'static> Rule for T {
fn name(&self) -> &'static str {
RegexRule::name(self)
}
fn name(&self) -> &'static str { RegexRule::name(self) }

/// Finds the next match starting from [`cursor`]
fn next_match(&self, _parser: &dyn Parser, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
let content = cursor.source.content();
let mut found: Option<(usize, usize)> = None;
self.regexes().iter().enumerate().for_each(|(id, re)| {

@@ -92,18 +94,13 @@ impl<T: RegexRule + 'static> Rule for T {

fn on_match<'a>(
&self,
parser: &dyn Parser,
state: &mut ParserState,
document: &'a (dyn Document<'a> + 'a),
cursor: Cursor,
match_data: Option<Box<dyn Any>>,
match_data: Box<dyn Any>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
let content = cursor.source.content();
let index = unsafe {
match_data
.unwrap_unchecked()
.downcast::<usize>()
.unwrap_unchecked()
};
let index = match_data.downcast::<usize>().unwrap();
let re = &self.regexes()[*index];

let captures = re.captures_at(content.as_str(), cursor.pos).unwrap();

@@ -112,19 +109,15 @@ impl<T: RegexRule + 'static> Rule for T {
let token_end = token.end();
return (
cursor.at(token_end),
self.on_regex_match(*index, parser, document, token, captures),
self.on_regex_match(*index, state, document, token, captures),
);
}

fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Option<Vec<(String, Function<'lua>)>> {
self.lua_bindings(lua)
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
self.register_bindings(lua)
}

fn register_styles(&self, parser: &dyn Parser) {
self.register_styles(parser);
}
fn register_styles(&self, holder: &mut StyleHolder) { self.register_styles(holder); }

fn register_layouts(&self, parser: &dyn Parser) {
self.register_layouts(parser);
}
fn register_layouts(&self, holder: &mut LayoutHolder) { self.register_layouts(holder); }
}
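Illustrative sketch (not part of this commit): the Box<dyn Any> round-trip used by the blanket impl above. next_match boxes the index of the winning regex and on_match downcasts it back, now without unsafe. Plain std types only.

use std::any::Any;

fn next_match_data(winning_regex: usize) -> Box<dyn Any> {
    Box::new(winning_regex)
}

fn on_match_index(match_data: Box<dyn Any>) -> usize {
    *match_data.downcast::<usize>().unwrap()
}

fn main() {
    let data = next_match_data(2);
    assert_eq!(on_match_index(data), 2); // the index survives the trip through `dyn Any`
}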
@@ -1,4 +1,3 @@
use std::cell::RefCell;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;

@@ -9,7 +8,7 @@ use downcast_rs::Downcast;

use crate::document::document::Document;

use super::parser::Parser;
use super::parser::ParserState;
use super::source::Source;

/// Scope for state objects

@@ -25,75 +24,66 @@ pub enum Scope {
PARAGRAPH = 2,
}

pub trait State: Downcast {
pub trait RuleState: Downcast {
/// Returns the state's [`Scope`]
fn scope(&self) -> Scope;

/// Callback called when state goes out of scope
fn on_remove<'a>(
&self,
parser: &dyn Parser,
state: &mut ParserState,
document: &dyn Document,
) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>>;
}
impl_downcast!(State);
impl_downcast!(RuleState);

impl core::fmt::Debug for dyn State {
impl core::fmt::Debug for dyn RuleState {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "State{{Scope: {:#?}}}", self.scope())
}
}

/// Object owning all the states
#[derive(Debug)]
pub struct StateHolder {
data: HashMap<String, Rc<RefCell<dyn State>>>,
#[derive(Default)]
pub struct RuleStateHolder {
states: HashMap<String, Rc<dyn RuleState>>,
}

impl StateHolder {
pub fn new() -> Self {
Self {
data: HashMap::new(),
}
}

impl RuleStateHolder {
// Attempts to push [`state`]. On collision, returns an error with the already present state
pub fn insert(
&mut self,
name: String,
state: Rc<RefCell<dyn State>>,
) -> Result<Rc<RefCell<dyn State>>, Rc<RefCell<dyn State>>> {
match self.data.insert(name, state.clone()) {
Some(state) => Err(state),
_ => Ok(state),
}
state: Rc<dyn RuleState>,
) {
self.states.insert(name, state.clone());
}

pub fn query(&self, name: &String) -> Option<Rc<RefCell<dyn State>>> {
self.data.get(name).map_or(None, |st| Some(st.clone()))
pub fn get(&self, state_name: &str) -> Option<Rc<dyn RuleState>> {
self.states.get(state_name)
.map(|state| state.clone())
}

pub fn on_scope_end(
&mut self,
parser: &dyn Parser,
state: &mut ParserState,
document: &dyn Document,
scope: Scope,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
let mut result = vec![];
let mut reports = vec![];

self.data.retain(|_name, state| {
if state.borrow().scope() >= scope {
state
.borrow()
.on_remove(parser, document)
self.states.retain(|_name, rule_state| {
if rule_state.scope() >= scope {
rule_state
.on_remove(state, document)
.drain(..)
.for_each(|report| result.push(report));
.for_each(|report| reports.push(report));
false
} else {
true
}
});

return result;
return reports;
}
}
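Illustrative sketch (not part of this commit): a rule-scoped state under the new RuleState trait above. A paragraph-scoped state is dropped, and its on_remove runs, whenever on_scope_end is called with Scope::PARAGRAPH or a wider scope. Crate imports are omitted.

// Hedged sketch, assuming the trait signatures shown above.
#[derive(Debug)]
struct OpenStyleState;

impl RuleState for OpenStyleState {
    fn scope(&self) -> Scope { Scope::PARAGRAPH }

    fn on_remove<'a>(
        &self,
        _state: &mut ParserState,
        _document: &dyn Document,
    ) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>> {
        vec![] // a real state would report e.g. an unclosed style here
    }
}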
@@ -1,5 +1,3 @@
use std::cell::Ref;
use std::cell::RefMut;
use std::collections::HashMap;
use std::rc::Rc;

@@ -27,48 +25,53 @@ pub trait ElementStyle: Downcast + core::fmt::Debug {
}
impl_downcast!(ElementStyle);

pub trait StyleHolder {
/// gets a reference to all defined styles
fn element_styles(&self) -> Ref<'_, HashMap<String, Rc<dyn ElementStyle>>>;

/// gets a (mutable) reference to all defined styles
fn element_styles_mut(&self) -> RefMut<'_, HashMap<String, Rc<dyn ElementStyle>>>;
#[derive(Default)]
pub struct StyleHolder {
styles: HashMap<String, Rc<dyn ElementStyle>>,
}

impl StyleHolder {
/// Checks if a given style key is registered
fn is_style_registered(&self, style_key: &str) -> bool { self.element_styles().contains_key(style_key) }
fn is_registered(&self, style_key: &str) -> bool { self.styles.contains_key(style_key) }

/// Gets the current active style for an element
/// NOTE: Will panic if a style is not defined for a given element
/// If you need to process user input, use [`is_registered`]
fn current_style(&self, style_key: &str) -> Rc<dyn ElementStyle> {
self.element_styles().get(style_key).map(|rc| rc.clone()).unwrap()
fn current(&self, style_key: &str) -> Rc<dyn ElementStyle> {
self.styles.get(style_key).map(|rc| rc.clone()).unwrap()
}

/// Sets the [`style`]
fn set_current_style(&self, style: Rc<dyn ElementStyle>) {
self.element_styles_mut().insert(style.key().to_string(), style);
fn set_current(&mut self, style: Rc<dyn ElementStyle>) {
self.styles.insert(style.key().to_string(), style);
}
}

#[macro_export]
macro_rules! impl_elementstyle {
($t:ty, $key:expr) => {
impl ElementStyle for $t {
impl crate::parser::style::ElementStyle for $t {
fn key(&self) -> &'static str { $key }

fn from_json(&self, json: &str) -> Result<std::rc::Rc<dyn ElementStyle>, String> {
fn from_json(
&self,
json: &str,
) -> Result<std::rc::Rc<dyn crate::parser::style::ElementStyle>, String> {
serde_json::from_str::<$t>(json)
.map_err(|e| e.to_string())
.map(|obj| std::rc::Rc::new(obj) as std::rc::Rc<dyn ElementStyle>)
.map(|obj| {
std::rc::Rc::new(obj) as std::rc::Rc<dyn crate::parser::style::ElementStyle>
})
}

fn from_lua(
&self,
lua: &mlua::Lua,
value: mlua::Value,
) -> Result<std::rc::Rc<dyn ElementStyle>, mlua::Error> {
mlua::LuaSerdeExt::from_value::<$t>(lua, value)
.map(|obj| std::rc::Rc::new(obj) as std::rc::Rc<dyn ElementStyle>)
) -> Result<std::rc::Rc<dyn crate::parser::style::ElementStyle>, mlua::Error> {
mlua::LuaSerdeExt::from_value::<$t>(lua, value).map(|obj| {
std::rc::Rc::new(obj) as std::rc::Rc<dyn crate::parser::style::ElementStyle>
})
}
}
};

@@ -8,7 +8,7 @@ use crate::document::document::DocumentAccessors;
use crate::document::element::ElemKind;
use crate::elements::paragraph::Paragraph;

use super::parser::Parser;
use super::parser::ParserState;
use super::source::Source;

/// Processes text for escape characters and paragraphing

@@ -136,18 +136,21 @@ pub fn process_escaped<S: AsRef<str>>(escape: char, token: &'static str, content
/// Parses source into a single paragraph
/// If source contains anything but a single paragraph, an error is returned
pub fn parse_paragraph<'a>(
parser: &dyn Parser,
state: &ParserState,
source: Rc<dyn Source>,
document: &'a dyn Document<'a>,
) -> Result<Box<Paragraph>, &'static str> {
let parsed = parser.parse(source.clone(), Some(document));
let parsed = state.with_state(|new_state| -> Box<dyn Document> {
new_state.parser.parse(new_state, source.clone(), Some(document))
});
if parsed.content().borrow().len() > 1 {
return Err("Parsed document contains more than a single paragraph");
} else if parsed.content().borrow().len() == 0 {
return Err("Parsed document is empty");
} else if parsed.last_element::<Paragraph>().is_none() {
return Err("Parsed element is not a paragraph");
} else if parser.has_error() {
} else if state.parser.has_error() {
// FIXME: If parser had an error before, this would trigger
return Err("Parser error");
}
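Illustrative sketch (not part of this commit): calling the reworked parse_paragraph from inside a rule. state, document and source are assumed to be in scope with the types used elsewhere in this diff; only the signature and the error strings come from the code above.

// Hedged fragment only.
match parse_paragraph(state, source.clone(), document) {
    Ok(_paragraph) => {
        // exactly one paragraph was produced; use its content
    }
    Err(msg) => {
        // e.g. "Parsed document is empty" or "Parser error": report `msg` and bail out
    }
}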