Compare commits


No commits in common. "master" and "batch" have entirely different histories.

66 changed files with 2183 additions and 7986 deletions

Cargo.lock (generated): 96 changed lines

@ -56,17 +56,7 @@ checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
]
[[package]]
name = "auto-registry"
version = "0.0.4"
dependencies = [
"lazy_static",
"proc-macro2",
"quote",
"syn 1.0.109",
"syn 2.0.53",
]
[[package]]
@ -77,7 +67,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.53",
]
[[package]]
@ -287,16 +277,6 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
[[package]]
name = "erased-serde"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24e2389d65ab4fab27dc2a5de7b191e1f6617d1f1c8855c0dc569c94a4cbb18d"
dependencies = [
"serde",
"typeid",
]
[[package]]
name = "errno"
version = "0.3.9"
@ -409,7 +389,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.53",
]
[[package]]
@ -716,13 +696,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d111deb18a9c9bd33e1541309f4742523bfab01d276bfa9a27519f6de9c11dc7"
dependencies = [
"bstr",
"erased-serde",
"mlua-sys",
"num-traits",
"once_cell",
"rustc-hash",
"serde",
"serde-value",
]
[[package]]
@ -743,7 +720,6 @@ name = "nml"
version = "0.1.0"
dependencies = [
"ariadne",
"auto-registry",
"dashmap 6.0.1",
"downcast-rs",
"getopts",
@ -752,9 +728,7 @@ dependencies = [
"lsp-server",
"lsp-types 0.97.0",
"mlua",
"rand 0.8.5",
"regex",
"runtime-format",
"rusqlite",
"rust-crypto",
"serde",
@ -828,15 +802,6 @@ dependencies = [
"pkg-config",
]
[[package]]
name = "ordered-float"
version = "2.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c"
dependencies = [
"num-traits",
]
[[package]]
name = "parking_lot_core"
version = "0.9.10"
@ -887,7 +852,7 @@ dependencies = [
"pest_meta",
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.53",
]
[[package]]
@ -918,7 +883,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.53",
]
[[package]]
@ -966,9 +931,9 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "proc-macro2"
version = "1.0.86"
version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e"
dependencies = [
"unicode-ident",
]
@ -984,9 +949,9 @@ dependencies = [
[[package]]
name = "quote"
version = "1.0.36"
version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
dependencies = [
"proc-macro2",
]
@ -1106,15 +1071,6 @@ version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
[[package]]
name = "runtime-format"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09958d5b38bca768ede7928c767c89a08ba568144a7b61992aecae79b03c8c94"
dependencies = [
"tinyvec",
]
[[package]]
name = "rusqlite"
version = "0.31.0"
@ -1203,16 +1159,6 @@ dependencies = [
"serde_derive",
]
[[package]]
name = "serde-value"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c"
dependencies = [
"ordered-float",
"serde",
]
[[package]]
name = "serde_derive"
version = "1.0.204"
@ -1221,7 +1167,7 @@ checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.53",
]
[[package]]
@ -1243,7 +1189,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.53",
]
[[package]]
@ -1285,9 +1231,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.72"
version = "2.0.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af"
checksum = "7383cd0e49fff4b6b90ca5670bfd3e9d6a733b3f90c686605aa7eec8c4996032"
dependencies = [
"proc-macro2",
"quote",
@ -1345,7 +1291,7 @@ checksum = "d20468752b09f49e909e55a5d338caa8bedf615594e9d80bc4c565d30faf798c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.53",
]
[[package]]
@ -1425,7 +1371,7 @@ checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.53",
]
[[package]]
@ -1492,7 +1438,7 @@ checksum = "84fd902d4e0b9a4b27f2f440108dc034e1758628a9b702f8ec61ad66355422fa"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.53",
]
[[package]]
@ -1520,7 +1466,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.53",
]
[[package]]
@ -1532,12 +1478,6 @@ dependencies = [
"once_cell",
]
[[package]]
name = "typeid"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "059d83cc991e7a42fc37bd50941885db0888e34209f8cfd9aab07ddec03bc9cf"
[[package]]
name = "typenum"
version = "1.17.0"
@ -1783,5 +1723,5 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
"syn 2.0.53",
]


@ -17,7 +17,6 @@ inherits = "release"
debug = true
[dependencies]
auto-registry = { path = "crates/auto-registry" }
ariadne = "0.4.1"
dashmap = "6.0.1"
downcast-rs = "1.2.1"
@ -26,23 +25,15 @@ graphviz-rust = "0.9.0"
lazy_static = "1.5.0"
lsp-server = "0.7.6"
lsp-types = "0.97.0"
mlua = { version = "0.9.9", features = ["lua54", "vendored", "serialize"] }
mlua = { version = "0.9.9", features = ["lua54", "vendored"] }
regex = "1.10.3"
rusqlite = "0.31.0"
rust-crypto = "0.2.36"
serde = "1.0.204"
serde_json = "1.0.120"
syntect = "5.2.0"
tokio = { version = "1.38.1", features = [
"macros",
"rt-multi-thread",
"io-std",
] }
tokio = { version = "1.38.1", features = ["macros", "rt-multi-thread", "io-std"]}
tower-lsp = "0.20.0"
unicode-segmentation = "1.11.0"
walkdir = "2.5.0"
runtime-format = "0.1.3"
[dev-dependencies]
rand = "0.8.5"


@ -35,9 +35,9 @@ cargo build --release --bin nml
- [x] LaTeX rendering
- [x] Graphviz rendering
- [x] Media
- [x] References
- [x] Navigation
- [x] Cross-Document references
- [ ] References
- [ ] Navigation
- [ ] Cross-Document references
- [ ] Complete Lua api
- [ ] Documentation
- [ ] Table


@ -1,54 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "auto-registry"
version = "0.0.4"
dependencies = [
"lazy_static",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "proc-macro2"
version = "1.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
dependencies = [
"proc-macro2",
]
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"


@ -1,13 +0,0 @@
[package]
name = "auto-registry"
version = "0.0.4"
edition = "2021"
[lib]
proc-macro = true
[dependencies]
proc-macro2 = { version = "1.0"}
quote = "1.0"
syn = { version = "1.0", features = [ "full" ] }
lazy_static = "1.5.0"


@ -1,296 +0,0 @@
#![feature(proc_macro_span)]
use std::cell::RefCell;
use std::collections::HashMap;
use lazy_static::lazy_static;
use proc_macro::TokenStream;
use quote::quote;
use std::sync::Mutex;
use syn::parse::Parse;
use syn::parse::ParseStream;
use syn::parse_macro_input;
use syn::ItemStruct;
lazy_static! {
/// The registry; each key corresponds to an identifier that needs to be
/// valid in the context of the [`generate_registry`] macro.
static ref REGISTRY: Mutex<RefCell<HashMap<String, Vec<String>>>> =
Mutex::new(RefCell::new(HashMap::new()));
}
/// Arguments for the [`auto_registry`] proc macro
struct AutoRegistryArgs {
/// The registry name
registry: syn::LitStr,
/// The absolute path to the struct; if not specified, the macro will try
/// to automatically infer the full path.
path: Option<syn::LitStr>,
}
/// Parser for [`AutoRegistryArgs`]
impl Parse for AutoRegistryArgs {
fn parse(input: ParseStream) -> syn::Result<Self> {
let mut registry = None;
let mut path = None;
loop {
let key: syn::Ident = input.parse()?;
input.parse::<syn::Token![=]>()?;
let value: syn::LitStr = input.parse()?;
match key.to_string().as_str() {
"registry" => registry = Some(value),
"path" => path = Some(value),
_ => {
return Err(syn::Error::new(
key.span(),
format!(
"Unknown attribute `{}`, excepted `registry` or `path`",
key.to_string()
),
))
}
}
if input.is_empty() {
break;
}
input.parse::<syn::Token![,]>()?;
}
if registry.is_none() {
return Err(syn::Error::new(
input.span(),
"Missing required attribute `registry`".to_string(),
));
}
Ok(AutoRegistryArgs {
registry: registry.unwrap(),
path,
})
}
}
/// The proc macro used on a struct to add it to the registry
///
/// # Attributes
/// - registry: (String) Name of the registry to collect the struct into
/// - path: (Optional String) The crate path in which the struct is located
/// If left empty, the macro will try to deduce the path automatically
///
/// # Note
///
/// Due to the incomplete implementation of `proc_macro_span` in rust-analyzer,
/// it is highly advised to set the `path` attribute when using this macro.
/// See https://github.com/rust-lang/rust-analyzer/issues/15950
#[proc_macro_attribute]
pub fn auto_registry(attr: TokenStream, input: TokenStream) -> TokenStream {
let args = parse_macro_input!(attr as AutoRegistryArgs);
let input = parse_macro_input!(input as ItemStruct);
let ident = &input.ident;
let path = if let Some(path) = args.path {
let value = path.value();
if value.is_empty() {
value
} else {
format!("{}::{}", value, ident.to_string().as_str())
}
} else {
// Attempt to get the path in a hacky way in case the path wasn't
// specified as an attribute to the macro
let path = match input
.ident
.span()
.unwrap()
.source_file()
.path()
.canonicalize()
{
Ok(path) => path,
Err(e) => {
return syn::Error::new(
input.ident.span(),
format!("Failed to canonicalize path: {}", e),
)
.to_compile_error()
.into();
}
};
let crate_path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
let relative_path = path.strip_prefix(&crate_path).unwrap();
let relative_path_str = relative_path.to_string_lossy();
// Remove the first path component e.g "src/"
let pos = if let Some(pos) = relative_path_str.find("/") {
pos + 1
} else {
0
};
let module_path = relative_path_str
.split_at(pos)
.1
.strip_suffix(".rs")
.unwrap()
.replace("/", "::");
if module_path.is_empty() {
format!("crate::{}", ident.to_string())
} else {
format!("crate::{module_path}::{}", ident.to_string())
}
};
let reg_mtx = REGISTRY.lock().unwrap();
let mut reg_borrow = reg_mtx.borrow_mut();
if let Some(ref mut vec) = reg_borrow.get_mut(args.registry.value().as_str()) {
vec.push(path);
} else {
reg_borrow.insert(args.registry.value(), vec![path]);
}
quote! {
#input
}
.into()
}
/// Arguments for the [`generate_registry`] proc macro
struct GenerateRegistryArgs {
/// The registry name
registry: syn::LitStr,
/// The target, i.e. the name of the generated function
target: syn::Ident,
/// The maker macro, takes all constructed items and processes them
maker: syn::Expr,
/// The return type for the function
return_type: syn::Type,
}
/// Parser for [`GenerateRegistryArgs`]
impl Parse for GenerateRegistryArgs {
fn parse(input: ParseStream) -> syn::Result<Self> {
let mut registry = None;
let mut target = None;
let mut maker = None;
let mut return_type = None;
loop {
let key: syn::Ident = input.parse()?;
input.parse::<syn::Token![=]>()?;
match key.to_string().as_str() {
"registry" => registry = Some(input.parse()?),
"target" => target = Some(input.parse()?),
"maker" => maker = Some(input.parse()?),
"return_type" => return_type = Some(input.parse()?),
_ => {
return Err(syn::Error::new(
key.span(),
format!(
"Unknown attribute `{}`, excepted `registry` or `target`",
key.to_string()
),
))
}
}
if input.is_empty() {
break;
}
input.parse::<syn::Token![,]>()?;
}
if registry.is_none() {
return Err(syn::Error::new(
input.span(),
"Missing required attribute `registry`".to_string(),
));
} else if target.is_none() {
return Err(syn::Error::new(
input.span(),
"Missing required attribute `target`".to_string(),
));
} else if maker.is_none() {
return Err(syn::Error::new(
input.span(),
"Missing required attribute `maker`".to_string(),
));
} else if return_type.is_none() {
return Err(syn::Error::new(
input.span(),
"Missing required attribute `return_type`".to_string(),
));
}
Ok(GenerateRegistryArgs {
registry: registry.unwrap(),
target: target.unwrap(),
maker: maker.unwrap(),
return_type: return_type.unwrap(),
})
}
}
/// The proc macro that generates the function to build the registry
///
/// # Attributes
/// - registry: (String) Name of the registry to generate
/// - target: (Identifier) Name of the resulting function
/// - maker: (Macro) A macro that will take all the newly constructed objects
/// comma-separated and create the resulting expression
/// - return_type: (Type) The return type of the generated function.
/// Must match the type of the macro invocation
///
/// # Example
/// ```
/// macro_rules! create_listeners {
/// ( $($construct:expr),+ $(,)? ) => {{
/// vec![$(Box::new($construct) as Box<dyn Listener>,)+]
/// }};
/// }
/// #[generate_registry(
/// registry = "listeners",
/// target = build_listeners,
/// return_type = Vec<Box<dyn Listener>>,
/// maker = create_listeners)]
///
/// fn main()
/// {
/// let all_listeners : Vec<Box<dyn Listener>> = build_listeners();
/// }
/// ```
#[proc_macro_attribute]
pub fn generate_registry(attr: TokenStream, input: TokenStream) -> TokenStream {
let args = parse_macro_input!(attr as GenerateRegistryArgs);
let reg_mtx = REGISTRY.lock().unwrap();
let mut stream = proc_macro2::TokenStream::new();
if let Some(names) = reg_mtx.borrow().get(args.registry.value().as_str()) {
for name in names {
let struct_name: proc_macro2::TokenStream = name.parse().unwrap();
stream.extend(quote::quote_spanned!(proc_macro2::Span::call_site() =>
#struct_name::new(),
));
}
} else {
panic!(
"Unable to find registry item with key=`{}`",
args.registry.value()
);
}
let function = args.target;
let return_type = args.return_type;
let maker = args.maker;
let rest: proc_macro2::TokenStream = input.into();
quote! {
fn #function() -> #return_type {
#maker!(
#stream
)
}
#rest
}
.into()
}


@ -1,59 +0,0 @@
@import ../template.nml
@nav.previous = Blockquote
%<make_doc({"Blocks"}, "Blockquotes", "Blockquotes")>%
# Blockquotes
>[author=Lennart Poettering, cite=SystemD github issue 5998, url=https://github.com/systemd/systemd/pull/5998]
>>IMO, you shouldn't see the assignment of a CVE as a negative thing. The bug exists whether or not a CVE is assigned. The assignment of a CVE allows for people to consider what this issue means for them.
>
>Well, that makes no sense. You don't assign CVEs to every single random bugfix we do, do you? So why this one? I understand your currency is CVEs, but this just makes CVEs useless. And hardly anymore useful than a git history...
>
>I mean, I am fine with security bureaucracy if it actually helps anyone, but you just create noise where there shouldn't be any. And that way you just piss off the upstreams whose cooperation you actually should be interested in. Your at least made sure that my own interest in helping your efforts goes to zero...
# Nesting blockquotes
> Quotes can be nested
>> Here's a subquote
>>>[author=With author, cite=With cite]
>>> Here's another subquote
>> Back to the subquote
>
>> Another subquote
> This issue is getting a bit too heated, locking right now
```Markdown, Given by the following
> Quotes can be nested
>> Here's a subquote
>>>[author=With author, cite=With cite]
>>> Here's another subquote
>> Back to the subquote
>
>> Another subquote
> This issue is getting a bit too heated, locking right now
```
# Properties
Properties must be specified on the first `>` of the quote, inside brackets.
* ``author`` The quote author
* ``cite`` The quote source name
* ``url`` The quote source url (used for accessibility)
# Blockquotes styling
The blockquotes styling controls how the author, cite and url are rendered. This is controlled by style key ``style.blockquote``.
* ``author_pos`` Position of the author statement, available options:
*- `None` Hides the author
*- `Before` Displays the author before the quote
*- `After` Displays the author after the quote (default)
* ``format`` An array with 3 format strings to control how the author is displayed:
*-[offset=0] Format for author+cite
*- Format for author only
*- Format for cite only
```JSON, Default Style
{
"author_pos": "After",
"format": ["{author}, {cite}", "{author}", "{cite}"],
}
```


@ -1,344 +0,0 @@
@import ../template.nml
%<make_doc({"External Tools"}, "Graphviz", "Graphviz")>%
# Graphs from graphviz
#+LAYOUT_BEGIN Centered
[graph][width=50%]
digraph {
bgcolor=transparent;
graph[fontcolor=darkgray];
node[shape=box,fontcolor=darkgray];
edge[fontcolor=darkgray, color=gray];
filelist [color=orange, label="File List"];
doclist [color=orange, label="Document List"];
iscached [shape=diamond, color=red, label="Cached?"];
parse [color=white, label=Parse];
compile [color=white, label=Compile];
cache [color=orange, label=Cache];
filelist -> iscached;
iscached -> cache[dir=both,color=lightblue,style=dashed];
iscached -> doclist[label="Yes",color=lightblue,style=dashed];
iscached -> parse[label="No",color=lightblue,style=dashed];
subgraph cluster_0 {
style=dotted;
color=white;
label = "Processing";
labeljust="l";
parse -> compile;
}
compile -> doclist[label=""];
buildnav [color=white, label="Build Navigation"];
xref [color=white, label="Resolve Cross-References"];
doclist -> xref;
doclist -> buildnav[label="Cached",color=lightblue,style=dashed];
subgraph cluster_1 {
style=dotted;
color=white;
label = "Post-Processing";
labeljust="l";
xref -> buildnav;
}
xref -> cache[color=lightblue,style=dashed];
output [color=orange, label="Output"];
buildnav -> output;
}
[/graph]
#+LAYOUT_END
The Graphviz functionality requires the `dot` executable. More information can be found on [Graphviz's website](file:///home/baraquiel/Programming/nml_rs/out/Graphviz.html).
# Synopsis
Graph blocks are delimited by ``[graph]...[/graph]``
# Properties
* ``layout`` The layout engine, defaults to `dot`
see [Graphviz's documentation](https://graphviz.org/docs/layouts/). Allowed values:
*- [`dot`](https://graphviz.org/docs/layouts/dot/)
*- [`neato`](https://graphviz.org/docs/layouts/neato/)
*- [`fdp`](https://graphviz.org/docs/layouts/fdp/)
*- [`sfdp`](https://graphviz.org/docs/layouts/sfdp/)
*- [`circo`](https://graphviz.org/docs/layouts/circo/)
*- [`twopi`](https://graphviz.org/docs/layouts/twopi/)
*- [`osage`](https://graphviz.org/docs/layouts/osage/)
*- [`patchwork`](https://graphviz.org/docs/layouts/patchwork/)
* ``width`` The resulting svg's width property, defaults to `100%`
# Examples
#+LAYOUT_BEGIN[style=flex:0.33] Split
[graph]
digraph UML_Class_diagram {
bgcolor=transparent;
graph[fontcolor=darkgray];
node[fontcolor=darkgray];
edge[fontcolor=darkgray, color=gray90];
graph [
label="UML Class diagram demo"
labelloc="t"
fontname="Helvetica,Arial,sans-serif"
]
node [
fontname="Helvetica,Arial,sans-serif"
shape=record
style=filled
fillcolor=gray95
]
edge [fontname="Helvetica,Arial,sans-serif"]
edge [arrowhead=vee style=dashed]
Client -> Interface1 [label=dependency]
Client -> Interface2
edge [dir=back arrowtail=empty style=""]
Interface1 -> Class1 [xlabel=inheritance]
Interface2 -> Class1 [dir=none]
Interface2 [label="" xlabel="Simple\ninterface" shape=circle]
Interface1[label = <{<b>«interface» I/O</b> | + property<br align="left"/>...<br align="left"/>|+ method<br align="left"/>...<br align="left"/>}>]
Class1[label = <{<b>I/O class</b> | + property<br align="left"/>...<br align="left"/>|+ method<br align="left"/>...<br align="left"/>}>]
edge [dir=back arrowtail=empty style=dashed]
Class1 -> System_1 [label=implementation]
System_1 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>System</b> </td> </tr>
<tr> <td>
<table border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left" >+ property</td> </tr>
<tr> <td port="ss1" align="left" >- Subsystem 1</td> </tr>
<tr> <td port="ss2" align="left" >- Subsystem 2</td> </tr>
<tr> <td port="ss3" align="left" >- Subsystem 3</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">+ method<br/>...<br align="left"/></td> </tr>
</table>>
]
edge [dir=back arrowtail=diamond]
System_1:ss1 -> Subsystem_1 [xlabel="composition"]
Subsystem_1 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>Subsystem 1</b> </td> </tr>
<tr> <td>
<table border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left">+ property</td> </tr>
<tr> <td align="left" port="r1">- resource</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">
+ method<br/>
...<br align="left"/>
</td> </tr>
</table>>
]
Subsystem_2 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>Subsystem 2</b> </td> </tr>
<tr> <td>
<table align="left" border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left">+ property</td> </tr>
<tr> <td align="left" port="r1">- resource</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">
+ method<br/>
...<br align="left"/>
</td> </tr>
</table>>
]
Subsystem_3 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>Subsystem 3</b> </td> </tr>
<tr> <td>
<table border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left">+ property</td> </tr>
<tr> <td align="left" port="r1">- resource</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">
+ method<br/>
...<br align="left"/>
</td> </tr>
</table>>
]
System_1:ss2 -> Subsystem_2;
System_1:ss3 -> Subsystem_3;
edge [xdir=back arrowtail=odiamond]
Subsystem_1:r1 -> "Shared resource" [label=aggregation]
Subsystem_2:r1 -> "Shared resource"
Subsystem_3:r1 -> "Shared resource"
"Shared resource" [
label = <{
<b>Shared resource</b>
|
+ property<br align="left"/>
...<br align="left"/>
|
+ method<br align="left"/>
...<br align="left"/>
}>
]
}
[/graph]
#+LAYOUT_NEXT[style=flex:0.66]
Generated by the following code:
``
[graph]
digraph UML_Class_diagram {
bgcolor=transparent;
graph[fontcolor=darkgray];
node[fontcolor=darkgray];
edge[fontcolor=darkgray, color=gray90];
graph [
label="UML Class diagram demo"
labelloc="t"
fontname="Helvetica,Arial,sans-serif"
]
node [
fontname="Helvetica,Arial,sans-serif"
shape=record
style=filled
fillcolor=gray95
]
edge [fontname="Helvetica,Arial,sans-serif"]
edge [arrowhead=vee style=dashed]
Client -> Interface1 [label=dependency]
Client -> Interface2
edge [dir=back arrowtail=empty style=""]
Interface1 -> Class1 [xlabel=inheritance]
Interface2 -> Class1 [dir=none]
Interface2 [label="" xlabel="Simple\ninterface" shape=circle]
Interface1[label = <{<b>«interface» I/O</b> | + property<br align="left"/>...<br align="left"/>|+ method<br align="left"/>...<br align="left"/>}>]
Class1[label = <{<b>I/O class</b> | + property<br align="left"/>...<br align="left"/>|+ method<br align="left"/>...<br align="left"/>}>]
edge [dir=back arrowtail=empty style=dashed]
Class1 -> System_1 [label=implementation]
System_1 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>System</b> </td> </tr>
<tr> <td>
<table border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left" >+ property</td> </tr>
<tr> <td port="ss1" align="left" >- Subsystem 1</td> </tr>
<tr> <td port="ss2" align="left" >- Subsystem 2</td> </tr>
<tr> <td port="ss3" align="left" >- Subsystem 3</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">+ method<br/>...<br align="left"/></td> </tr>
</table>>
]
edge [dir=back arrowtail=diamond]
System_1:ss1 -> Subsystem_1 [xlabel="composition"]
Subsystem_1 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>Subsystem 1</b> </td> </tr>
<tr> <td>
<table border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left">+ property</td> </tr>
<tr> <td align="left" port="r1">- resource</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">
+ method<br/>
...<br align="left"/>
</td> </tr>
</table>>
]
Subsystem_2 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>Subsystem 2</b> </td> </tr>
<tr> <td>
<table align="left" border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left">+ property</td> </tr>
<tr> <td align="left" port="r1">- resource</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">
+ method<br/>
...<br align="left"/>
</td> </tr>
</table>>
]
Subsystem_3 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>Subsystem 3</b> </td> </tr>
<tr> <td>
<table border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left">+ property</td> </tr>
<tr> <td align="left" port="r1">- resource</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">
+ method<br/>
...<br align="left"/>
</td> </tr>
</table>>
]
System_1:ss2 -> Subsystem_2;
System_1:ss3 -> Subsystem_3;
edge [xdir=back arrowtail=odiamond]
Subsystem_1:r1 -> "Shared resource" [label=aggregation]
Subsystem_2:r1 -> "Shared resource"
Subsystem_3:r1 -> "Shared resource"
"Shared resource" [
label = <{
<b>Shared resource</b>
|
+ property<br align="left"/>
...<br align="left"/>
|
+ method<br align="left"/>
...<br align="left"/>
}>
]
}
[/graph]
``
#+LAYOUT_END
# Graphviz cache
Graphviz graphs that have been rendered to **svg** are stored in the cache database, under table ``cached_dot``.
Unless you modify the graph or its properties, it won't be rendered again; instead it will be sourced from the database.
# Bindings
* ``Lua, nml.graphviz.push(layout, width, dot)``
** ``layout`` *(string)* the layout engine
** ``width`` *(string)* the width property (empty string for default)
** ``dot`` *(string)* the graphviz code
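For illustration, a minimal sketch of calling this binding from the `main` Lua kernel; the layout, width and dot source below are arbitrary placeholder values, not anything required by NML:
```Lua
%<[main]
-- Push a graph rendered with the "dot" layout at half width.
-- The dot source is only an illustrative placeholder.
nml.graphviz.push("dot", "50%", [[
digraph {
	A -> B;
	B -> C;
}
]])
>%
```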


@ -1,16 +1,12 @@
@import ../template.nml
%<make_doc({"External Tools"}, "LaTeX", "LaTeX")>%
@compiler.output = latex.html
@nav.title = LaTeX
@nav.category = External Tools
@html.page_title = Documentation | LaTeX
@LaTeX = $|[kind=inline, caption=LaTeX]\LaTeX|$
#+LAYOUT_BEGIN Centered
*Bring some %LaTeX% unto your document!*
#+LAYOUT_END
# Requirements
In order to use LaTeX processing, you need to have a %LaTeX% distribution installed. We recommend the [TeX Live](https://en.wikipedia.org/wiki/TeX_Live) distribution.
You'll also need to install the [latex2svg](https://github.com/ef3d0c3e/nml/blob/master/third/latex2svg) python script provided with NML. You'll have to follow the installation instructions from the [original latex2svg repository](https://github.com/Moonbase59/latex2svg). If you don't want to add the script to your `\$PATH`, you can set the executable path in the §{tex_env}[caption=LaTeX environment].
# Inline Math
@ -48,7 +44,6 @@ $|\begin{tikzpicture}
``
Gives the following:
#+LAYOUT_BEGIN Centered
$|\begin{tikzpicture}
\begin{axis}
\addplot3[patch,patch refines=3,
@ -69,9 +64,8 @@ $|\begin{tikzpicture}
};
\end{axis}
\end{tikzpicture}|$
#+LAYOUT_END
#{tex_env} LaTeX environment
# LaTeX environment
You can define multiple %LaTeX% environments, the default being `main`
* ``@tex.env.fontsize`` The fontsize (in pt) specified to `latex2svg` (default: `12`).
@ -105,20 +99,4 @@ To set the environment you wish to use for a particular %LaTeX% element, set the
%LaTeX% elements that have been successfully rendered to **svg** are stored in the cache database, to avoid processing them a second time.
Note that this cache is shared between documents, so you don't need to reprocess them if they share the same environment.
They are stored under the table named ``Plain Text,cached_tex``; if you modify the `env`, all elements will be reprocessed, which may take a while...
# Bindings
* ``Lua, nml.tex.push_math(kind, tex [, env [, caption]])``
inserts a math mode %LaTeX% element.
** ``kind`` *(string)* the element kind (inline or block)
** ``tex`` *(string)* the %LaTeX% code
** ``env`` *(string)* the %LaTeX% environment (defaults to `main`)
** ``caption`` *(string)* the accessibility caption
* ``Lua, nml.tex.push(kind, tex [, env [, caption]])``
inserts a non-math %LaTeX% element.
** ``kind`` *(string)* the element kind (inline or block)
** ``tex`` *(string)* the %LaTeX% code
** ``env`` *(string)* the %LaTeX% environment (defaults to `main`)
** ``caption`` *(string)* the accessibility caption
They are stored under the table named ``cached_tex``; if you modify the `env`, all elements will be reprocessed, which may take a while...
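As a sketch of the bindings listed above (assuming the `main` kernel and the default environment; the formulas and captions are arbitrary examples):
```Lua
%<[main]
-- Inline math element, rendered with the default `main` environment
nml.tex.push_math("inline", "\\frac{n(n+1)}{2}", "main", "Sum of the first n integers")
-- Non-math block element; here simply the LaTeX logo
nml.tex.push("block", "\\LaTeX", "main", "The LaTeX logo")
>%
```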


@ -1,4 +1,6 @@
@import template.nml
%<make_doc({}, "Index", "Index")>%
@compiler.output = index.html
@nav.title = Documentation
@html.page_title = Documentation | Index
# Welcome to the NML documentation!


@ -1,5 +1,8 @@
@import ../template.nml
%<make_doc({"Lua"}, "Lua", "Lua Basics")>%
@compiler.output = lua.html
@nav.title = Lua
@nav.category = Lua
@html.page_title = Documentation | Lua
# Running lua code


@ -1,31 +0,0 @@
@import template.nml
@nav.previous = Sections
%<make_doc({}, "References", "References")>%
#{internal_references} Internal references
Internal references allow you to create references to elements defined within the current document.
Referencing the current section: ``§{internal_references}`` → §{internal_references}
## Media references
![flower](assets/flower.webm)[caption = Flower]
When you reference a medium from the current document, the reference can be hovered to show the referenced medium: §{flower}.
# External references
You can reference elements from other documents by adding the document's name before the reference name (separated by a ``#``).
The document name refers to the output file (as defined by the variable `compiler.output`) excluding the extension.
* ``§{doc#ref}``: Finds reference named `ref` in document named `doc`.
* ``§{#ref}``: Finds reference named `ref` in all documents.
Note that this will fail if there are multiple documents defining reference `ref`.
For instance:
* ``§{LaTeX#tex_env}[caption=LaTeX environment]`` → §{LaTeX#tex_env}[caption=LaTeX environment]
* ``§{#tex_env}[caption=LaTeX environment]`` → §{#tex_env}[caption=LaTeX environment]
# Properties
* ``caption`` The display caption for the reference


@ -1,67 +0,0 @@
@import template.nml
@nav.previous = Getting Started
%<make_doc({}, "Sections", "Sections")>%
#{first} Sections
To add a section to your document, put one or more ``Plain Text, #`` at the start of the line, followed by a space and the name of your section.
Which will render as:
#+LAYOUT_BEGIN Split
:: Make sure they don't pollute the ToC
#+ Section name
##+ Subsection
##*+ Unnumbered section
##+ Unnumbered section
#+ This section is not in the ToC
#+LAYOUT_NEXT
Given by the following:
``
# Section name
## Subsection
#* Unnumbered section
#+ This section is not in the ToC
``
#+LAYOUT_END
# Sections references
You can create a referenceable section by using ``Plain Text, #{refname}``, where `refname` is an internal reference name for use only within this document.
You can then create a clickable reference to this section: ``§{refname}`` or ``§{refname}[caption=Click me!]``. Below is an example of this in action:
###{refname}+* Section
§{refname}[caption=Click me!] or §{first}[caption=First section]
``
###{refname}+* Section
§{refname}[caption=Click me!] or §{first}[caption=First section]
``
# Section styling
The styling for the section link is controlled by the style key ``style.section``
* ``link_pos``: `Before|After|None` Position of the section link.
* ``link``: `[Before, Link, After]` An array of 3 strings
```JSON, Default Style
{
"link_pos": "Before",
"link": ["", "🔗", " "]
}
```
# Bindings
* ``Lua, nml.section.push(title, depth, [, kind [, reference]])``
** ``title`` *(string)* the section display title
** ``depth`` *(number)* the section depth
** ``kind`` *(string)* the section kind
**- `\*` for unnumbered
**- `+` for outside of the table of content
**- `\*+` or `+\*` for both
** ``reference`` *(string)* the section reference name
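A minimal sketch of this binding from the `main` kernel; the titles, depths and reference name are arbitrary, and an empty `kind` is assumed to stand for a regular numbered section:
```Lua
%<[main]
-- Depth-2 numbered section carrying a reference name (assumption: "" means a regular section)
nml.section.push("Example section", 2, "", "example_section")
-- Depth-3 section, unnumbered and kept out of the table of contents
nml.section.push("Side notes", 3, "*+")
>%
```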


@ -1,32 +0,0 @@
@import template.nml
@nav.previous = Index
%<make_doc({}, "Getting Started", "Getting Started")>%
# Building NML
You need a nightly version of rustc to compile NML.
Instructions for your operating system can be found on [Rust's website](https://forge.rust-lang.org/infra/other-installation-methods.html).
You'll also need liblua 5.4 installed. You can then move the `nml` executable from `target/release/nml` into your `\$PATH`.
``cargo build --bin nml`` or for release mode: ``cargo build --release --bin nml``
# Building your first document
* ``nml -i input.nml -o output.html``
# Using the cache
NML relies on sqlite to keep a cache of precompiled elements that take a long time to process (e.g $|[kind=inline] \LaTeX|$).
To enable caching, use option `-d` with a path: ``-d cache.db``. You can reuse the same cache for multiple documents and benefit from cached elements.
Note that in directory-processing mode, a cache is required so that only modified ``.nml`` files get reprocessed.
# Directory-Processing mode
To use directory-processing mode, you need to pass an input directory and an output directory. Directory-processing mode requires that you use a database, so that it knows which documents have already been compiled. If the output directory doesn't exist, it will be automatically created.
Compiling the docs:
``Plain Text,
nml -i docs -o docs_out -d cache.db
``
If you modify an ``Plain Text,@import``ed file, you will need to use the ``--force-rebuild`` option, as NML currently doesn't track which files are imported by other files.


@ -1,12 +1,15 @@
@import ../template.nml
%<make_doc({"Styles"}, "Basic", "Basic Styles")>%
@compiler.output = basic.html
@nav.title = Basic
@nav.category = Styles
@html.page_title = Documentation | Basic Styles
# Basic styles
## Bold
Enclose text between two ``**`` to render it **bold**!
* ``**Bold text**`` → **Bold text**
* ``Bold [**link**](#)`` → Bold [**link**](#)
* ``**Bold [link](#)**`` → **Bold [link](#)**
## Italic


@ -1,62 +0,0 @@
@import ../template.nml
%<make_doc({"Styles"}, "Layouts", "Basic Layouts")>%
# Layouts
You can create layout blocks by using the following tokens:
* ``#+LAYOUT_BEGIN <layout_name>`` Starts layout `<layout_name>`
* ``#+LAYOUT_NEXT`` Advances layout to the next block
* ``#+LAYOUT_END`` Ends last created layout
Here's an example of what you can do using layouts (with flashy colors for show):
#+LAYOUT_BEGIN[style=background-color:#F00;flex:0.5] Split
First
#+LAYOUT_BEGIN[style=background-color:#FF0] Centered
Second
#+LAYOUT_END
#+LAYOUT_NEXT[style=background-color:#00F]
Third
#+LAYOUT_BEGIN[style=background-color:#0FF] Split
Fourth
#+LAYOUT_NEXT[style=background-color:#0F0]
Fifth
#+LAYOUT_END
#+LAYOUT_END
Given by the following code:
```Plain Text
#+LAYOUT_BEGIN[style=background-color:#F00;flex:0.5] Split
First
#+LAYOUT_BEGIN[style=background-color:#FF0] Centered
Second
#+LAYOUT_END
#+LAYOUT_NEXT[style=background-color:#00F]
Third
#+LAYOUT_BEGIN[style=background-color:#0FF] Split
Fourth
#+LAYOUT_NEXT[style=background-color:#0F0]
Fifth
#+LAYOUT_END
#+LAYOUT_END
```
*(indentation is for readability)*
# Available layouts
## Centered
The Centered layout aligns text to the center of the current block.
####+* Style
The ``Centered`` layout uses the `.centered` css class to center the text.
####+* Properties
* ``style`` Added css style to the div (defaults to none)
## Split
####+* Style
The ``Split`` layout uses the `.split-container` and `.split` css class to create the desired layout.
If you wish to modify the relative width of the splits, add `style=flex: 0.5` to the properties; this makes the following split half the width of the other splits.
####+* Properties
* ``style`` Added css style to the div (defaults to none)


@ -1,39 +1,7 @@
@import ../template.nml
%<make_doc({"Styles"}, "User-Defined", "User-Defined Styles")>%
@compiler.output = user-defined.html
@nav.title = User-Defined
@nav.category = Styles
@html.page_title = Documentation | User-Defined Styles
# Defining a custom style
```Lua
%<[main]
function undercustom_start(color)
nml.raw.push("inline", "<span style=\"border-bottom: 1px dashed " .. color .. "\">")
end
function undercustom_end()
nml.raw.push("inline", "</span>")
end
nml.custom_style.define_toggled("Undercustom Red", "~", "undercustom_start(\"red\")", "undercustom_end()")
nml.custom_style.define_paired("Undercustom Green", "[|", "|]", "undercustom_start(\"Green\")", "undercustom_end()")
>%
```
%<[main]
function undercustom_start(color)
nml.raw.push("inline", "<span style=\"border-bottom: 1px dashed " .. color .. "\">")
end
function undercustom_end()
nml.raw.push("inline", "</span>")
end
nml.custom_style.define_toggled("Undercustom Red", "~", "undercustom_start(\"red\")", "undercustom_end()")
nml.custom_style.define_paired("Undercustom Green", "[|", "|]", "undercustom_start(\"Green\")", "undercustom_end()")
>%
Results in the following:
* ``Plain Text,~Dashed underline~`` → ~Dashed underline~
* ``Plain Text,[|Dashed underline|]`` → [|Dashed underline|]
# Limitations
* Custom styles cannot be removed and remain defined for the rest of the document
* Custom styles defined from lua must have their `start` and `end` functions in the `main` lua kernel.
# TODO


@ -6,27 +6,3 @@
\definecolor{__color1}{HTML}{d5d5d5} \\
\everymath{\color{__color1}\displaystyle}
@tex.main.block_prepend = \color{__color1}
@<
function make_doc(categories, title, page_title)
-- Navigation
nml.variable.insert("nav.title", title)
if categories[1] ~= nil
then
nml.variable.insert("nav.category", categories[1])
if categories[2] ~= nil
then
nml.variable.insert("nav.subcategory", categories[2])
end
end
-- HTML
nml.variable.insert("html.page_title", "NML | " .. page_title)
nml.variable.insert("compiler.output", page_title .. ".html")
end
>@
@@style.section = {
"link_pos": "Before",
"link": ["", "🔗 ", " "]
}

src/cache/cache.rs (vendored): 8 changed lines

@ -23,7 +23,7 @@ pub trait Cached {
fn key(&self) -> <Self as Cached>::Key;
fn init(con: &Connection) -> Result<(), rusqlite::Error> {
fn init(con: &mut Connection) -> Result<(), rusqlite::Error> {
con.execute(<Self as Cached>::sql_table(), ()).map(|_| ())
}
@ -38,7 +38,7 @@ pub trait Cached {
/// Note that on error, [`f`] may still have been called
fn cached<E, F>(
&self,
con: &Connection,
con: &mut Connection,
f: F,
) -> Result<<Self as Cached>::Value, CachedError<E>>
where
@ -62,10 +62,10 @@ pub trait Cached {
if let Some(value) = value {
// Found in cache
Ok(value)
return Ok(value);
} else {
// Compute a value
let value = match f(self) {
let value = match f(&self) {
Ok(val) => val,
Err(e) => return Err(CachedError::GenErr(e)),
};


@ -1,137 +1,48 @@
use std::cell::Ref;
use std::cell::RefCell;
use std::cell::RefMut;
use std::collections::HashMap;
use std::rc::Rc;
use rusqlite::Connection;
use crate::document::document::CrossReference;
use crate::document::document::Document;
use crate::document::document::ElemReference;
use crate::document::variable::Variable;
use super::postprocess::PostProcess;
#[derive(Clone, Copy)]
pub enum Target {
HTML,
#[allow(unused)]
LATEX,
}
pub struct Compiler<'a> {
pub struct Compiler {
target: Target,
cache: Option<&'a Connection>,
cache: Option<RefCell<Connection>>,
reference_count: RefCell<HashMap<String, HashMap<String, usize>>>,
sections_counter: RefCell<Vec<usize>>,
unresolved_references: RefCell<Vec<(usize, CrossReference)>>,
// TODO: External references, i.e resolved later
}
impl<'a> Compiler<'a> {
pub fn new(target: Target, con: Option<&'a Connection>) -> Self {
impl Compiler {
pub fn new(target: Target, db_path: Option<String>) -> Self {
let cache = match db_path {
None => None,
Some(path) => match Connection::open(path) {
Err(e) => panic!("Cannot connect to database: {e}"),
Ok(con) => Some(con),
},
};
Self {
target,
cache: con,
cache: cache.map(|con| RefCell::new(con)),
reference_count: RefCell::new(HashMap::new()),
sections_counter: RefCell::new(vec![]),
unresolved_references: RefCell::new(vec![]),
}
}
/// Gets the section counter for a given depth
/// This function modifies the section counter
pub fn section_counter(&self, depth: usize) -> Ref<'_, Vec<usize>> {
// Increment current counter
if self.sections_counter.borrow().len() == depth {
self.sections_counter
.borrow_mut()
.last_mut()
.map(|id| *id += 1);
return Ref::map(self.sections_counter.borrow(), |b| b);
}
// Close
while self.sections_counter.borrow().len() > depth {
self.sections_counter.borrow_mut().pop();
}
// Open
while self.sections_counter.borrow().len() < depth {
self.sections_counter.borrow_mut().push(1);
}
Ref::map(self.sections_counter.borrow(), |b| b)
}
/// Sanitizes text for a [`Target`]
pub fn sanitize<S: AsRef<str>>(target: Target, str: S) -> String {
match target {
Target::HTML => str
.as_ref()
.replace("&", "&amp;")
.replace("<", "&lt;")
.replace(">", "&gt;")
.replace("\"", "&quot;"),
_ => todo!("Sanitize not implemented"),
}
}
/// Sanitizes a format string for a [`Target`]
///
/// # Notes
///
/// This function may process invalid format strings, which will be caught later
/// by runtime_format.
pub fn sanitize_format<S: AsRef<str>>(target: Target, str: S) -> String {
match target {
Target::HTML => {
let mut out = String::new();
let mut braces = 0;
for c in str.as_ref().chars() {
if c == '{' {
out.push(c);
braces += 1;
continue;
} else if c == '}' {
out.push(c);
if braces != 0 {
braces -= 1;
}
continue;
}
// Inside format args
if braces % 2 == 1 {
out.push(c);
continue;
}
match c {
'&' => out += "&amp;",
'<' => out += "&lt;",
'>' => out += "&gt;",
'"' => out += "&quot;",
_ => out.push(c),
}
}
out
}
_ => todo!("Sanitize not implemented"),
}
}
/// Gets a reference name
pub fn refname<S: AsRef<str>>(target: Target, str: S) -> String {
Self::sanitize(target, str).replace(' ', "_")
}
/// Inserts or get a reference id for the compiled document
///
/// # Parameters
/// - [`reference`] The reference to get or insert
pub fn reference_id(&self, document: &dyn Document, reference: ElemReference) -> usize {
pub fn reference_id<'a>(&self, document: &'a dyn Document, reference: ElemReference) -> usize {
let mut borrow = self.reference_count.borrow_mut();
let reference = document.get_from_reference(&reference).unwrap();
let refkey = reference.refcount_key();
@ -157,18 +68,22 @@ impl<'a> Compiler<'a> {
}
}
/// Inserts a new crossreference
pub fn insert_crossreference(&self, pos: usize, reference: CrossReference) {
self.unresolved_references
.borrow_mut()
.push((pos, reference));
}
pub fn target(&self) -> Target { self.target }
pub fn cache(&self) -> Option<&'a Connection> {
self.cache
//self.cache.as_ref().map(RefCell::borrow_mut)
pub fn cache(&self) -> Option<RefMut<'_, Connection>> {
self.cache.as_ref().map(RefCell::borrow_mut)
}
pub fn sanitize<S: AsRef<str>>(target: Target, str: S) -> String {
match target {
Target::HTML => str
.as_ref()
.replace("&", "&amp;")
.replace("<", "&lt;")
.replace(">", "&gt;")
.replace("\"", "&quot;"),
_ => todo!("Sanitize not implemented"),
}
}
pub fn header(&self, document: &dyn Document) -> String {
@ -176,13 +91,16 @@ impl<'a> Compiler<'a> {
document: &dyn Document,
var_name: &'static str,
) -> Option<Rc<dyn Variable>> {
document.get_variable(var_name).or_else(|| {
println!(
"Missing variable `{var_name}` in {}",
document.source().name()
);
None
})
document
.get_variable(var_name)
.and_then(|var| Some(var))
.or_else(|| {
println!(
"Missing variable `{var_name}` in {}",
document.source().name()
);
None
})
}
let mut result = String::new();
@ -191,11 +109,8 @@ impl<'a> Compiler<'a> {
result += "<!DOCTYPE HTML><html><head>";
result += "<meta charset=\"UTF-8\">";
if let Some(page_title) = get_variable_or_error(document, "html.page_title") {
result += format!(
"<title>{}</title>",
Compiler::sanitize(self.target(), page_title.to_string())
)
.as_str();
result += format!("<title>{}</title>", Compiler::sanitize(self.target(), page_title.to_string()))
.as_str();
}
if let Some(css) = document.get_variable("html.css") {
@ -205,7 +120,7 @@ impl<'a> Compiler<'a> {
)
.as_str();
}
result += r#"</head><body><div class="layout">"#;
result += r#"</head><body><div id="layout">"#;
// TODO: TOC
// TODO: Author, Date, Title, Div
@ -226,20 +141,20 @@ impl<'a> Compiler<'a> {
result
}
pub fn compile(&self, document: &dyn Document) -> (CompiledDocument, PostProcess) {
pub fn compile(&self, document: &dyn Document) -> CompiledDocument {
let borrow = document.content().borrow();
// Header
let header = self.header(document);
// Body
let mut body = r#"<div class="content">"#.to_string();
let mut body = r#"<div id="content">"#.to_string();
for i in 0..borrow.len() {
let elem = &borrow[i];
match elem.compile(self, document, body.len()) {
match elem.compile(self, document) {
Ok(result) => body.push_str(result.as_str()),
Err(err) => println!("Unable to compile element: {err}\n{elem:#?}"),
Err(err) => println!("Unable to compile element: {err}\n{}", elem.to_string()),
}
}
body.push_str("</div>");
@ -256,35 +171,14 @@ impl<'a> Compiler<'a> {
.map(|(key, var)| (key.clone(), var.to_string()))
.collect::<HashMap<String, String>>();
// References
let references = document
.scope()
.borrow_mut()
.referenceable
.iter()
.map(|(key, reference)| {
let elem = document.get_from_reference(reference).unwrap();
let refid = self.reference_id(document, *reference);
(key.clone(), elem.refid(self, refid))
})
.collect::<HashMap<String, String>>();
let postprocess = PostProcess {
resolve_references: self.unresolved_references.replace(vec![]),
};
let cdoc = CompiledDocument {
CompiledDocument {
input: document.source().name().clone(),
mtime: 0,
variables,
references,
header,
body,
footer,
};
(cdoc, postprocess)
}
}
}
@ -295,14 +189,12 @@ pub struct CompiledDocument {
/// Modification time (i.e seconds since last epoch)
pub mtime: u64,
/// All the variables defined in the document
/// with values mapped by [`Variable::to_string()`]
// TODO: Also store exported references
// so they can be referenced from elsewhere
// This will also require rebuilding in case some exported references have changed...
/// Variables exported to string, so they can be queried later
pub variables: HashMap<String, String>,
/// All the referenceable elements in the document
/// with values mapped by [`ReferenceableElement::refid()`]
pub references: HashMap<String, String>,
/// Compiled document's header
pub header: String,
/// Compiled document's body
@ -315,11 +207,10 @@ impl CompiledDocument {
pub fn get_variable(&self, name: &str) -> Option<&String> { self.variables.get(name) }
fn sql_table() -> &'static str {
"CREATE TABLE IF NOT EXISTS compiled_documents(
"CREATE TABLE IF NOT EXISTS compiled_documents (
input TEXT PRIMARY KEY,
mtime INTEGER NOT NULL,
variables TEXT NOT NULL,
internal_references TEXT NOT NULL,
header TEXT NOT NULL,
body TEXT NOT NULL,
footer TEXT NOT NULL
@ -329,7 +220,7 @@ impl CompiledDocument {
fn sql_get_query() -> &'static str { "SELECT * FROM compiled_documents WHERE input = (?1)" }
fn sql_insert_query() -> &'static str {
"INSERT OR REPLACE INTO compiled_documents (input, mtime, variables, internal_references, header, body, footer) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)"
"INSERT OR REPLACE INTO compiled_documents (input, mtime, variables, header, body, footer) VALUES (?1, ?2, ?3, ?4, ?5, ?6)"
}
pub fn init_cache(con: &Connection) -> Result<usize, rusqlite::Error> {
@ -342,16 +233,15 @@ impl CompiledDocument {
input: input.to_string(),
mtime: row.get_unwrap::<_, u64>(1),
variables: serde_json::from_str(row.get_unwrap::<_, String>(2).as_str()).unwrap(),
references: serde_json::from_str(row.get_unwrap::<_, String>(3).as_str()).unwrap(),
header: row.get_unwrap::<_, String>(4),
body: row.get_unwrap::<_, String>(5),
footer: row.get_unwrap::<_, String>(6),
header: row.get_unwrap::<_, String>(3),
body: row.get_unwrap::<_, String>(4),
footer: row.get_unwrap::<_, String>(5),
})
})
.ok()
}
/// Interts [`CompiledDocument`] into cache
/// Inserts [`CompiledDocument`] into cache
pub fn insert_cache(&self, con: &Connection) -> Result<usize, rusqlite::Error> {
con.execute(
Self::sql_insert_query(),
@ -359,7 +249,6 @@ impl CompiledDocument {
&self.input,
&self.mtime,
serde_json::to_string(&self.variables).unwrap(),
serde_json::to_string(&self.references).unwrap(),
&self.header,
&self.body,
&self.footer,
@ -367,19 +256,3 @@ impl CompiledDocument {
)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn sanitize_test() {
assert_eq!(Compiler::sanitize(Target::HTML, "<a>"), "&lt;a&gt;");
assert_eq!(Compiler::sanitize(Target::HTML, "&lt;"), "&amp;lt;");
assert_eq!(Compiler::sanitize(Target::HTML, "\""), "&quot;");
assert_eq!(Compiler::sanitize_format(Target::HTML, "{<>&\"}"), "{<>&\"}");
assert_eq!(Compiler::sanitize_format(Target::HTML, "{{<>}}"), "{{&lt;&gt;}}");
assert_eq!(Compiler::sanitize_format(Target::HTML, "{{<"), "{{&lt;");
}
}


@ -1,4 +1,2 @@
pub mod compiler;
pub mod navigation;
pub mod process;
pub mod postprocess;


@ -1,58 +1,45 @@
use std::cell::RefCell;
use std::collections::HashMap;
use crate::compiler::compiler::Compiler;
use super::compiler::CompiledDocument;
use super::compiler::Target;
use super::postprocess::PostProcess;
#[derive(Debug, Default, PartialEq, Eq, Clone)]
pub struct NavEntry {
title: String,
path: String,
previous: Option<String>,
}
#[derive(Debug, Default)]
pub struct NavEntries {
pub(self) entries: Vec<NavEntry>,
pub(self) children: HashMap<String, NavEntries>,
pub struct NavEntry {
pub(self) entries: Vec<(String, String)>,
pub(self) children: HashMap<String, NavEntry>,
}
impl NavEntries {
impl NavEntry {
// FIXME: Sanitize
pub fn compile(&self, target: Target, doc: &RefCell<CompiledDocument>) -> String {
let doc_borrow = doc.borrow();
pub fn compile(&self, target: Target, doc: &CompiledDocument) -> String {
let categories = vec![
doc_borrow
.get_variable("nav.category")
.map_or("", |s| s.as_str()),
doc_borrow
.get_variable("nav.subcategory")
doc.get_variable("nav.category").map_or("", |s| s.as_str()),
doc.get_variable("nav.subcategory")
.map_or("", |s| s.as_str()),
];
let mut result = String::new();
match target {
Target::HTML => {
result += r#"<div class="navbar"><ul>"#;
result += r#"<div id="navbar"><ul>"#;
fn process(
target: Target,
categories: &Vec<&str>,
did_match: bool,
result: &mut String,
entry: &NavEntries,
entry: &NavEntry,
depth: usize,
) {
// Orphans = Links
for entry in &entry.entries {
for (title, path) in &entry.entries {
result.push_str(
format!(
r#"<li><a href="{}">{}</a></li>"#,
Compiler::sanitize(target, entry.path.as_str()),
Compiler::sanitize(target, entry.title.as_str())
Compiler::sanitize(target, path),
Compiler::sanitize(target, title)
)
.as_str(),
);
@ -88,93 +75,47 @@ impl NavEntries {
}
result
}
fn sort_entry(
entrymap: &HashMap<String, Option<String>>,
left_title: &str,
right_title: &str,
) -> std::cmp::Ordering {
let left_previous = entrymap.get(left_title).unwrap();
let right_previous = entrymap.get(right_title).unwrap();
match (left_previous, right_previous) {
(Some(lp), Some(rp)) => {
if lp.as_str() == right_title {
std::cmp::Ordering::Greater
} else if rp.as_str() == left_title {
std::cmp::Ordering::Less
} else if rp.as_str() == lp.as_str() {
left_title.cmp(right_title)
} else {
Self::sort_entry(entrymap, lp.as_str(), rp.as_str())
}
}
(Some(lp), None) => {
if right_title == lp.as_str() {
std::cmp::Ordering::Greater
} else {
left_title.cmp(right_title)
}
}
(None, Some(rp)) => {
if left_title == rp.as_str() {
std::cmp::Ordering::Less
} else {
left_title.cmp(right_title)
}
}
(None, None) => left_title.cmp(right_title),
}
}
}
pub fn create_navigation(
docs: &Vec<(RefCell<CompiledDocument>, Option<PostProcess>)>,
) -> Result<NavEntries, String> {
let mut nav = NavEntries {
pub fn create_navigation(docs: &Vec<CompiledDocument>) -> Result<NavEntry, String> {
let mut nav = NavEntry {
entries: vec![],
children: HashMap::new(),
};
// All paths (for duplicate checking)
let mut all_paths = HashMap::new();
for (doc, _) in docs {
let doc_borrow = doc.borrow();
let cat = doc_borrow.get_variable("nav.category");
let subcat = doc_borrow.get_variable("nav.subcategory");
let title = doc_borrow
for doc in docs {
let cat = doc.get_variable("nav.category");
let subcat = doc.get_variable("nav.subcategory");
let title = doc
.get_variable("nav.title")
.or(doc_borrow.get_variable("doc.title"));
let previous = doc_borrow.get_variable("nav.previous").cloned();
let path = doc_borrow.get_variable("compiler.output");
.or(doc.get_variable("doc.title"));
let path = doc.get_variable("compiler.output");
let (title, path) = match (title, path) {
(Some(title), Some(path)) => (title, path),
_ => {
eprintln!("Skipping navigation generation for `{}`, must have a defined `@nav.title` and `@compiler.output`", doc_borrow.input);
eprintln!("Skipping navigation generation for `{}`, must have a defined `@nav.title` and `@compiler.output`", doc.input);
continue;
}
};
// Get entry to insert into
let pent = if let Some(subcat) = subcat {
let cat = match cat {
Some(cat) => cat,
None => {
eprintln!(
"Skipping `{}`: No `@nav.category`, but `@nav.subcategory` is set",
doc_borrow.input
doc.input
);
continue;
}
};
let cat_ent = match nav.children.get_mut(cat.as_str()) {
let mut cat_ent = match nav.children.get_mut(cat.as_str()) {
Some(cat_ent) => cat_ent,
None => {
// Insert
nav.children.insert(cat.clone(), NavEntries::default());
nav.children.insert(cat.clone(), NavEntry::default());
nav.children.get_mut(cat.as_str()).unwrap()
}
};
@ -183,9 +124,7 @@ pub fn create_navigation(
Some(subcat_ent) => subcat_ent,
None => {
// Insert
cat_ent
.children
.insert(subcat.clone(), NavEntries::default());
cat_ent.children.insert(subcat.clone(), NavEntry::default());
cat_ent.children.get_mut(subcat.as_str()).unwrap()
}
}
@ -194,7 +133,7 @@ pub fn create_navigation(
Some(cat_ent) => cat_ent,
None => {
// Insert
nav.children.insert(cat.clone(), NavEntries::default());
nav.children.insert(cat.clone(), NavEntry::default());
nav.children.get_mut(cat.as_str()).unwrap()
}
}
@ -202,158 +141,8 @@ pub fn create_navigation(
&mut nav
};
// Find duplicates titles in current parent
for entry in &pent.entries {
if &entry.title == title {
return Err(format!(
"Conflicting entry title `{title}` for entries with the same parent: ({})",
pent.entries
.iter()
.map(|entry| entry.title.clone())
.collect::<Vec<_>>()
.join(", ")
));
}
}
// Find duplicate paths
if let Some(dup_title) = all_paths.get(path) {
return Err(format!("Conflicting paths: `{path}`. Previously used for entry: `{dup_title}`, conflicting use in `{title}`"));
}
all_paths.insert(path.clone(), title.clone());
pent.entries.push(NavEntry {
title: title.clone(),
path: path.clone(),
previous,
});
pent.entries.push((title.clone(), path.clone()))
}
// Sort entries
fn sort_entries(nav: &mut NavEntries) {
let entrymap = nav
.entries
.iter()
.map(|ent| (ent.title.clone(), ent.previous.clone()))
.collect::<HashMap<String, Option<String>>>();
nav.entries
.sort_by(|l, r| NavEntries::sort_entry(&entrymap, l.title.as_str(), r.title.as_str()));
for (_, child) in &mut nav.children {
sort_entries(child);
}
}
sort_entries(&mut nav);
Ok(nav)
}
#[cfg(test)]
mod tests {
use rand::prelude::SliceRandom;
use rand::rngs::OsRng;
use crate::compiler::process::process_from_memory;
use super::*;
#[test]
fn sort() {
let entries: Vec<NavEntry> = vec![
NavEntry {
title: "Index".into(),
path: "".into(),
previous: None,
},
NavEntry {
title: "AB".into(),
path: "".into(),
previous: Some("Index".into()),
},
NavEntry {
title: "Getting Started".into(),
path: "".into(),
previous: Some("Index".into()),
},
NavEntry {
title: "Sections".into(),
path: "".into(),
previous: Some("Getting Started".into()),
},
NavEntry {
title: "Style".into(),
path: "".into(),
previous: Some("Getting Started".into()),
},
];
let mut shuffled = entries.clone();
for _ in 0..10 {
let mut rng = OsRng {};
shuffled.shuffle(&mut rng);
let entrymap = shuffled
.iter()
.map(|ent| (ent.title.clone(), ent.previous.clone()))
.collect::<HashMap<String, Option<String>>>();
shuffled.sort_by(|l, r| {
NavEntries::sort_entry(&entrymap, l.title.as_str(), r.title.as_str())
});
assert_eq!(shuffled, entries);
}
}
#[test]
pub fn batch() {
let result = process_from_memory(
Target::HTML,
vec![
r#"
@html.page_title = 0
@compiler.output = 0.html
@nav.title = C
@nav.category = First
"#
.into(),
r#"
@html.page_title = 1
@compiler.output = 1.html
@nav.title = A
@nav.category = First
"#
.into(),
r#"
@html.page_title = 2
@compiler.output = 2.html
@nav.title = B
@nav.category = First
"#
.into(),
],
)
.unwrap();
let nav = create_navigation(&result).unwrap();
assert_eq!(
nav.children.get("First").unwrap().entries,
vec![
NavEntry {
title: "A".to_string(),
path: "1.html".to_string(),
previous: None
},
NavEntry {
title: "B".to_string(),
path: "2.html".to_string(),
previous: None
},
NavEntry {
title: "C".to_string(),
path: "0.html".to_string(),
previous: None
},
]
);
}
}

View file

@ -1,81 +0,0 @@
use std::cell::RefCell;
use crate::document::document::CrossReference;
use super::compiler::CompiledDocument;
use super::compiler::Target;
/// Represents the list of tasks that have to run after the document has been compiled and the
/// compiled document list has been built. Every task is stored with a raw byte position in the
/// compiled document's body. The position refers to the original body, so it must be offset to
/// account for insertions made by earlier post-processing tasks.
pub struct PostProcess {
/// List of references to resolve, i.e. insert the resolved refname at a certain byte position
/// in the document's body
pub resolve_references: Vec<(usize, CrossReference)>,
}
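// A minimal, self-contained sketch of the offset bookkeeping described above
// (illustrative only; the real logic lives in `PostProcess::apply` below).
// Every stored position refers to the original body, so each insertion shifts
// the effective position of all tasks that come after it.
fn apply_insertions_sketch(mut body: String, insertions: &[(usize, String)]) -> String {
    let mut offset = 0usize;
    // Assumes `insertions` is sorted by original byte position.
    for (pos, text) in insertions {
        body.insert_str(pos + offset, text);
        offset += text.len();
    }
    body
}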
impl PostProcess {
/// Applies postprocessing to a [`CompiledDocument`]
pub fn apply(
&self,
_target: Target,
list: &Vec<(RefCell<CompiledDocument>, Option<PostProcess>)>,
doc: &RefCell<CompiledDocument>,
) -> Result<String, String> {
let mut content = doc.borrow().body.clone();
let mut offset = 0;
for (pos, cross_ref) in &self.resolve_references {
// Cross-references
let mut found_ref: Option<(String, &RefCell<CompiledDocument>)> = None;
match cross_ref {
CrossReference::Unspecific(name) => {
for (doc, _) in list {
if let Some(found) = doc.borrow().references.get(name) {
// Check for duplicates
if let Some((_, previous_doc)) = &found_ref {
return Err(format!("Cannot use an unspecific reference for reference named: `{name}`. Found in document `{}` but also in `{}`. Specify the source of the reference to resolve the conflict.", previous_doc.borrow().input, doc.borrow().input));
}
found_ref = Some((found.clone(), doc));
}
}
}
CrossReference::Specific(doc_name, name) => {
let ref_doc = list.iter().find(|(doc, _)| {
let doc_borrow = doc.borrow();
if let Some(outname) = doc_borrow.variables.get("compiler.output") {
// Strip extension
let split_at = outname.rfind('.').unwrap_or(outname.len());
return doc_name == outname.split_at(split_at).0;
}
false
});
if ref_doc.is_none() {
return Err(format!(
"Cannot find document `{doc_name}` for reference `{name}` in `{}`",
doc.borrow().input
));
}
if let Some(found) = ref_doc.unwrap().0.borrow().references.get(name) {
found_ref = Some((found.clone(), &ref_doc.unwrap().0));
}
}
}
if let Some((found_ref, found_doc)) = &found_ref {
let found_borrow = found_doc.borrow();
let found_path = found_borrow.get_variable("compiler.output").ok_or("Unable to get the output. Aborting postprocessing.".to_string())?;
let insert_content = format!("{found_path}#{found_ref}");
content.insert_str(pos + offset, insert_content.as_str());
offset += insert_content.len();
} else {
return Err(format!("Cannot find reference `{cross_ref}` from document `{}`. Aborting postprocessing.", doc.borrow().input));
}
}
Ok(content)
}
}

View file

@ -1,196 +0,0 @@
use std::cell::RefCell;
use std::path::PathBuf;
use std::rc::Rc;
use std::time::UNIX_EPOCH;
use rusqlite::Connection;
use crate::document::document::Document;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::parser::ParserState;
use crate::parser::source::Source;
use crate::parser::source::SourceFile;
use super::compiler::CompiledDocument;
use super::compiler::Compiler;
use super::compiler::Target;
use super::postprocess::PostProcess;
/// Parses a source file into a document
fn parse(
parser: &LangParser,
source: Rc<dyn Source>,
debug_opts: &Vec<String>,
) -> Result<Box<dyn Document<'static>>, String> {
// Parse
//let source = SourceFile::new(input.to_string(), None).unwrap();
let (doc, _) = parser.parse(ParserState::new(parser, None), source.clone(), None);
if debug_opts.contains(&"ast".to_string()) {
println!("-- BEGIN AST DEBUGGING --");
doc.content()
.borrow()
.iter()
.for_each(|elem| println!("{elem:#?}"));
println!("-- END AST DEBUGGING --");
}
if debug_opts.contains(&"ref".to_string()) {
println!("-- BEGIN REFERENCES DEBUGGING --");
let sc = doc.scope().borrow();
sc.referenceable.iter().for_each(|(name, reference)| {
println!(" - {name}: `{:#?}`", doc.get_from_reference(reference));
});
println!("-- END REFERENCES DEBUGGING --");
}
if debug_opts.contains(&"var".to_string()) {
println!("-- BEGIN VARIABLES DEBUGGING --");
let sc = doc.scope().borrow();
sc.variables.iter().for_each(|(_name, var)| {
println!(" - `{:#?}`", var);
});
println!("-- END VARIABLES DEBUGGING --");
}
if parser.has_error() {
return Err("Parsing failed due to errors while parsing".to_string());
}
Ok(doc)
}
/// Takes a list of paths and processes it into a list of compiled documents
pub fn process(
target: Target,
files: Vec<PathBuf>,
db_path: &Option<String>,
force_rebuild: bool,
debug_opts: &Vec<String>,
) -> Result<Vec<(RefCell<CompiledDocument>, Option<PostProcess>)>, String> {
let mut compiled = vec![];
let current_dir = std::env::current_dir()
.map_err(|err| format!("Unable to get the current working directory: {err}"))?;
let con = db_path
.as_ref()
.map_or(Connection::open_in_memory(), Connection::open)
.map_err(|err| format!("Unable to open connection to the database: {err}"))?;
CompiledDocument::init_cache(&con)
.map_err(|err| format!("Failed to initialize cached document table: {err}"))?;
let parser = LangParser::default();
for file in files {
let meta = std::fs::metadata(&file)
.map_err(|err| format!("Failed to get metadata for `{file:#?}`: {err}"))?;
let modified = meta
.modified()
.map_err(|err| format!("Unable to query modification time for `{file:#?}`: {err}"))?;
// Move to file's directory
let file_parent_path = file
.parent()
.ok_or(format!("Failed to get parent path for `{file:#?}`"))?;
std::env::set_current_dir(file_parent_path)
.map_err(|err| format!("Failed to move to path `{file_parent_path:#?}`: {err}"))?;
let parse_and_compile = || -> Result<(CompiledDocument, Option<PostProcess>), String> {
// Parse
let source = SourceFile::new(file.to_str().unwrap().to_string(), None).unwrap();
println!("Parsing {}...", source.name());
let doc = parse(&parser, Rc::new(source), debug_opts)?;
// Compile
let compiler = Compiler::new(target, Some(&con));
let (mut compiled, postprocess) = compiler.compile(&*doc);
compiled.mtime = modified.duration_since(UNIX_EPOCH).unwrap().as_secs();
Ok((compiled, Some(postprocess)))
};
let (cdoc, post) = if force_rebuild {
parse_and_compile()?
} else {
match CompiledDocument::from_cache(&con, file.to_str().unwrap()) {
Some(compiled) => {
if compiled.mtime < modified.duration_since(UNIX_EPOCH).unwrap().as_secs() {
parse_and_compile()?
} else {
(compiled, None)
}
}
None => parse_and_compile()?,
}
};
compiled.push((RefCell::new(cdoc), post));
}
for (doc, postprocess) in &compiled {
if postprocess.is_none() {
continue;
}
// Post processing
let body = postprocess
.as_ref()
.unwrap()
.apply(target, &compiled, doc)?;
doc.borrow_mut().body = body;
// Insert into cache
doc.borrow().insert_cache(&con).map_err(|err| {
format!(
"Failed to insert compiled document from `{}` into cache: {err}",
doc.borrow().input
)
})?;
}
std::env::set_current_dir(current_dir)
.map_err(|err| format!("Failed to set current directory: {err}"))?;
Ok(compiled)
}
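// A hedged usage sketch of `process` (the file names here are illustrative
// only): compile two source files to HTML with an in-memory cache and no
// forced rebuild. The returned list is what `create_navigation` and the
// post-processing pass consume.
fn build_site_sketch() -> Result<(), String> {
    let docs = process(
        Target::HTML,
        vec![PathBuf::from("index.nml"), PathBuf::from("about.nml")],
        &None,   // no database path: fall back to an in-memory connection
        false,   // only re-parse documents whose modification time changed
        &vec![], // no debug options
    )?;
    println!("Compiled {} documents", docs.len());
    Ok(())
}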
/// Processes sources from in-memory strings
/// This function is intended for testing
#[cfg(test)]
pub fn process_from_memory(target: Target, sources: Vec<String>) -> Result<Vec<(RefCell<CompiledDocument>, Option<PostProcess>)>, String> {
let mut compiled = vec![];
let parser = LangParser::default();
for (idx, content) in sources.iter().enumerate() {
let parse_and_compile = || -> Result<(CompiledDocument, Option<PostProcess>), String> {
// Parse
let source = SourceFile::with_content(format!("{idx}"), content.clone(), None);
let doc = parse(&parser, Rc::new(source), &vec![])?;
// Compile
let compiler = Compiler::new(target, None);
let (compiled, postprocess) = compiler.compile(&*doc);
Ok((compiled, Some(postprocess)))
};
let (cdoc, post) = parse_and_compile()?;
compiled.push((RefCell::new(cdoc), post));
}
for (doc, postprocess) in &compiled {
if postprocess.is_none() {
continue;
}
// Post processing
let body = postprocess
.as_ref()
.unwrap()
.apply(target, &compiled, doc)?;
doc.borrow_mut().body = body;
}
Ok(compiled)
}

View file

@ -4,17 +4,13 @@ use std::cell::RefMut;
use std::collections::hash_map::HashMap;
use std::rc::Rc;
use serde::Deserialize;
use serde::Serialize;
use crate::parser::source::Source;
use super::element::Element;
use super::element::ReferenceableElement;
use super::variable::Variable;
/// For references inside the current document
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy)]
pub enum ElemReference {
Direct(usize),
@ -22,30 +18,10 @@ pub enum ElemReference {
Nested(usize, usize),
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum CrossReference {
/// When the referenced document is unspecified
Unspecific(String),
/// When the referenced document is specified
Specific(String, String),
}
impl core::fmt::Display for CrossReference {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self
{
CrossReference::Unspecific(name) => write!(f, "#{name}"),
CrossReference::Specific(doc_name, name) => write!(f, "{doc_name}#{name}"),
}
}
}
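// A small illustrative sketch (values are examples only) of how the two
// variants render: an unspecific reference prints as `#name`, while a
// specific one is prefixed with the name of the document it points into.
#[cfg(test)]
mod cross_reference_display_sketch {
    use super::CrossReference;

    #[test]
    fn display() {
        assert_eq!(
            CrossReference::Unspecific("intro".into()).to_string(),
            "#intro"
        );
        assert_eq!(
            CrossReference::Specific("getting_started".into(), "intro".into()).to_string(),
            "getting_started#intro"
        );
    }
}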
#[derive(Debug)]
pub struct Scope {
/// List of all referenceable elements in current scope.
/// All elements in this should return a non empty element
/// when [`Element::as_referenceable`] is called
/// All elements in this should return a non empty
pub referenceable: HashMap<String, ElemReference>,
pub variables: HashMap<String, Rc<dyn Variable>>,
}
@ -75,7 +51,7 @@ impl Scope {
// Variables
self.variables
.extend(other.variables.drain());
.extend(other.variables.drain().map(|(name, var)| (name, var)));
}
false => {
// References
@ -165,15 +141,15 @@ pub trait Document<'a>: core::fmt::Debug {
fn get_variable(&self, name: &str) -> Option<Rc<dyn Variable>> {
match self.scope().borrow().variables.get(name) {
Some(variable) => {
Some(variable.clone())
return Some(variable.clone());
}
// Continue search recursively
None => match self.parent() {
Some(parent) => parent.get_variable(name),
Some(parent) => return parent.get_variable(name),
// Not found
None => None,
None => return None,
},
}
}
@ -189,23 +165,27 @@ pub trait Document<'a>: core::fmt::Debug {
scope: &RefCell<Scope>,
merge_as: Option<&String>,
) {
if let Some(merge_as) = merge_as { self.scope().borrow_mut().merge(
&mut scope.borrow_mut(),
merge_as,
self.content().borrow().len(),
) }
match merge_as {
Some(merge_as) => self.scope().borrow_mut().merge(
&mut *scope.borrow_mut(),
merge_as,
self.content().borrow().len() + 1,
),
_ => {}
}
// Content
self.content()
.borrow_mut()
.extend((content.borrow_mut()).drain(..));
.extend((content.borrow_mut()).drain(..).map(|value| value));
}
fn get_reference(&self, refname: &str) -> Option<ElemReference> {
self.scope()
.borrow()
.referenceable
.get(refname).copied()
.get(refname)
.and_then(|reference| Some(*reference))
}
fn get_from_reference(
@ -251,61 +231,3 @@ impl<'a> DocumentAccessors<'a> for dyn Document<'a> + '_ {
.ok()
}
}
#[cfg(test)]
pub mod tests {
#[macro_export]
macro_rules! validate_document {
($container:expr, $idx:expr,) => {};
($container:expr, $idx:expr, $t:ty; $($tail:tt)*) => {{
let elem = &$container[$idx];
assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}, got: {elem:#?}", $idx, stringify!($t));
validate_document!($container, ($idx+1), $($tail)*);
}};
($container:expr, $idx:expr, $t:ty { $($field:ident == $value:expr),* }; $($tail:tt)*) => {{
let elem = &$container[$idx];
assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}, got: {elem:#?}", $idx, stringify!($t));
$(
let val = &elem.downcast_ref::<$t>().unwrap().$field;
assert!(*val == $value, "Invalid field {} for {} at index {}, expected {:#?}, found {:#?}",
stringify!($field),
stringify!($t),
$idx,
$value,
val);
)*
validate_document!($container, ($idx+1), $($tail)*);
}};
($container:expr, $idx:expr, $t:ty { $($ts:tt)* }; $($tail:tt)*) => {{
let elem = &$container[$idx];
assert!(elem.downcast_ref::<$t>().is_some(), "Invalid container element at index {}, expected {}", $idx, stringify!($t));
let contained = elem.as_container().unwrap().contained();
validate_document!(contained, 0, $($ts)*);
validate_document!($container, ($idx+1), $($tail)*);
}};
($container:expr, $idx:expr, $t:ty { $($field:ident == $value:expr),* } { $($ts:tt)* }; $($tail:tt)*) => {{
let elem = &$container[$idx];
assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}, got: {elem:#?}", $idx, stringify!($t));
$(
let val = &elem.downcast_ref::<$t>().unwrap().$field;
assert!(*val == $value, "Invalid field {} for {} at index {}, expected {:#?}, found {:#?}",
stringify!($field),
stringify!($t),
$idx,
$value,
val);
)*
let contained = elem.as_container().unwrap().contained();
validate_document!(contained, 0, $($ts)*);
validate_document!($container, ($idx+1), $($tail)*);
}};
}
}

View file

@ -1,7 +1,7 @@
use std::str::FromStr;
use crate::compiler::compiler::Compiler;
use crate::elements::reference::InternalReference;
use crate::elements::reference::Reference;
use crate::parser::source::Token;
use downcast_rs::impl_downcast;
use downcast_rs::Downcast;
@ -34,7 +34,7 @@ impl FromStr for ElemKind {
}
}
pub trait Element: Downcast + core::fmt::Debug {
pub trait Element: Downcast {
/// Gets the element defined location i.e token without filename
fn location(&self) -> &Token;
@ -43,6 +43,9 @@ pub trait Element: Downcast + core::fmt::Debug {
/// Get the element's name
fn element_name(&self) -> &'static str;
/// Outputs element to string for debug purposes
fn to_string(&self) -> String;
/// Gets the element as a referenceable, i.e. an element that can be referenced
fn as_referenceable(&self) -> Option<&dyn ReferenceableElement> { None }
@ -50,10 +53,16 @@ pub trait Element: Downcast + core::fmt::Debug {
fn as_container(&self) -> Option<&dyn ContainerElement> { None }
/// Compiles element
fn compile(&self, compiler: &Compiler, document: &dyn Document, cursor: usize) -> Result<String, String>;
fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String>;
}
impl_downcast!(Element);
impl core::fmt::Debug for dyn Element {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.to_string())
}
}
pub trait ReferenceableElement: Element {
/// Reference name
fn reference_name(&self) -> Option<&String>;
@ -62,17 +71,13 @@ pub trait ReferenceableElement: Element {
fn refcount_key(&self) -> &'static str;
/// Creates the reference element
fn compile_reference(
&self,
compiler: &Compiler,
document: &dyn Document,
reference: &InternalReference,
refid: usize,
) -> Result<String, String>;
fn compile_reference(&self, compiler: &Compiler, document: &dyn Document, reference: &Reference, refid: usize) -> Result<String, String>;
}
/// Gets the refid for a compiler. The refid is some key that can be used from an external
/// document to reference this element.
fn refid(&self, compiler: &Compiler, refid: usize) -> String;
impl core::fmt::Debug for dyn ReferenceableElement {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.to_string())
}
}
pub trait ContainerElement: Element {
@ -83,6 +88,12 @@ pub trait ContainerElement: Element {
fn push(&mut self, elem: Box<dyn Element>) -> Result<(), String>;
}
impl core::fmt::Debug for dyn ContainerElement {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.to_string())
}
}
#[derive(Debug)]
pub struct DocumentEnd(pub Token);
@ -93,7 +104,9 @@ impl Element for DocumentEnd {
fn element_name(&self) -> &'static str { "Document End" }
fn compile(&self, _compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> {
fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, _compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
Ok(String::new())
}
}

View file

@ -1,27 +1,26 @@
use std::cell::RefCell;
use std::rc::Rc;
use std::{cell::RefCell, rc::Rc};
use crate::parser::source::Source;
use super::document::Document;
use super::document::Scope;
use super::element::Element;
use super::{document::{Document, Scope}, element::Element};
#[derive(Debug)]
pub struct LangDocument<'a> {
source: Rc<dyn Source>,
parent: Option<&'a dyn Document<'a>>,
/// Document's parent
parent: Option<&'a dyn Document<'a>>, /// Document's parent
// FIXME: Render these fields private
pub content: RefCell<Vec<Box<dyn Element>>>,
pub scope: RefCell<Scope>,
}
impl<'a> LangDocument<'a> {
pub fn new(source: Rc<dyn Source>, parent: Option<&'a dyn Document<'a>>) -> Self {
impl<'a> LangDocument<'a>
{
pub fn new(source: Rc<dyn Source>, parent: Option<&'a dyn Document<'a>>) -> Self
{
Self {
source,
parent,
source: source,
parent: parent,
content: RefCell::new(Vec::new()),
scope: RefCell::new(Scope::new()),
}
@ -29,13 +28,11 @@ impl<'a> LangDocument<'a> {
}
impl<'a> Document<'a> for LangDocument<'a> {
fn source(&self) -> Rc<dyn Source> { self.source.clone() }
fn source(&self) -> Rc<dyn Source> { self.source.clone() }
fn parent(&self) -> Option<&'a dyn Document<'a>> {
self.parent.map(|p| p as &dyn Document<'a>)
}
fn parent(&self) -> Option<&'a dyn Document<'a>> { self.parent.and_then(|p| Some(p as &dyn Document<'a>)) }
fn content(&self) -> &RefCell<Vec<Box<dyn Element>>> { &self.content }
fn content(&self) -> &RefCell<Vec<Box<dyn Element>>> { &self.content }
fn scope(&self) -> &RefCell<Scope> { &self.scope }
fn scope(&self) -> &RefCell<Scope> { &self.scope }
}

View file

@ -45,7 +45,6 @@ pub mod tests {
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::parser::parser::ParserState;
#[test]
fn validate_refname_tests() {
@ -55,7 +54,7 @@ pub mod tests {
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
let doc = parser.parse(source, None);
assert_eq!(validate_refname(&*doc, " abc ", true), Ok("abc"));
assert_eq!(

View file

@ -1,14 +1,11 @@
use std::{path::PathBuf, rc::Rc};
use crate::{elements::text::Text, parser::{parser::Parser, source::{Source, Token, VirtualSource}}};
use super::document::Document;
use crate::elements::text::Text;
use crate::parser::parser::ParserState;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::source::VirtualSource;
use std::path::PathBuf;
use std::rc::Rc;
// TODO enforce to_string(from_string(to_string())) == to_string()
pub trait Variable {
pub trait Variable
{
fn location(&self) -> &Token;
fn name(&self) -> &str;
@ -18,99 +15,90 @@ pub trait Variable {
/// Converts variable to a string
fn to_string(&self) -> String;
fn parse<'a>(&self, state: &ParserState, location: Token, document: &'a dyn Document<'a>);
fn parse<'a>(&self, location: Token, parser: &dyn Parser, document: &'a dyn Document<'a>);
}
impl core::fmt::Debug for dyn Variable {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
impl core::fmt::Debug for dyn Variable
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}{{{}}}", self.name(), self.to_string())
}
}
}
#[derive(Debug)]
pub struct BaseVariable {
pub struct BaseVariable
{
location: Token,
name: String,
value: String,
name: String,
value: String,
}
impl BaseVariable {
pub fn new(location: Token, name: String, value: String) -> Self {
Self {
location,
name,
value,
}
}
pub fn new(location: Token, name: String, value: String) -> Self {
Self { location, name, value }
}
}
impl Variable for BaseVariable {
impl Variable for BaseVariable
{
fn location(&self) -> &Token { &self.location }
fn name(&self) -> &str { self.name.as_str() }
fn name(&self) -> &str { self.name.as_str() }
fn from_string(&mut self, str: &str) -> Option<String> {
self.value = str.to_string();
None
}
fn from_string(&mut self, str: &str) -> Option<String> {
self.value = str.to_string();
None
}
fn to_string(&self) -> String { self.value.clone() }
fn to_string(&self) -> String { self.value.clone() }
fn parse<'a>(&self, state: &ParserState, _location: Token, document: &'a dyn Document<'a>) {
fn parse<'a>(&self, _location: Token, parser: &dyn Parser, document: &'a dyn Document<'a>) {
let source = Rc::new(VirtualSource::new(
self.location().clone(),
self.location().clone(),
self.name().to_string(),
self.to_string(),
));
self.to_string()));
state.with_state(|new_state| {
let _ = new_state.parser.parse_into(new_state, source, document);
});
parser.parse_into(source, document);
}
}
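// A hedged sketch of the round-trip property from the TODO above
// (`to_string(from_string(to_string())) == to_string()`), written in the style
// of the crate's other tests. The token and source values are placeholders.
#[cfg(test)]
mod roundtrip_sketch {
    use super::*;
    use crate::parser::source::{SourceFile, Token};
    use std::rc::Rc;

    #[test]
    fn base_variable_roundtrip() {
        let source = Rc::new(SourceFile::with_content(
            "".to_string(),
            "".to_string(),
            None,
        ));
        let mut var = BaseVariable::new(
            Token::new(0..0, source),
            "example".to_string(),
            "some value".to_string(),
        );
        let serialized = var.to_string();
        var.from_string(serialized.as_str());
        assert_eq!(var.to_string(), serialized);
    }
}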
#[derive(Debug)]
pub struct PathVariable {
pub struct PathVariable
{
location: Token,
name: String,
path: PathBuf,
name: String,
path: PathBuf,
}
impl PathVariable {
pub fn new(location: Token, name: String, path: PathBuf) -> Self {
Self {
location,
name,
path,
}
}
impl PathVariable
{
pub fn new(location: Token, name: String, path: PathBuf) -> Self {
Self { location, name, path }
}
}
impl Variable for PathVariable {
impl Variable for PathVariable
{
fn location(&self) -> &Token { &self.location }
fn name(&self) -> &str { self.name.as_str() }
fn name(&self) -> &str { self.name.as_str() }
fn from_string(&mut self, str: &str) -> Option<String> {
self.path = std::fs::canonicalize(str).unwrap();
None
}
fn from_string(&mut self, str: &str) -> Option<String> {
self.path = PathBuf::from(std::fs::canonicalize(str).unwrap());
None
}
fn to_string(&self) -> String { self.path.to_str().unwrap().to_string() }
fn to_string(&self) -> String { self.path.to_str().unwrap().to_string() }
fn parse(&self, state: &ParserState, location: Token, document: &dyn Document) {
fn parse<'a>(&self, location: Token, parser: &dyn Parser, document: &'a dyn Document) {
let source = Rc::new(VirtualSource::new(
location,
self.name().to_string(),
self.to_string(),
));
self.to_string()));
state.push(
document,
Box::new(Text::new(
Token::new(0..source.content().len(), source),
self.to_string(),
)),
);
}
parser.push(document, Box::new(Text::new(
Token::new(0..source.content().len(), source),
self.to_string()
)));
}
}

View file

@ -1,522 +0,0 @@
use core::fmt;
use std::any::Any;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use blockquote_style::AuthorPos::After;
use blockquote_style::AuthorPos::Before;
use blockquote_style::BlockquoteStyle;
use regex::Match;
use regex::Regex;
use runtime_format::FormatArgs;
use runtime_format::FormatError;
use runtime_format::FormatKey;
use runtime_format::FormatKeyError;
use crate::compiler::compiler::Compiler;
use crate::compiler::compiler::Target;
use crate::compiler::compiler::Target::HTML;
use crate::document::document::Document;
use crate::document::element::ContainerElement;
use crate::document::element::DocumentEnd;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::parser::ParserState;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::source::VirtualSource;
use crate::parser::style::StyleHolder;
use crate::parser::util::process_escaped;
use crate::parser::util::Property;
use crate::parser::util::PropertyParser;
#[derive(Debug)]
pub struct Blockquote {
pub(self) location: Token,
pub(self) content: Vec<Box<dyn Element>>,
pub(self) author: Option<String>,
pub(self) cite: Option<String>,
pub(self) url: Option<String>,
/// Style of the blockquote
pub(self) style: Rc<blockquote_style::BlockquoteStyle>,
}
struct FmtPair<'a>(Target, &'a Blockquote);
impl FormatKey for FmtPair<'_> {
fn fmt(&self, key: &str, f: &mut fmt::Formatter<'_>) -> Result<(), FormatKeyError> {
match key {
"author" => write!(
f,
"{}",
Compiler::sanitize(self.0, self.1.author.as_ref().unwrap_or(&"".into()))
)
.map_err(FormatKeyError::Fmt),
"cite" => write!(
f,
"{}",
Compiler::sanitize(self.0, self.1.cite.as_ref().unwrap_or(&"".into()))
)
.map_err(FormatKeyError::Fmt),
_ => Err(FormatKeyError::UnknownKey),
}
}
}
impl Element for Blockquote {
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Block }
fn element_name(&self) -> &'static str { "Blockquote" }
fn compile(
&self,
compiler: &Compiler,
document: &dyn Document,
cursor: usize,
) -> Result<String, String> {
match compiler.target() {
HTML => {
let mut result = r#"<div class="blockquote-content">"#.to_string();
let format_author = || -> Result<String, String> {
let mut result = String::new();
if self.cite.is_some() || self.author.is_some() {
result += r#"<p class="blockquote-author">"#;
let fmt_pair = FmtPair(compiler.target(), self);
let format_string = match (self.author.is_some(), self.cite.is_some()) {
(true, true) => {
Compiler::sanitize_format(fmt_pair.0, self.style.format[0].as_str())
}
(true, false) => {
Compiler::sanitize_format(fmt_pair.0, self.style.format[1].as_str())
}
(false, false) => {
Compiler::sanitize_format(fmt_pair.0, self.style.format[2].as_str())
}
_ => panic!(""),
};
let args = FormatArgs::new(format_string.as_str(), &fmt_pair);
args.status().map_err(|err| {
format!("Failed to format Blockquote style `{format_string}`: {err}")
})?;
result += args.to_string().as_str();
result += "</p>";
}
Ok(result)
};
if let Some(url) = &self.url {
result += format!(r#"<blockquote cite="{}">"#, Compiler::sanitize(HTML, url))
.as_str();
} else {
result += "<blockquote>";
}
if self.style.author_pos == Before {
result += format_author()?.as_str();
}
let mut in_paragraph = false;
for elem in &self.content {
if elem.downcast_ref::<DocumentEnd>().is_some() {
} else if elem.downcast_ref::<Blockquote>().is_some() {
if in_paragraph {
result += "</p>";
in_paragraph = false;
}
result += elem
.compile(compiler, document, cursor + result.len())?
.as_str();
} else {
if !in_paragraph {
result += "<p>";
in_paragraph = true;
}
result += elem
.compile(compiler, document, cursor + result.len())?
.as_str();
}
}
if in_paragraph {
result += "</p>";
}
result += "</blockquote>";
if self.style.author_pos == After {
result += format_author().map_err(|err| err.to_string())?.as_str();
}
result += "</div>";
Ok(result)
}
_ => todo!(""),
}
}
fn as_container(&self) -> Option<&dyn ContainerElement> { Some(self) }
}
impl ContainerElement for Blockquote {
fn contained(&self) -> &Vec<Box<dyn Element>> { &self.content }
fn push(&mut self, elem: Box<dyn Element>) -> Result<(), String> {
if elem.kind() == ElemKind::Block {
return Err("Cannot add block element inside a blockquote".to_string());
}
self.content.push(elem);
Ok(())
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::blockquote")]
pub struct BlockquoteRule {
start_re: Regex,
continue_re: Regex,
properties: PropertyParser,
}
impl BlockquoteRule {
pub fn new() -> Self {
let mut props = HashMap::new();
props.insert(
"author".to_string(),
Property::new(false, "Quote author".to_string(), None),
);
props.insert(
"cite".to_string(),
Property::new(false, "Quote source".to_string(), None),
);
props.insert(
"url".to_string(),
Property::new(false, "Quote source url".to_string(), None),
);
Self {
start_re: Regex::new(r"(?:^|\n)>(?:\[((?:\\.|[^\\\\])*?)\])?\s*?(.*)").unwrap(),
continue_re: Regex::new(r"(?:^|\n)>\s*?(.*)").unwrap(),
properties: PropertyParser { properties: props },
}
}
fn parse_properties(
&self,
m: Match,
) -> Result<(Option<String>, Option<String>, Option<String>), String> {
let processed = process_escaped('\\', "]", m.as_str());
let pm = self.properties.parse(processed.as_str())?;
let author = pm
.get("author", |_, s| -> Result<String, ()> { Ok(s.to_string()) })
.map(|(_, s)| s)
.ok();
let cite = pm
.get("cite", |_, s| -> Result<String, ()> { Ok(s.to_string()) })
.map(|(_, s)| s)
.ok();
let url = pm
.get("url", |_, s| -> Result<String, ()> { Ok(s.to_string()) })
.map(|(_, s)| s)
.ok();
Ok((author, cite, url))
}
}
impl Rule for BlockquoteRule {
fn name(&self) -> &'static str { "Blockquote" }
fn previous(&self) -> Option<&'static str> { Some("List") }
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
self.start_re
.find_at(cursor.source.content(), cursor.pos)
.map(|m| (m.start(), Box::new([false; 0]) as Box<dyn Any>))
}
fn on_match<'a>(
&self,
state: &ParserState,
document: &'a (dyn Document<'a> + 'a),
cursor: Cursor,
_match_data: Box<dyn Any>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
let mut reports = vec![];
let content = cursor.source.content();
let mut end_cursor = cursor.clone();
if let Some(captures) = self.start_re.captures_at(content, end_cursor.pos) {
if captures.get(0).unwrap().start() != end_cursor.pos {
return (end_cursor, reports);
}
// Advance cursor
end_cursor = end_cursor.at(captures.get(0).unwrap().end());
// Properties
let mut author = None;
let mut cite = None;
let mut url = None;
if let Some(properties) = captures.get(1) {
match self.parse_properties(properties) {
Err(err) => {
reports.push(
Report::build(
ReportKind::Warning,
cursor.source.clone(),
properties.start(),
)
.with_message("Invalid Blockquote Properties")
.with_label(
Label::new((cursor.source.clone(), properties.range()))
.with_message(err)
.with_color(state.parser.colors().warning),
)
.finish(),
);
return (end_cursor, reports);
}
Ok(props) => (author, cite, url) = props,
}
}
// Content
let entry_start = captures.get(0).unwrap().start();
let mut entry_content = captures.get(2).unwrap().as_str().to_string();
while let Some(captures) = self.continue_re.captures_at(content, end_cursor.pos) {
if captures.get(0).unwrap().start() != end_cursor.pos {
break;
}
// Advance cursor
end_cursor = end_cursor.at(captures.get(0).unwrap().end());
let trimmed = captures.get(1).unwrap().as_str().trim_start().trim_end();
entry_content += "\n";
entry_content += trimmed;
}
// Parse entry content
let token = Token::new(entry_start..end_cursor.pos, end_cursor.source.clone());
let entry_src = Rc::new(VirtualSource::new(
token.clone(),
"Blockquote Entry".to_string(),
entry_content,
));
// Parse content
let parsed_doc = state.with_state(|new_state| {
new_state
.parser
.parse(new_state, entry_src, Some(document))
.0
});
// Extract paragraph and nested blockquotes
let mut parsed_content: Vec<Box<dyn Element>> = vec![];
for mut elem in parsed_doc.content().borrow_mut().drain(..) {
if let Some(paragraph) = elem.downcast_mut::<Paragraph>() {
if let Some(last) = parsed_content.last() {
if last.kind() == ElemKind::Inline {
parsed_content.push(Box::new(Text {
location: Token::new(
last.location().end()..last.location().end(),
last.location().source(),
),
content: " ".to_string(),
}) as Box<dyn Element>);
}
}
parsed_content.extend(std::mem::take(&mut paragraph.content));
} else if elem.downcast_ref::<Blockquote>().is_some() {
parsed_content.push(elem);
} else {
reports.push(
Report::build(ReportKind::Error, token.source(), token.range.start)
.with_message("Unable to Parse Blockquote Entry")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message("Blockquotes may only contain paragraphs and other blockquotes")
.with_color(state.parser.colors().error),
)
.finish(),
);
return (end_cursor, reports);
}
}
// Get style
let style = state
.shared
.styles
.borrow()
.current(blockquote_style::STYLE_KEY)
.downcast_rc::<BlockquoteStyle>()
.unwrap();
state.push(
document,
Box::new(Blockquote {
location: Token::new(entry_start..end_cursor.pos, end_cursor.source.clone()),
content: parsed_content,
author,
cite,
url,
style,
}),
);
}
(end_cursor, reports)
}
fn register_styles(&self, holder: &mut StyleHolder) {
holder.set_current(Rc::new(BlockquoteStyle::default()));
}
}
mod blockquote_style {
use serde::Deserialize;
use serde::Serialize;
use crate::impl_elementstyle;
pub static STYLE_KEY: &str = "style.blockquote";
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
pub enum AuthorPos {
Before,
After,
None,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct BlockquoteStyle {
pub author_pos: AuthorPos,
pub format: [String; 3],
}
impl Default for BlockquoteStyle {
fn default() -> Self {
Self {
author_pos: AuthorPos::After,
format: [
"{author}, {cite}".into(),
"{author}".into(),
"{cite}".into(),
],
}
}
}
impl_elementstyle!(BlockquoteStyle, STYLE_KEY);
}
#[cfg(test)]
mod tests {
use crate::elements::paragraph::Paragraph;
use crate::elements::style::Style;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
pub fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
BEFORE
>[author=A, cite=B, url=C] Some entry
> contin**ued here
> **
AFTER
> Another
>
> quote
>>[author=B] Nested
>>> More nested
END
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph { Text{ content == "BEFORE" }; };
Blockquote {
author == Some("A".to_string()),
cite == Some("B".to_string()),
url == Some("C".to_string())
} {
Text { content == "Some entry contin" };
Style;
Text { content == "ued here" };
Style;
};
Paragraph { Text{ content == "AFTER" }; };
Blockquote {
Text { content == "Another" };
Text { content == " " };
Text { content == "quote" };
Blockquote { author == Some("B".to_string()) } {
Text { content == "Nested" };
Blockquote {
Text { content == "More nested" };
};
};
};
Paragraph { Text{ content == "END" }; };
);
}
#[test]
pub fn style() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
@@style.blockquote = {
"author_pos": "Before",
"format": ["{cite} by {author}", "Author: {author}", "From: {cite}"]
}
PRE
>[author=A, cite=B, url=C] Some entry
> contin**ued here
> **
AFTER
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (_, state) = parser.parse(ParserState::new(&parser, None), source, None);
let style = state
.shared
.styles
.borrow()
.current(blockquote_style::STYLE_KEY)
.downcast_rc::<BlockquoteStyle>()
.unwrap();
assert_eq!(style.author_pos, Before);
assert_eq!(
style.format,
[
"{cite} by {author}".to_string(),
"Author: {author}".to_string(),
"From: {cite}".to_string()
]
);
}
}

View file

@ -25,7 +25,7 @@ use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState;
use crate::parser::parser::Parser;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
@ -123,8 +123,8 @@ impl Code {
}
result +=
"<div class=\"code-block-content\"><table cellspacing=\"0\">".to_string().as_str();
for (line_id, line) in self.code.split('\n').enumerate() {
format!("<div class=\"code-block-content\"><table cellspacing=\"0\">").as_str();
for (line_id, line) in self.code.split(|c| c == '\n').enumerate() {
result += "<tr><td class=\"code-block-gutter\">";
// Line number
@ -137,7 +137,7 @@ impl Code {
Err(e) => {
return Err(format!(
"Error highlighting line `{line}`: {}",
e
e.to_string()
))
}
Ok(regions) => {
@ -146,7 +146,7 @@ impl Code {
syntect::html::IncludeBackground::No,
) {
Err(e) => {
return Err(format!("Error highlighting code: {}", e))
return Err(format!("Error highlighting code: {}", e.to_string()))
}
Ok(highlighted) => {
result += if highlighted.is_empty() {
@ -165,14 +165,14 @@ impl Code {
} else if self.block == CodeKind::MiniBlock {
result += "<div class=\"code-block\"><div class=\"code-block-content\"><table cellspacing=\"0\">";
for line in self.code.split('\n') {
for line in self.code.split(|c| c == '\n') {
result += "<tr><td class=\"code-block-line\"><pre>";
// Code
match h.highlight_line(line, Code::get_syntaxes()) {
Err(e) => {
return Err(format!(
"Error highlighting line `{line}`: {}",
e
e.to_string()
))
}
Ok(regions) => {
@ -181,7 +181,7 @@ impl Code {
syntect::html::IncludeBackground::No,
) {
Err(e) => {
return Err(format!("Error highlighting code: {}", e))
return Err(format!("Error highlighting code: {}", e.to_string()))
}
Ok(highlighted) => {
result += if highlighted.is_empty() {
@ -203,7 +203,7 @@ impl Code {
return Err(format!(
"Error highlighting line `{}`: {}",
self.code,
e
e.to_string()
))
}
Ok(regions) => {
@ -212,7 +212,7 @@ impl Code {
syntect::html::IncludeBackground::No,
) {
Err(e) => {
return Err(format!("Error highlighting code: {}", e))
return Err(format!("Error highlighting code: {}", e.to_string()))
}
Ok(highlighted) => result += highlighted.as_str(),
}
@ -244,10 +244,11 @@ impl Cached for Code {
fn key(&self) -> <Self as Cached>::Key {
let mut hasher = Sha512::new();
hasher.input((self.block as usize).to_be_bytes().as_slice());
hasher.input(self.line_offset.to_be_bytes().as_slice());
if let Some(theme) = self.theme
.as_ref() { hasher.input(theme.as_bytes()) }
if let Some(name) = self.name.as_ref() { hasher.input(name.as_bytes()) }
hasher.input((self.line_offset as usize).to_be_bytes().as_slice());
self.theme
.as_ref()
.map(|theme| hasher.input(theme.as_bytes()));
self.name.as_ref().map(|name| hasher.input(name.as_bytes()));
hasher.input(self.language.as_bytes());
hasher.input(self.code.as_bytes());
@ -262,20 +263,22 @@ impl Element for Code {
fn element_name(&self) -> &'static str { "Code Block" }
fn compile(&self, compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> {
fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
match compiler.target() {
Target::HTML => {
static CACHE_INIT: Once = Once::new();
CACHE_INIT.call_once(|| {
if let Some(con) = compiler.cache() {
if let Err(e) = Code::init(con) {
if let Some(mut con) = compiler.cache() {
if let Err(e) = Code::init(&mut con) {
eprintln!("Unable to create cache table: {e}");
}
}
});
if let Some(con) = compiler.cache() {
match self.cached(con, |s| s.highlight_html(compiler)) {
if let Some(mut con) = compiler.cache() {
match self.cached(&mut con, |s| s.highlight_html(compiler)) {
Ok(s) => Ok(s),
Err(e) => match e {
CachedError::SqlErr(e) => {
@ -295,7 +298,6 @@ impl Element for Code {
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::code")]
pub struct CodeRule {
re: [Regex; 2],
properties: PropertyParser,
@ -323,22 +325,21 @@ impl CodeRule {
)
.unwrap(),
],
properties: PropertyParser { properties: props },
properties: PropertyParser::new(props),
}
}
}
impl RegexRule for CodeRule {
fn name(&self) -> &'static str { "Code" }
fn previous(&self) -> Option<&'static str> { Some("Blockquote") }
fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match(
fn on_regex_match<'a>(
&self,
index: usize,
state: &ParserState,
document: &dyn Document,
parser: &dyn Parser,
document: &'a dyn Document,
token: Token,
matches: Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
@ -354,7 +355,7 @@ impl RegexRule for CodeRule {
.with_label(
Label::new((token.source().clone(), token.range.clone()))
.with_message(format!("Code is missing properties: {e}"))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -372,7 +373,7 @@ impl RegexRule for CodeRule {
.with_label(
Label::new((token.source().clone(), props.range()))
.with_message(e)
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -394,7 +395,7 @@ impl RegexRule for CodeRule {
.with_label(
Label::new((token.source().clone(), lang.range()))
.with_message("No language specified")
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -412,9 +413,9 @@ impl RegexRule for CodeRule {
Label::new((token.source().clone(), lang.range()))
.with_message(format!(
"Language `{}` cannot be found",
code_lang.fg(state.parser.colors().info)
code_lang.fg(parser.colors().info)
))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -431,7 +432,7 @@ impl RegexRule for CodeRule {
} else {
util::process_escaped('\\', "``", matches.get(3).unwrap().as_str())
};
if code_content.bytes().last() == Some(b'\n')
if code_content.bytes().last() == Some('\n' as u8)
// Remove newline
{
code_content.pop();
@ -444,7 +445,7 @@ impl RegexRule for CodeRule {
.with_label(
Label::new((token.source().clone(), token.range.clone()))
.with_message("Code content cannot be empty")
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -452,7 +453,8 @@ impl RegexRule for CodeRule {
}
let theme = document
.get_variable("code.theme").map(|var| var.to_string());
.get_variable("code.theme")
.and_then(|var| Some(var.to_string()));
if index == 0
// Block
@ -475,9 +477,9 @@ impl RegexRule for CodeRule {
.with_label(
Label::new((token.source().clone(), token.start()+1..token.end()))
.with_message(format!("Property `line_offset: {}` cannot be converted: {}",
prop.fg(state.parser.colors().info),
err.fg(state.parser.colors().error)))
.with_color(state.parser.colors().warning))
prop.fg(parser.colors().info),
err.fg(parser.colors().error)))
.with_color(parser.colors().warning))
.finish());
return reports;
}
@ -492,9 +494,9 @@ impl RegexRule for CodeRule {
))
.with_message(format!(
"Property `{}` doesn't exist",
err.fg(state.parser.colors().info)
err.fg(parser.colors().info)
))
.with_color(state.parser.colors().warning),
.with_color(parser.colors().warning),
)
.finish(),
);
@ -504,7 +506,7 @@ impl RegexRule for CodeRule {
}
};
state.push(
parser.push(
document,
Box::new(Code::new(
token.clone(),
@ -525,7 +527,7 @@ impl RegexRule for CodeRule {
CodeKind::Inline
};
state.push(
parser.push(
document,
Box::new(Code::new(
token.clone(),
@ -542,7 +544,8 @@ impl RegexRule for CodeRule {
reports
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
// TODO
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
"push_inline".to_string(),
@ -551,9 +554,10 @@ impl RegexRule for CodeRule {
ctx.as_ref().map(|ctx| {
let theme = ctx
.document
.get_variable("code.theme").map(|var| var.to_string());
.get_variable("code.theme")
.and_then(|var| Some(var.to_string()));
ctx.state.push(
ctx.parser.push(
ctx.document,
Box::new(Code {
location: ctx.location.clone(),
@ -581,9 +585,10 @@ impl RegexRule for CodeRule {
ctx.as_ref().map(|ctx| {
let theme = ctx
.document
.get_variable("code.theme").map(|var| var.to_string());
.get_variable("code.theme")
.and_then(|var| Some(var.to_string()));
ctx.state.push(
ctx.parser.push(
ctx.document,
Box::new(Code {
location: ctx.location.clone(),
@ -618,9 +623,10 @@ impl RegexRule for CodeRule {
ctx.as_ref().map(|ctx| {
let theme = ctx
.document
.get_variable("code.theme").map(|var| var.to_string());
.get_variable("code.theme")
.and_then(|var| Some(var.to_string()));
ctx.state.push(
ctx.parser.push(
ctx.document,
Box::new(Code {
location: ctx.location.clone(),
@ -649,7 +655,6 @@ impl RegexRule for CodeRule {
mod tests {
use super::*;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
#[test]
@ -661,7 +666,7 @@ mod tests {
static int INT32_MIN = 0x80000000;
```
%<nml.code.push_block("Lua", "From Lua", "print(\"Hello, World!\")", nil)>%
``Rust,
``Rust
fn fact(n: usize) -> usize
{
match n
@ -677,7 +682,8 @@ fn fact(n: usize) -> usize
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
//let compiler = Compiler::new(Target::HTML, None);
let doc = parser.parse(source, None);
let borrow = doc.content().borrow();
let found = borrow
@ -723,7 +729,8 @@ fn fact(n: usize) -> usize
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
//let compiler = Compiler::new(Target::HTML, None);
let doc = parser.parse(source, None);
let borrow = doc.content().borrow();
let found = borrow

View file

@ -1,131 +1,84 @@
use crate::compiler::compiler::Compiler;
use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::parser::parser::ParserState;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use regex::Captures;
use regex::Regex;
use std::ops::Range;
use std::rc::Rc;
use mlua::{Function, Lua};
use regex::{Captures, Regex};
use crate::{document::document::Document, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}}};
use ariadne::{Report, Label, ReportKind};
use crate::{compiler::compiler::Compiler, document::element::{ElemKind, Element}};
use std::{ops::Range, rc::Rc};
#[derive(Debug)]
pub struct Comment {
pub location: Token,
#[allow(unused)]
pub content: String,
location: Token,
content: String,
}
impl Element for Comment {
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Invisible }
fn element_name(&self) -> &'static str { "Comment" }
fn compile(&self, _compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> {
impl Comment
{
pub fn new(location: Token, content: String ) -> Self {
Self { location: location, content }
}
}
impl Element for Comment
{
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Invisible }
fn element_name(&self) -> &'static str { "Comment" }
fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, _compiler: &Compiler, _document: &dyn Document)
-> Result<String, String> {
Ok("".to_string())
}
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::comment")]
pub struct CommentRule {
re: [Regex; 1],
}
impl CommentRule {
pub fn new() -> Self {
Self {
re: [Regex::new(r"(?:(?:^|\n)|[^\S\n]+)::(.*)").unwrap()],
}
Self { re: [Regex::new(r"\s*::(.*)").unwrap()] }
}
}
impl RegexRule for CommentRule {
fn name(&self) -> &'static str { "Comment" }
fn previous(&self) -> Option<&'static str> { None }
fn regexes(&self) -> &[Regex] { &self.re }
fn on_regex_match(
&self,
_: usize,
state: &ParserState,
document: &dyn Document,
token: Token,
matches: Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
fn on_regex_match<'a>(&self, _: usize, parser: &dyn Parser, document: &'a dyn Document, token: Token, matches: Captures)
-> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![];
let content = match matches.get(1) {
let content = match matches.get(1)
{
None => panic!("Unknown error"),
Some(comment) => {
let trimmed = comment.as_str().trim_start().trim_end().to_string();
if trimmed.is_empty() {
if trimmed.is_empty()
{
reports.push(
Report::build(ReportKind::Warning, token.source(), comment.start())
.with_message("Empty comment")
.with_label(
Label::new((token.source(), comment.range()))
.with_message("Comment is empty")
.with_color(state.parser.colors().warning),
)
.finish(),
);
.with_message("Empty comment")
.with_label(
Label::new((token.source(), comment.range()))
.with_message("Comment is empty")
.with_color(parser.colors().warning))
.finish());
}
trimmed
}
};
parser.push(document, Box::new(
Comment::new(
token.clone(),
content
)
));
state.push(
document,
Box::new(Comment {
location: token.clone(),
content,
}),
);
reports
}
}
#[cfg(test)]
mod tests {
use crate::elements::paragraph::Paragraph;
use crate::elements::style::Style;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
NOT COMMENT: `std::cmp`
:: Commented line
COMMENT ::Test
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Text; Style; Text; Style;
Comment { content == "Commented line" };
Text; Comment { content == "Test" };
};
);
return reports;
}
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
}

View file

@ -1,575 +0,0 @@
use crate::lua::kernel::Kernel;
use std::any::Any;
use std::cell::Ref;
use std::cell::RefCell;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Error::BadArgument;
use mlua::Function;
use mlua::Lua;
use crate::document::document::Document;
use crate::document::document::DocumentAccessors;
use crate::lua::kernel::KernelContext;
use crate::lua::kernel::CTX;
use crate::parser::customstyle::CustomStyle;
use crate::parser::customstyle::CustomStyleToken;
use crate::parser::parser::ParserState;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::state::RuleState;
use crate::parser::state::Scope;
use super::paragraph::Paragraph;
#[derive(Debug)]
struct LuaCustomStyle {
pub(self) name: String,
pub(self) tokens: CustomStyleToken,
pub(self) start: String,
pub(self) end: String,
}
impl CustomStyle for LuaCustomStyle {
fn name(&self) -> &str { self.name.as_str() }
fn tokens(&self) -> &CustomStyleToken { &self.tokens }
fn on_start<'a>(
&self,
location: Token,
state: &ParserState,
document: &'a dyn Document<'a>,
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>> {
let kernel: Ref<'_, Kernel> =
Ref::map(state.shared.kernels.borrow(), |b| b.get("main").unwrap());
//let kernel = RefMut::map(parser_state.shared.kernels.borrow(), |ker| ker.get("main").unwrap());
let ctx = KernelContext {
location: location.clone(),
state,
document,
};
let mut reports = vec![];
kernel.run_with_context(ctx, |lua| {
let chunk = lua.load(self.start.as_str());
if let Err(err) = chunk.eval::<()>() {
reports.push(
Report::build(ReportKind::Error, location.source(), location.start())
.with_message("Lua execution failed")
.with_label(
Label::new((location.source(), location.range.clone()))
.with_message(err.to_string())
.with_color(state.parser.colors().error),
)
.with_note(format!(
"When trying to start custom style {}",
self.name().fg(state.parser.colors().info)
))
.finish(),
);
}
});
reports
}
fn on_end<'a>(
&self,
location: Token,
state: &ParserState,
document: &'a dyn Document<'a>,
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>> {
let kernel: Ref<'_, Kernel> =
Ref::map(state.shared.kernels.borrow(), |b| b.get("main").unwrap());
let ctx = KernelContext {
location: location.clone(),
state,
document,
};
let mut reports = vec![];
kernel.run_with_context(ctx, |lua| {
let chunk = lua.load(self.end.as_str());
if let Err(err) = chunk.eval::<()>() {
reports.push(
Report::build(ReportKind::Error, location.source(), location.start())
.with_message("Lua execution failed")
.with_label(
Label::new((location.source(), location.range.clone()))
.with_message(err.to_string())
.with_color(state.parser.colors().error),
)
.with_note(format!(
"When trying to end custom style {}",
self.name().fg(state.parser.colors().info)
))
.finish(),
);
}
});
reports
}
}
struct CustomStyleState {
toggled: HashMap<String, Token>,
}
impl RuleState for CustomStyleState {
fn scope(&self) -> Scope { Scope::PARAGRAPH }
fn on_remove<'a>(
&self,
state: &ParserState,
document: &dyn Document,
) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![];
self.toggled.iter().for_each(|(style, token)| {
let paragraph = document.last_element::<Paragraph>().unwrap();
let paragraph_end = paragraph
.content
.last().map(|last| (
last.location().source(),
last.location().end() - 1..last.location().end(),
))
.unwrap();
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Unterminated Custom Style")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_order(1)
.with_message(format!(
"Style {} starts here",
style.fg(state.parser.colors().info)
))
.with_color(state.parser.colors().error),
)
.with_label(
Label::new(paragraph_end)
.with_order(1)
.with_message("Paragraph ends here".to_string())
.with_color(state.parser.colors().error),
)
.with_note("Styles cannot span multiple documents (i.e @import)")
.finish(),
);
});
reports
}
}
static STATE_NAME: &str = "elements.custom_style";
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::customstyle")]
pub struct CustomStyleRule;
impl CustomStyleRule {
pub fn new() -> Self { Self{} }
}
impl Rule for CustomStyleRule {
fn name(&self) -> &'static str { "Custom Style" }
fn previous(&self) -> Option<&'static str> { Some("Style") }
fn next_match(&self, state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
let content = cursor.source.content();
let mut closest_match = usize::MAX;
let mut matched_style = (None, false);
state
.shared
.custom_styles
.borrow()
.iter()
.for_each(|(_name, style)| match style.tokens() {
CustomStyleToken::Toggle(s) => {
if let Some(pos) = &content[cursor.pos..].find(s) {
if *pos < closest_match {
closest_match = *pos;
matched_style = (Some(style.clone()), false);
}
}
}
CustomStyleToken::Pair(begin, end) => {
if let Some(pos) = &content[cursor.pos..].find(begin) {
if *pos < closest_match {
closest_match = *pos;
matched_style = (Some(style.clone()), false);
}
}
if let Some(pos) = &content[cursor.pos..].find(end) {
if *pos < closest_match {
closest_match = *pos;
matched_style = (Some(style.clone()), true);
}
}
}
});
if closest_match == usize::MAX {
None
} else {
Some((
closest_match + cursor.pos,
Box::new((matched_style.0.unwrap().clone(), matched_style.1)) as Box<dyn Any>,
))
}
}
fn on_match<'a>(
&self,
state: &ParserState,
document: &'a dyn Document<'a>,
cursor: Cursor,
match_data: Box<dyn Any>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
let (style, end) = match_data
.downcast_ref::<(Rc<dyn CustomStyle>, bool)>()
.unwrap();
let mut rule_state_borrow = state.shared.rule_state.borrow_mut();
let style_state = match rule_state_borrow.get(STATE_NAME) {
Some(rule_state) => rule_state,
// Insert as a new state
None => {
match rule_state_borrow.insert(
STATE_NAME.into(),
Rc::new(RefCell::new(CustomStyleState {
toggled: HashMap::new(),
})),
) {
Err(err) => panic!("{err}"),
Ok(rule_state) => rule_state,
}
}
};
let (close, token) = match style.tokens() {
CustomStyleToken::Toggle(s) => {
let mut borrow = style_state.as_ref().borrow_mut();
let style_state = borrow.downcast_mut::<CustomStyleState>().unwrap();
if style_state.toggled.get(style.name()).is_some() {
// Terminate style
let token = Token::new(cursor.pos..cursor.pos + s.len(), cursor.source.clone());
style_state.toggled.remove(style.name());
(true, token)
} else {
// Start style
let token = Token::new(cursor.pos..cursor.pos + s.len(), cursor.source.clone());
style_state
.toggled
.insert(style.name().into(), token.clone());
(false, token)
}
}
CustomStyleToken::Pair(s_begin, s_end) => {
let mut borrow = style_state.borrow_mut();
let style_state = borrow.downcast_mut::<CustomStyleState>().unwrap();
if *end {
// Terminate style
let token =
Token::new(cursor.pos..cursor.pos + s_end.len(), cursor.source.clone());
if style_state.toggled.get(style.name()).is_none() {
return (
cursor.at(cursor.pos + s_end.len()),
vec![
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Invalid End of Style")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_order(1)
.with_message(format!(
"Cannot end style {} here, is it not started anywhere",
style.name().fg(state.parser.colors().info)
))
.with_color(state.parser.colors().error),
)
.finish(),
],
);
}
style_state.toggled.remove(style.name());
(true, token)
} else {
// Start style
let token = Token::new(
cursor.pos..cursor.pos + s_begin.len(),
cursor.source.clone(),
);
if let Some(start_token) = style_state.toggled.get(style.name()) {
return (
cursor.at(cursor.pos + s_begin.len()),
vec![Report::build(
ReportKind::Error,
start_token.source(),
start_token.start(),
)
.with_message("Invalid Start of Style")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_order(1)
.with_message(format!(
"Style cannot {} starts here",
style.name().fg(state.parser.colors().info)
))
.with_color(state.parser.colors().error),
)
.with_label(
Label::new((start_token.source(), start_token.range.clone()))
.with_order(2)
.with_message(format!(
"Style {} starts previously here",
style.name().fg(state.parser.colors().info)
))
.with_color(state.parser.colors().error),
)
.finish()],
);
}
style_state
.toggled
.insert(style.name().into(), token.clone());
(false, token)
}
}
};
let reports = if close {
style.on_end(token.clone(), state, document)
} else {
style.on_start(token.clone(), state, document)
};
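// SAFETY: the transmute only adjusts the reports' lifetime parameter; the data itself is unchanged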
(cursor.at(token.end()), unsafe {
std::mem::transmute(reports)
})
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
"define_toggled".into(),
lua.create_function(
|_, (name, token, on_start, on_end): (String, String, String, String)| {
let mut result = Ok(());
let style = LuaCustomStyle {
tokens: CustomStyleToken::Toggle(token),
name: name.clone(),
start: on_start,
end: on_end,
};
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
if let Some(_) =
ctx.state.shared.custom_styles.borrow().get(name.as_str())
{
result = Err(BadArgument {
to: Some("define_toggled".to_string()),
pos: 1,
name: Some("name".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Custom style with name `{name}` already exists"
))),
});
return;
}
ctx.state
.shared
.custom_styles
.borrow_mut()
.insert(Rc::new(style));
ctx.state.reset_match("Custom Style").unwrap();
});
});
result
},
)
.unwrap(),
));
bindings.push((
"define_paired".into(),
lua.create_function(
|_,
(name, token_start, token_end, on_start, on_end): (
String,
String,
String,
String,
String,
)| {
let mut result = Ok(());
if token_start == token_end
{
return Err(BadArgument {
to: Some("define_paired".to_string()),
pos: 3,
name: Some("token_end".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Custom style with name `{name}` cannot be defined: The start token must differ from the end token, use `define_toggled` insteda"
))),
});
}
let style = LuaCustomStyle {
tokens: CustomStyleToken::Pair(token_start, token_end),
name: name.clone(),
start: on_start,
end: on_end,
};
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
if let Some(_) = ctx.state.shared.custom_styles.borrow().get(name.as_str()) {
result = Err(BadArgument {
to: Some("define_paired".to_string()),
pos: 1,
name: Some("name".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Custom style with name `{name}` already exists"
))),
});
return;
}
ctx.state.shared.custom_styles.borrow_mut().insert(Rc::new(style));
ctx.state.reset_match("Custom Style").unwrap();
});
});
result
},
)
.unwrap(),
));
bindings
}
}
#[cfg(test)]
mod tests {
use crate::elements::raw::Raw;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
fn toggle() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
%<[main]
function my_style_start()
nml.raw.push("inline", "start")
end
function my_style_end()
nml.raw.push("inline", "end")
end
function red_style_start()
nml.raw.push("inline", "<a style=\"color:red\">")
end
function red_style_end()
nml.raw.push("inline", "</a>")
end
nml.custom_style.define_toggled("My Style", "|", "my_style_start()", "my_style_end()")
nml.custom_style.define_toggled("My Style2", "°", "red_style_start()", "red_style_end()")
>%
pre |styled| post °Hello°.
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Text { content == "pre " };
Raw { content == "start" };
Text { content == "styled" };
Raw { content == "end" };
Text { content == " post " };
Raw { content == "<a style=\"color:red\">" };
Text { content == "Hello" };
Raw { content == "</a>" };
Text { content == "." };
};
);
}
#[test]
fn paired() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
%<[main]
function my_style_start()
nml.raw.push("inline", "start")
end
function my_style_end()
nml.raw.push("inline", "end")
end
function red_style_start()
nml.raw.push("inline", "<a style=\"color:red\">")
end
function red_style_end()
nml.raw.push("inline", "</a>")
end
nml.custom_style.define_paired("My Style", "[", "]", "my_style_start()", "my_style_end()")
nml.custom_style.define_paired("My Style2", "(", ")", "red_style_start()", "red_style_end()")
>%
pre [styled] post (Hello).
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Text { content == "pre " };
Raw { content == "start" };
Text { content == "styled" };
Raw { content == "end" };
Text { content == " post " };
Raw { content == "<a style=\"color:red\">" };
Text { content == "Hello" };
Raw { content == "</a>" };
Text { content == "." };
};
);
}
}

View file

@ -1,224 +0,0 @@
use crate::parser::style::ElementStyle;
use std::any::Any;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Error::BadArgument;
use mlua::Function;
use mlua::Lua;
use mlua::Value;
use regex::Regex;
use crate::document::document::Document;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Source;
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::elemstyle")]
pub struct ElemStyleRule {
start_re: Regex,
}
impl ElemStyleRule {
pub fn new() -> Self {
Self {
start_re: Regex::new(r"(?:^|\n)@@(.*?)=\s*\{").unwrap(),
}
}
/// Finds the JSON substring inside another string
pub fn json_substring(str: &str) -> Option<&str> {
let mut in_string = false;
let mut brace_depth = 0;
let mut escaped = false;
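// Track brace depth and quoted-string/escape state so braces inside strings are ignored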
for (pos, c) in str.char_indices() {
match c {
'{' if !in_string => brace_depth += 1,
'}' if !in_string => brace_depth -= 1,
'\\' if in_string => escaped = !escaped,
'"' if !escaped => in_string = !in_string,
_ => escaped = false,
}
if brace_depth == 0 {
return Some(&str[..=pos]);
}
}
None
}
}
impl Rule for ElemStyleRule {
fn name(&self) -> &'static str { "Element Style" }
fn previous(&self) -> Option<&'static str> { Some("Script") }
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
self.start_re
.find_at(cursor.source.content(), cursor.pos).map(|m| (m.start(), Box::new([false; 0]) as Box<dyn Any>))
}
fn on_match<'a>(
&self,
state: &ParserState,
_document: &'a (dyn Document<'a> + 'a),
cursor: Cursor,
_match_data: Box<dyn Any>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
let mut reports = vec![];
let matches = self
.start_re
.captures_at(cursor.source.content(), cursor.pos)
.unwrap();
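// Position the cursor on the opening `{` of the JSON value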
let mut cursor = cursor.at(matches.get(0).unwrap().end() - 1);
let style: Rc<dyn ElementStyle> = if let Some(key) = matches.get(1) {
let trimmed = key.as_str().trim_start().trim_end();
// Check if empty
if trimmed.is_empty() {
reports.push(
Report::build(ReportKind::Error, cursor.source.clone(), key.start())
.with_message("Empty Style Key")
.with_label(
Label::new((cursor.source.clone(), key.range()))
.with_message("Expected a non-empty style key".to_string())
.with_color(state.parser.colors().error),
)
.finish(),
);
return (cursor, reports);
}
// Check if key exists
if !state.shared.styles.borrow().is_registered(trimmed) {
reports.push(
Report::build(ReportKind::Error, cursor.source.clone(), key.start())
.with_message("Unknown Style Key")
.with_label(
Label::new((cursor.source.clone(), key.range()))
.with_message(format!(
"Could not find a style with key: {}",
trimmed.fg(state.parser.colors().info)
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return (cursor, reports);
}
state.shared.styles.borrow().current(trimmed)
} else {
panic!("Unknown error")
};
// Get value
let new_style = match ElemStyleRule::json_substring(
&cursor.source.clone().content().as_str()[cursor.pos..],
) {
None => {
reports.push(
Report::build(ReportKind::Error, cursor.source.clone(), cursor.pos)
.with_message("Invalid Style Value")
.with_label(
Label::new((cursor.source.clone(), matches.get(0).unwrap().range()))
.with_message("Unable to parse json string after style key".to_string())
.with_color(state.parser.colors().error),
)
.finish(),
);
return (cursor, reports);
}
Some(json) => {
cursor = cursor.at(cursor.pos + json.len());
// Attempt to deserialize
match style.from_json(json) {
Err(err) => {
reports.push(
Report::build(ReportKind::Error, cursor.source.clone(), cursor.pos)
.with_message("Invalid Style Value")
.with_label(
Label::new((
cursor.source.clone(),
cursor.pos..cursor.pos + json.len(),
))
.with_message(format!(
"Failed to serialize `{}` into style with key `{}`: {err}",
json.fg(state.parser.colors().highlight),
style.key().fg(state.parser.colors().info)
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return (cursor, reports);
}
Ok(style) => style,
}
}
};
state.shared.styles.borrow_mut().set_current(new_style);
(cursor, reports)
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
"set".to_string(),
lua.create_function(|lua, (style_key, new_style): (String, Value)| {
let mut result = Ok(());
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
if !ctx
.state
.shared
.styles
.borrow()
.is_registered(style_key.as_str())
{
result = Err(BadArgument {
to: Some("set".to_string()),
pos: 1,
name: Some("style_key".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Unable to find style with key: {style_key}"
))),
});
return;
}
let style = ctx.state.shared.styles.borrow().current(style_key.as_str());
let new_style = match style.from_lua(lua, new_style) {
Err(err) => {
result = Err(err);
return;
}
Ok(new_style) => new_style,
};
ctx.state.shared.styles.borrow_mut().set_current(new_style);
})
});
result
})
.unwrap(),
));
bindings
}
}

View file

@ -1,11 +1,8 @@
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
use std::sync::Once;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState;
use crate::parser::util::Property;
use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser;
@ -18,7 +15,6 @@ use crypto::sha2::Sha512;
use graphviz_rust::cmd::Format;
use graphviz_rust::cmd::Layout;
use graphviz_rust::exec_dot;
use mlua::Error::BadArgument;
use mlua::Function;
use mlua::Lua;
use regex::Captures;
@ -31,6 +27,7 @@ use crate::compiler::compiler::Target;
use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::parser::parser::Parser;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
@ -42,6 +39,7 @@ struct Graphviz {
pub dot: String,
pub layout: Layout,
pub width: String,
pub caption: Option<String>,
}
fn layout_from_str(value: &str) -> Result<Layout, String> {
@ -73,7 +71,7 @@ impl Graphviz {
let split_at = out.split_at(svg_start).1.find('\n').unwrap();
let mut result = format!("<svg width=\"{}\"", self.width);
result.push_str(out.split_at(svg_start + split_at).1);
result.push_str(out.split_at(svg_start+split_at).1);
result
}
@ -102,7 +100,6 @@ impl Cached for Graphviz {
fn key(&self) -> <Self as Cached>::Key {
let mut hasher = Sha512::new();
hasher.input((self.layout as usize).to_be_bytes().as_slice());
hasher.input(self.width.as_bytes());
hasher.input(self.dot.as_bytes());
hasher.result_str()
@ -116,26 +113,23 @@ impl Element for Graphviz {
fn element_name(&self) -> &'static str { "Graphviz" }
fn compile(
&self,
compiler: &Compiler,
_document: &dyn Document,
_cursor: usize,
) -> Result<String, String> {
fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
match compiler.target() {
Target::HTML => {
static CACHE_INIT: Once = Once::new();
CACHE_INIT.call_once(|| {
if let Some(con) = compiler.cache() {
if let Err(e) = Graphviz::init(con) {
if let Some(mut con) = compiler.cache() {
if let Err(e) = Graphviz::init(&mut con) {
eprintln!("Unable to create cache table: {e}");
}
}
});
// TODO: Format svg in a div
if let Some(con) = compiler.cache() {
match self.cached(con, |s| s.dot_to_svg()) {
if let Some(mut con) = compiler.cache() {
match self.cached(&mut con, |s| s.dot_to_svg()) {
Ok(s) => Ok(s),
Err(e) => match e {
CachedError::SqlErr(e) => {
@ -156,7 +150,6 @@ impl Element for Graphviz {
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::graphviz")]
pub struct GraphRule {
re: [Regex; 1],
properties: PropertyParser,
@ -175,28 +168,31 @@ impl GraphRule {
);
props.insert(
"width".to_string(),
Property::new(true, "SVG width".to_string(), Some("100%".to_string())),
Property::new(
true,
"SVG width".to_string(),
Some("100%".to_string()),
),
);
Self {
re: [Regex::new(
r"\[graph\](?:\[((?:\\.|[^\[\]\\])*?)\])?(?:((?:\\.|[^\\\\])*?)\[/graph\])?",
)
.unwrap()],
properties: PropertyParser { properties: props },
properties: PropertyParser::new(props),
}
}
}
impl RegexRule for GraphRule {
fn name(&self) -> &'static str { "Graphviz" }
fn previous(&self) -> Option<&'static str> { Some("Tex") }
fn name(&self) -> &'static str { "Graph" }
fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match(
&self,
_: usize,
state: &ParserState,
parser: &dyn Parser,
document: &dyn Document,
token: Token,
matches: Captures,
@ -213,10 +209,10 @@ impl RegexRule for GraphRule {
Label::new((token.source().clone(), token.range.clone()))
.with_message(format!(
"Missing terminating `{}` after first `{}`",
"[/graph]".fg(state.parser.colors().info),
"[graph]".fg(state.parser.colors().info)
"[/graph]".fg(parser.colors().info),
"[graph]".fg(parser.colors().info)
))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -236,7 +232,7 @@ impl RegexRule for GraphRule {
.with_label(
Label::new((token.source().clone(), content.range()))
.with_message("Graph code is empty")
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -257,7 +253,7 @@ impl RegexRule for GraphRule {
.with_label(
Label::new((token.source().clone(), token.range.clone()))
.with_message(format!("Graph is missing property: {e}"))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -275,7 +271,7 @@ impl RegexRule for GraphRule {
.with_label(
Label::new((token.source().clone(), props.range()))
.with_message(e)
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -300,10 +296,10 @@ impl RegexRule for GraphRule {
Label::new((token.source().clone(), token.range.clone()))
.with_message(format!(
"Property `layout: {}` cannot be converted: {}",
prop.fg(state.parser.colors().info),
err.fg(state.parser.colors().error)
prop.fg(parser.colors().info),
err.fg(parser.colors().error)
))
.with_color(state.parser.colors().warning),
.with_color(parser.colors().warning),
)
.finish(),
);
@ -319,7 +315,7 @@ impl RegexRule for GraphRule {
token.start() + 1..token.end(),
))
.with_message(err)
.with_color(state.parser.colors().warning),
.with_color(parser.colors().warning),
)
.finish(),
);
@ -346,126 +342,34 @@ impl RegexRule for GraphRule {
))
.with_message(format!(
"Property `{}` is missing",
err.fg(state.parser.colors().info)
err.fg(parser.colors().info)
))
.with_color(state.parser.colors().warning),
.with_color(parser.colors().warning),
)
.finish(),
);
return reports;
}
_ => panic!("Unknown error"),
_ => panic!("Unknown error")
},
};
state.push(
// TODO: Caption
parser.push(
document,
Box::new(Graphviz {
location: token,
dot: graph_content,
layout: graph_layout,
width: graph_width,
caption: None,
}),
);
reports
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
"push".to_string(),
lua.create_function(|_, (layout, width, dot): (String, String, String)| {
let mut result = Ok(());
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
let layout = match layout_from_str(layout.as_str()) {
Err(err) => {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 1,
name: Some("layout".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Unable to get layout type: {err}"
))),
});
return;
}
Ok(layout) => layout,
};
ctx.state.push(
ctx.document,
Box::new(Graphviz {
location: ctx.location.clone(),
dot,
layout,
width,
}),
);
})
});
result
})
.unwrap(),
));
bindings
}
}
#[cfg(test)]
mod tests {
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
pub fn parse() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
[graph][width=200px, layout=neato]
Some graph...
[/graph]
[graph]
Another graph
[/graph]
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Graphviz { width == "200px", dot == "Some graph..." };
Graphviz { dot == "Another graph" };
);
}
#[test]
pub fn lua() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
%<nml.graphviz.push("neato", "200px", "Some graph...")>%
%<nml.graphviz.push("dot", "", "Another graph")>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Graphviz { width == "200px", dot == "Some graph..." };
Graphviz { dot == "Another graph" };
);
}
// TODO
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
}

View file

@ -1,6 +1,6 @@
use crate::document::document::Document;
use crate::document::document::DocumentAccessors;
use crate::parser::parser::ParserState;
use crate::parser::parser::Parser;
use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
@ -10,6 +10,8 @@ use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Function;
use mlua::Lua;
use regex::Captures;
use regex::Regex;
use std::ops::Range;
@ -17,7 +19,6 @@ use std::rc::Rc;
use super::paragraph::Paragraph;
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::import")]
pub struct ImportRule {
re: [Regex; 1],
}
@ -41,14 +42,13 @@ impl ImportRule {
impl RegexRule for ImportRule {
fn name(&self) -> &'static str { "Import" }
fn previous(&self) -> Option<&'static str> { Some("Paragraph") }
fn regexes(&self) -> &[Regex] { &self.re }
fn on_regex_match<'a>(
&self,
_: usize,
state: &ParserState,
parser: &dyn Parser,
document: &'a dyn Document<'a>,
token: Token,
matches: Captures,
@ -57,7 +57,7 @@ impl RegexRule for ImportRule {
// Path
let import_file = match matches.get(2) {
Some(name) => match ImportRule::validate_name(state.parser.colors(), name.as_str()) {
Some(name) => match ImportRule::validate_name(parser.colors(), name.as_str()) {
Err(msg) => {
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
@ -66,9 +66,9 @@ impl RegexRule for ImportRule {
Label::new((token.source(), name.range()))
.with_message(format!(
"Import name `{}` is invalid. {msg}",
name.as_str().fg(state.parser.colors().highlight)
name.as_str().fg(parser.colors().highlight)
))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -85,9 +85,9 @@ impl RegexRule for ImportRule {
Label::new((token.source(), name.range()))
.with_message(format!(
"Unable to access file `{}`",
filename.fg(state.parser.colors().highlight)
filename.fg(parser.colors().highlight)
))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -104,9 +104,9 @@ impl RegexRule for ImportRule {
Label::new((token.source(), name.range()))
.with_message(format!(
"Path `{}` is not a file!",
filename.fg(state.parser.colors().highlight)
filename.fg(parser.colors().highlight)
))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -121,8 +121,7 @@ impl RegexRule for ImportRule {
// [Optional] import as
let import_as = match matches.get(1) {
Some(as_name) => match ImportRule::validate_as(state.parser.colors(), as_name.as_str())
{
Some(as_name) => match ImportRule::validate_as(parser.colors(), as_name.as_str()) {
Ok(as_name) => as_name,
Err(msg) => {
result.push(
@ -132,9 +131,9 @@ impl RegexRule for ImportRule {
Label::new((token.source(), as_name.range()))
.with_message(format!(
"Canot import `{import_file}` as `{}`. {msg}",
as_name.as_str().fg(state.parser.colors().highlight)
as_name.as_str().fg(parser.colors().highlight)
))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -154,7 +153,7 @@ impl RegexRule for ImportRule {
.with_label(
Label::new((token.source(), token.range))
.with_message(format!("Failed to read content from path `{path}`"))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -162,15 +161,12 @@ impl RegexRule for ImportRule {
}
};
state.with_state(|new_state| {
let (import_doc, _) = new_state.parser.parse(new_state, import, Some(document));
document.merge(import_doc.content(), import_doc.scope(), Some(&import_as));
});
let import_doc = parser.parse(import, Some(document));
document.merge(import_doc.content(), import_doc.scope(), Some(&import_as));
// Close paragraph
// TODO2: Check if this is safe to remove
if document.last_element::<Paragraph>().is_none() {
state.push(
if document.last_element::<Paragraph>().is_some() {
parser.push(
document,
Box::new(Paragraph {
location: Token::new(token.end()..token.end(), token.source()),
@ -179,6 +175,8 @@ impl RegexRule for ImportRule {
);
}
result
return result;
}
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
}

View file

@ -1,980 +0,0 @@
use crate::compiler::compiler::Compiler;
use crate::compiler::compiler::Target;
use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::lua::kernel::CTX;
use crate::parser::layout::LayoutHolder;
use crate::parser::layout::LayoutType;
use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::state::RuleState;
use crate::parser::state::Scope;
use crate::parser::util::process_escaped;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Error::BadArgument;
use mlua::Function;
use mlua::Lua;
use regex::Captures;
use regex::Match;
use regex::Regex;
use regex::RegexBuilder;
use std::any::Any;
use std::cell::RefCell;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use std::str::FromStr;
use std::sync::Arc;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum LayoutToken {
Begin,
Next,
End,
}
impl FromStr for LayoutToken {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"Begin" | "begin" => Ok(LayoutToken::Begin),
"Next" | "next" => Ok(LayoutToken::Next),
"End" | "end" => Ok(LayoutToken::End),
_ => Err(format!("Unable to find LayoutToken with name: {s}")),
}
}
}
mod default_layouts {
use crate::parser::layout::LayoutType;
use crate::parser::util::Property;
use crate::parser::util::PropertyParser;
use super::*;
#[derive(Debug)]
pub struct Centered(PropertyParser);
impl Default for Centered {
fn default() -> Self {
let mut properties = HashMap::new();
properties.insert(
"style".to_string(),
Property::new(
true,
"Additional style for the split".to_string(),
Some("".to_string()),
),
);
Self(PropertyParser { properties })
}
}
impl LayoutType for Centered {
fn name(&self) -> &'static str { "Centered" }
fn expects(&self) -> Range<usize> { 1..1 }
fn parse_properties(&self, properties: &str) -> Result<Option<Box<dyn Any>>, String> {
let props = if properties.is_empty() {
self.0.default()
} else {
self.0.parse(properties)
}
.map_err(|err| {
format!(
"Failed to parse properties for layout {}: {err}",
self.name()
)
})?;
let style = props
.get("style", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
.map_err(|err| format!("Failed to parse style: {err:#?}"))
.map(|(_, value)| value)?;
Ok(Some(Box::new(style)))
}
fn compile(
&self,
token: LayoutToken,
_id: usize,
properties: &Option<Box<dyn Any>>,
compiler: &Compiler,
_document: &dyn Document,
) -> Result<String, String> {
match compiler.target() {
Target::HTML => {
let style = match properties
.as_ref()
.unwrap()
.downcast_ref::<String>()
.unwrap()
.as_str()
{
"" => "".to_string(),
str => format!(r#" style={}"#, Compiler::sanitize(compiler.target(), str)),
};
match token {
LayoutToken::Begin => Ok(format!(r#"<div class="centered"{style}>"#)),
LayoutToken::Next => panic!(),
LayoutToken::End => Ok(r#"</div>"#.to_string()),
}
}
_ => todo!(""),
}
}
}
#[derive(Debug)]
pub struct Split(PropertyParser);
impl Default for Split {
fn default() -> Self {
let mut properties = HashMap::new();
properties.insert(
"style".to_string(),
Property::new(
true,
"Additional style for the split".to_string(),
Some("".to_string()),
),
);
Self(PropertyParser { properties })
}
}
impl LayoutType for Split {
fn name(&self) -> &'static str { "Split" }
fn expects(&self) -> Range<usize> { 2..usize::MAX }
fn parse_properties(&self, properties: &str) -> Result<Option<Box<dyn Any>>, String> {
let props = if properties.is_empty() {
self.0.default()
} else {
self.0.parse(properties)
}
.map_err(|err| {
format!(
"Failed to parse properties for layout {}: {err}",
self.name()
)
})?;
let style = props
.get("style", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
.map_err(|err| format!("Failed to parse style: {err:#?}"))
.map(|(_, value)| value)?;
Ok(Some(Box::new(style)))
}
fn compile(
&self,
token: LayoutToken,
_id: usize,
properties: &Option<Box<dyn Any>>,
compiler: &Compiler,
_document: &dyn Document,
) -> Result<String, String> {
match compiler.target() {
Target::HTML => {
let style = match properties
.as_ref()
.unwrap()
.downcast_ref::<String>()
.unwrap()
.as_str()
{
"" => "".to_string(),
str => format!(r#" style={}"#, Compiler::sanitize(compiler.target(), str)),
};
match token {
LayoutToken::Begin => Ok(format!(
r#"<div class="split-container"><div class="split"{style}>"#
)),
LayoutToken::Next => Ok(format!(r#"</div><div class="split"{style}>"#)),
LayoutToken::End => Ok(r#"</div></div>"#.to_string()),
}
}
_ => todo!(""),
}
}
}
}
#[derive(Debug)]
struct Layout {
pub(self) location: Token,
pub(self) layout: Rc<dyn LayoutType>,
pub(self) id: usize,
pub(self) token: LayoutToken,
pub(self) properties: Option<Box<dyn Any>>,
}
impl Element for Layout {
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Block }
fn element_name(&self) -> &'static str { "Layout" }
fn compile(&self, compiler: &Compiler, document: &dyn Document, _cursor: usize) -> Result<String, String> {
self.layout
.compile(self.token, self.id, &self.properties, compiler, document)
}
}
struct LayoutState {
/// The layout stack
pub(self) stack: Vec<(Vec<Token>, Rc<dyn LayoutType>)>,
}
impl RuleState for LayoutState {
fn scope(&self) -> Scope { Scope::DOCUMENT }
fn on_remove<'a>(
&self,
state: &ParserState,
document: &dyn Document,
) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![];
let doc_borrow = document.content().borrow();
let at = doc_borrow.last().unwrap().location();
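// Every stack entry left open at the end of the document is an unterminated layout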
for (tokens, layout_type) in &self.stack {
let start = tokens.first().unwrap();
reports.push(
Report::build(ReportKind::Error, start.source(), start.start())
.with_message("Unterminated Layout")
.with_label(
Label::new((start.source(), start.range.start + 1..start.range.end))
.with_order(1)
.with_message(format!(
"Layout {} stars here",
layout_type.name().fg(state.parser.colors().info)
))
.with_color(state.parser.colors().error),
)
.with_label(
Label::new((at.source(), at.range.clone()))
.with_order(2)
.with_message("Document ends here".to_string())
.with_color(state.parser.colors().error),
)
.finish(),
);
}
reports
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::layout")]
pub struct LayoutRule {
re: [Regex; 3],
}
impl LayoutRule {
pub fn new() -> Self {
Self {
re: [
RegexBuilder::new(
r"(?:^|\n)(?:[^\S\n]*)#\+LAYOUT_BEGIN(?:\[((?:\\.|[^\\\\])*?)\])?(.*)",
)
.multi_line(true)
.build()
.unwrap(),
RegexBuilder::new(
r"(?:^|\n)(?:[^\S\n]*)#\+LAYOUT_NEXT(?:\[((?:\\.|[^\\\\])*?)\])?$",
)
.multi_line(true)
.build()
.unwrap(),
RegexBuilder::new(
r"(?:^|\n)(?:[^\S\n]*)#\+LAYOUT_END(?:\[((?:\\.|[^\\\\])*?)\])?$",
)
.multi_line(true)
.build()
.unwrap(),
],
}
}
pub fn initialize_state(state: &ParserState) -> Rc<RefCell<dyn RuleState>> {
let mut rule_state_borrow = state.shared.rule_state.borrow_mut();
match rule_state_borrow.get(STATE_NAME) {
Some(state) => state,
None => {
// Insert as a new state
match rule_state_borrow.insert(
STATE_NAME.into(),
Rc::new(RefCell::new(LayoutState { stack: vec![] })),
) {
Err(err) => panic!("{err}"),
Ok(state) => state,
}
}
}
}
pub fn parse_properties<'a>(
colors: &ReportColors,
token: &Token,
layout_type: Rc<dyn LayoutType>,
properties: Option<Match>,
) -> Result<Option<Box<dyn Any>>, Report<'a, (Rc<dyn Source>, Range<usize>)>> {
match properties {
None => match layout_type.parse_properties("") {
Ok(props) => Ok(props),
Err(err) => Err(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Unable to parse layout properties")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message(err)
.with_color(colors.error),
)
.finish(),
),
},
Some(props) => {
let trimmed = props.as_str().trim_start().trim_end();
let content = process_escaped('\\', "]", trimmed);
match layout_type.parse_properties(content.as_str()) {
Ok(props) => Ok(props),
Err(err) => {
Err(
Report::build(ReportKind::Error, token.source(), props.start())
.with_message("Unable to parse layout properties")
.with_label(
Label::new((token.source(), props.range()))
.with_message(err)
.with_color(colors.error),
)
.finish(),
)
}
}
}
}
}
}
static STATE_NAME: &str = "elements.layout";
impl RegexRule for LayoutRule {
fn name(&self) -> &'static str { "Layout" }
fn previous(&self) -> Option<&'static str> { Some("Media") }
fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match(
&self,
index: usize,
state: &ParserState,
document: &dyn Document,
token: Token,
matches: Captures,
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![];
let rule_state = LayoutRule::initialize_state(state);
if index == 0
// BEGIN_LAYOUT
{
match matches.get(2) {
None => {
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Missing Layout Name")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message(format!(
"Missing layout name after `{}`",
"#+BEGIN_LAYOUT".fg(state.parser.colors().highlight)
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
Some(name) => {
let trimmed = name.as_str().trim_start().trim_end();
if name.as_str().is_empty() || trimmed.is_empty()
// Empty name
{
reports.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Empty Layout Name")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message(format!(
"Empty layout name after `{}`",
"#+BEGIN_LAYOUT".fg(state.parser.colors().highlight)
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
} else if !name.as_str().chars().next().unwrap().is_whitespace()
// Missing space
{
reports.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid Layout Name")
.with_label(
Label::new((token.source(), name.range()))
.with_message(format!(
"Missing a space before layout name `{}`",
name.as_str().fg(state.parser.colors().highlight)
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
// Get layout
let layout_type = match state.shared.layouts.borrow().get(trimmed) {
None => {
reports.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Unknown Layout")
.with_label(
Label::new((token.source(), name.range()))
.with_message(format!(
"Cannot find layout `{}`",
trimmed.fg(state.parser.colors().highlight)
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
Some(layout_type) => layout_type,
};
// Parse properties
let properties = match LayoutRule::parse_properties(
state.parser.colors(),
&token,
layout_type.clone(),
matches.get(1),
) {
Ok(props) => props,
Err(rep) => {
reports.push(rep);
return reports;
}
};
state.push(
document,
Box::new(Layout {
location: token.clone(),
layout: layout_type.clone(),
id: 0,
token: LayoutToken::Begin,
properties,
}),
);
rule_state
.as_ref()
.borrow_mut()
.downcast_mut::<LayoutState>()
.map_or_else(
|| panic!("Invalid state at: `{STATE_NAME}`"),
|s| s.stack.push((vec![token.clone()], layout_type.clone())),
);
}
};
return reports;
}
let (id, token_type, layout_type, properties) = if index == 1
// LAYOUT_NEXT
{
let mut rule_state_borrow = rule_state.as_ref().borrow_mut();
let layout_state = rule_state_borrow.downcast_mut::<LayoutState>().unwrap();
let (tokens, layout_type) = match layout_state.stack.last_mut() {
None => {
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Invalid #+LAYOUT_NEXT")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message("No active layout found".to_string())
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
Some(last) => last,
};
if layout_type.expects().end < tokens.len()
// Too many blocks
{
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Unexpected #+LAYOUT_NEXT")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message(format!(
"Layout expects a maximum of {} blocks, currently at {}",
layout_type.expects().end.fg(state.parser.colors().info),
tokens.len().fg(state.parser.colors().info),
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
// Parse properties
let properties = match LayoutRule::parse_properties(
state.parser.colors(),
&token,
layout_type.clone(),
matches.get(1),
) {
Ok(props) => props,
Err(rep) => {
reports.push(rep);
return reports;
}
};
tokens.push(token.clone());
(
tokens.len() - 1,
LayoutToken::Next,
layout_type.clone(),
properties,
)
} else {
// LAYOUT_END
let mut rule_state_borrow = rule_state.as_ref().borrow_mut();
let layout_state = rule_state_borrow.downcast_mut::<LayoutState>().unwrap();
let (tokens, layout_type) = match layout_state.stack.last_mut() {
None => {
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Invalid #+LAYOUT_END")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message("No active layout found".to_string())
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
Some(last) => last,
};
if layout_type.expects().start > tokens.len()
// Not enough blocks
{
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Unexpected #+LAYOUT_END")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message(format!(
"Layout expects a minimum of {} blocks, currently at {}",
layout_type.expects().start.fg(state.parser.colors().info),
tokens.len().fg(state.parser.colors().info),
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
// Parse properties
let properties = match LayoutRule::parse_properties(
state.parser.colors(),
&token,
layout_type.clone(),
matches.get(1),
) {
Ok(props) => props,
Err(rep) => {
reports.push(rep);
return reports;
}
};
let layout_type = layout_type.clone();
let id = tokens.len();
layout_state.stack.pop();
(id, LayoutToken::End, layout_type, properties)
};
state.push(
document,
Box::new(Layout {
location: token,
layout: layout_type,
id,
token: token_type,
properties,
}),
);
reports
}
// TODO: Add method to create new layouts
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
"push".to_string(),
lua.create_function(
|_, (token, layout, properties): (String, String, String)| {
let mut result = Ok(());
// Parse token
let layout_token = match LayoutToken::from_str(token.as_str())
{
Err(err) => {
return Err(BadArgument {
to: Some("push".to_string()),
pos: 1,
name: Some("token".to_string()),
cause: Arc::new(mlua::Error::external(err))
});
},
Ok(token) => token,
};
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
// Make sure the rule state has been initialized
let rule_state = LayoutRule::initialize_state(ctx.state);
// Get layout
//
let layout_type = match ctx.state.shared.layouts.borrow().get(layout.as_str())
{
None => {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 2,
name: Some("layout".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Cannot find layout with name `{layout}`"
))),
});
return;
},
Some(layout) => layout,
};
// Parse properties
let layout_properties = match layout_type.parse_properties(properties.as_str()) {
Err(err) => {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 3,
name: Some("properties".to_string()),
cause: Arc::new(mlua::Error::external(err)),
});
return;
},
Ok(properties) => properties,
};
let id = match layout_token {
LayoutToken::Begin => {
ctx.state.push(
ctx.document,
Box::new(Layout {
location: ctx.location.clone(),
layout: layout_type.clone(),
id: 0,
token: LayoutToken::Begin,
properties: layout_properties,
}),
);
rule_state
.as_ref()
.borrow_mut()
.downcast_mut::<LayoutState>()
.map_or_else(
|| panic!("Invalid state at: `{STATE_NAME}`"),
|s| s.stack.push((vec![ctx.location.clone()], layout_type.clone())),
);
return;
},
LayoutToken::Next => {
let mut state_borrow = rule_state.as_ref().borrow_mut();
let layout_state = state_borrow.downcast_mut::<LayoutState>().unwrap();
let (tokens, current_layout_type) = match layout_state.stack.last_mut() {
None => {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 1,
name: Some("token".to_string()),
cause: Arc::new(mlua::Error::external("Unable set next layout: No active layout found".to_string())),
});
return;
}
Some(last) => last,
};
if !Rc::ptr_eq(&layout_type, current_layout_type) {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 2,
name: Some("layout".to_string()),
cause: Arc::new(mlua::Error::external(format!("Invalid layout next, current layout is {} vs {}",
current_layout_type.name(),
layout_type.name())))
});
return;
}
if layout_type.expects().end < tokens.len()
// Too many blocks
{
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 1,
name: Some("token".to_string()),
cause: Arc::new(mlua::Error::external(format!("Unable set layout next: layout {} expect at most {} blocks, currently at {} blocks",
layout_type.name(),
layout_type.expects().end,
tokens.len()
))),
});
return;
}
tokens.push(ctx.location.clone());
tokens.len() - 1
},
LayoutToken::End => {
let mut state_borrow = rule_state.as_ref().borrow_mut();
let layout_state = state_borrow.downcast_mut::<LayoutState>().unwrap();
let (tokens, current_layout_type) = match layout_state.stack.last_mut() {
None => {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 1,
name: Some("token".to_string()),
cause: Arc::new(mlua::Error::external("Unable set layout end: No active layout found".to_string())),
});
return;
}
Some(last) => last,
};
if !Rc::ptr_eq(&layout_type, current_layout_type) {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 2,
name: Some("layout".to_string()),
cause: Arc::new(mlua::Error::external(format!("Invalid layout end, current layout is {} vs {}",
current_layout_type.name(),
layout_type.name())))
});
return;
}
if layout_type.expects().start > tokens.len()
// Not enough blocks
{
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 1,
name: Some("token".to_string()),
cause: Arc::new(mlua::Error::external(format!("Unable set next layout: layout {} expect at least {} blocks, currently at {} blocks",
layout_type.name(),
layout_type.expects().start,
tokens.len()
))),
});
return;
}
let id = tokens.len();
layout_state.stack.pop();
id
}
};
ctx.state.push(
ctx.document,
Box::new(Layout {
location: ctx.location.clone(),
layout: layout_type.clone(),
id,
token: layout_token,
properties: layout_properties,
}),
);
})
});
result
},
)
.unwrap(),
));
bindings
}
fn register_layouts(&self, holder: &mut LayoutHolder) {
holder.insert(Rc::new(default_layouts::Centered::default()));
holder.insert(Rc::new(default_layouts::Split::default()));
}
}
#[cfg(test)]
mod tests {
use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
#+LAYOUT_BEGIN[style=A] Split
A
#+LAYOUT_BEGIN[style=B] Centered
B
#+LAYOUT_END
#+LAYOUT_NEXT[style=C]
C
#+LAYOUT_BEGIN[style=D] Split
D
#+LAYOUT_NEXT[style=E]
E
#+LAYOUT_END
#+LAYOUT_END
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Layout { token == LayoutToken::Begin, id == 0 };
Paragraph {
Text { content == "A" };
};
Layout { token == LayoutToken::Begin, id == 0 };
Paragraph {
Text { content == "B" };
};
Layout { token == LayoutToken::End, id == 1 };
Layout { token == LayoutToken::Next, id == 1 };
Paragraph {
Text { content == "C" };
};
Layout { token == LayoutToken::Begin, id == 0 };
Paragraph {
Text { content == "D" };
};
Layout { token == LayoutToken::Next, id == 1 };
Paragraph {
Text { content == "E" };
};
Layout { token == LayoutToken::End, id == 2 };
Layout { token == LayoutToken::End, id == 2 };
);
}
#[test]
fn lua() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
%<nml.layout.push("begin", "Split", "style=A")>%
A
%<nml.layout.push("Begin", "Centered", "style=B")>%
B
%<nml.layout.push("end", "Centered", "")>%
%<nml.layout.push("next", "Split", "style=C")>%
C
%<nml.layout.push("Begin", "Split", "style=D")>%
D
%<nml.layout.push("Next", "Split", "style=E")>%
E
%<nml.layout.push("End", "Split", "")>%
%<nml.layout.push("End", "Split", "")>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Layout { token == LayoutToken::Begin, id == 0 };
Paragraph {
Text { content == "A" };
};
Layout { token == LayoutToken::Begin, id == 0 };
Paragraph {
Text { content == "B" };
};
Layout { token == LayoutToken::End, id == 1 };
Layout { token == LayoutToken::Next, id == 1 };
Paragraph {
Text { content == "C" };
};
Layout { token == LayoutToken::Begin, id == 0 };
Paragraph {
Text { content == "D" };
};
Layout { token == LayoutToken::Next, id == 1 };
Paragraph {
Text { content == "E" };
};
Layout { token == LayoutToken::End, id == 2 };
Layout { token == LayoutToken::End, id == 2 };
);
}
}

View file

@ -1,77 +1,62 @@
use crate::compiler::compiler::Compiler;
use crate::compiler::compiler::Target;
use crate::document::document::Document;
use crate::document::element::ContainerElement;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState;
use crate::parser::parser::Parser;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::source::VirtualSource;
use crate::parser::util;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Error::BadArgument;
use mlua::Function;
use mlua::Lua;
use regex::Captures;
use regex::Regex;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
#[derive(Debug)]
pub struct Link {
pub location: Token,
/// Display content of link
pub display: Vec<Box<dyn Element>>,
/// Url of link
pub url: String,
location: Token,
name: String, // Link name
url: String, // Link url
}
impl Link {
pub fn new(location: Token, name: String, url: String) -> Self {
Self {
location: location,
name,
url,
}
}
}
impl Element for Link {
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Inline }
fn element_name(&self) -> &'static str { "Link" }
fn compile(&self, compiler: &Compiler, document: &dyn Document, cursor: usize) -> Result<String, String> {
fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
match compiler.target() {
Target::HTML => {
let mut result = format!(
"<a href=\"{}\">",
Compiler::sanitize(compiler.target(), self.url.as_str())
);
for elem in &self.display {
result += elem.compile(compiler, document, cursor+result.len())?.as_str();
}
result += "</a>";
Ok(result)
}
_ => todo!(""),
Target::HTML => Ok(format!(
"<a href=\"{}\">{}</a>",
Compiler::sanitize(compiler.target(), self.url.as_str()),
Compiler::sanitize(compiler.target(), self.name.as_str()),
)),
Target::LATEX => Ok(format!(
"\\href{{{}}}{{{}}}",
Compiler::sanitize(compiler.target(), self.url.as_str()),
Compiler::sanitize(compiler.target(), self.name.as_str()),
)),
}
}
fn as_container(&self) -> Option<&dyn ContainerElement> { Some(self) }
}
impl ContainerElement for Link {
fn contained(&self) -> &Vec<Box<dyn Element>> { &self.display }
fn push(&mut self, elem: Box<dyn Element>) -> Result<(), String> {
if elem.downcast_ref::<Link>().is_some() {
return Err("Tried to push a link inside of a link".to_string());
}
self.display.push(elem);
Ok(())
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::link")]
pub struct LinkRule {
re: [Regex; 1],
}
@ -86,75 +71,54 @@ impl LinkRule {
impl RegexRule for LinkRule {
fn name(&self) -> &'static str { "Link" }
fn previous(&self) -> Option<&'static str> { Some("Link") }
fn regexes(&self) -> &[Regex] { &self.re }
fn on_regex_match<'a>(
&self,
_: usize,
state: &ParserState,
document: &'a (dyn Document<'a> + 'a),
parser: &dyn Parser,
document: &'a dyn Document,
token: Token,
matches: Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![];
let link_display = match matches.get(1) {
Some(display) => {
if display.as_str().is_empty() {
reports.push(
Report::build(ReportKind::Error, token.source(), display.start())
let mut result = vec![];
let link_name = match matches.get(1) {
Some(name) => {
if name.as_str().is_empty() {
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Empty link name")
.with_label(
Label::new((token.source().clone(), display.range()))
Label::new((token.source().clone(), name.range()))
.with_message("Link name is empty")
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
return reports;
return result;
}
let processed = util::process_escaped('\\', "]", display.as_str());
if processed.is_empty() {
reports.push(
Report::build(ReportKind::Error, token.source(), display.start())
// TODO: process into separate document...
let text_content = util::process_text(document, name.as_str());
if text_content.as_str().is_empty() {
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Empty link name")
.with_label(
Label::new((token.source(), display.range()))
Label::new((token.source(), name.range()))
.with_message(format!(
"Link name is empty. Once processed, `{}` yields `{}`",
display.as_str().fg(state.parser.colors().highlight),
processed.fg(state.parser.colors().highlight),
name.as_str().fg(parser.colors().highlight),
text_content.as_str().fg(parser.colors().highlight),
))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
return reports;
}
let source = Rc::new(VirtualSource::new(
Token::new(display.range(), token.source()),
"Link Display".to_string(),
processed,
));
match util::parse_paragraph(state, source, document) {
Err(err) => {
reports.push(
Report::build(ReportKind::Error, token.source(), display.start())
.with_message("Failed to parse link display")
.with_label(
Label::new((token.source(), display.range()))
.with_message(err.to_string())
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
Ok(mut paragraph) => std::mem::take(&mut paragraph.content),
return result;
}
text_content
}
_ => panic!("Empty link name"),
};
@ -162,173 +126,50 @@ impl RegexRule for LinkRule {
let link_url = match matches.get(2) {
Some(url) => {
if url.as_str().is_empty() {
reports.push(
result.push(
Report::build(ReportKind::Error, token.source(), url.start())
.with_message("Empty link url")
.with_label(
Label::new((token.source(), url.range()))
.with_message("Link url is empty")
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
return reports;
return result;
}
let text_content = util::process_text(document, url.as_str());
if text_content.is_empty() {
reports.push(
if text_content.as_str().is_empty() {
result.push(
Report::build(ReportKind::Error, token.source(), url.start())
.with_message("Empty link url")
.with_label(
Label::new((token.source(), url.range()))
.with_message(format!(
"Link url is empty. Once processed, `{}` yields `{}`",
url.as_str().fg(state.parser.colors().highlight),
text_content.as_str().fg(state.parser.colors().highlight),
url.as_str().fg(parser.colors().highlight),
text_content.as_str().fg(parser.colors().highlight),
))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
return reports;
return result;
}
text_content
}
_ => panic!("Empty link url"),
};
state.push(
parser.push(
document,
Box::new(Link {
location: token,
display: link_display,
url: link_url,
}),
Box::new(Link::new(token.clone(), link_name, link_url)),
);
reports
return result;
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
"push".to_string(),
lua.create_function(|_, (display, url): (String, String)| {
let mut result = Ok(());
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
let source = Rc::new(VirtualSource::new(
ctx.location.clone(),
"Link Display".to_string(),
display,
));
let display_content =
match util::parse_paragraph(ctx.state, source, ctx.document) {
Err(err) => {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 1,
name: Some("display".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Failed to parse link display: {err}"
))),
});
return;
}
Ok(mut paragraph) => {
std::mem::take(&mut paragraph.content)
}
};
ctx.state.push(
ctx.document,
Box::new(Link {
location: ctx.location.clone(),
display: display_content,
url,
}),
);
})
});
result
})
.unwrap(),
));
bindings
}
}
#[cfg(test)]
mod tests {
use crate::elements::paragraph::Paragraph;
use crate::elements::style::Style;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
Some [link](url).
[**BOLD link**](another url)
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Text { content == "Some " };
Link { url == "url" } { Text { content == "link" }; };
Text { content == "." };
Link { url == "another url" } {
Style;
Text { content == "BOLD link" };
Style;
};
};
);
}
#[test]
fn lua() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
Some %<nml.link.push("link", "url")>%.
%<
nml.link.push("**BOLD link**", "another url")
>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Text { content == "Some " };
Link { url == "url" } { Text { content == "link" }; };
Text { content == "." };
Link { url == "another url" } {
Style;
Text { content == "BOLD link" };
Style;
};
};
);
}
// TODO
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
}

View file

@ -1,515 +1,341 @@
use std::any::Any;
use std::cell::Ref;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use std::{any::Any, cell::Ref, ops::Range, rc::Rc};
use crate::compiler::compiler::Compiler;
use crate::compiler::compiler::Target;
use crate::document::document::Document;
use crate::document::document::DocumentAccessors;
use crate::document::element::ContainerElement;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::parser::parser::ParserState;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::source::VirtualSource;
use crate::parser::util;
use crate::parser::util::process_escaped;
use crate::parser::util::Property;
use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use regex::Match;
use crate::{compiler::compiler::{Compiler, Target}, document::{document::{Document, DocumentAccessors}, element::{ElemKind, Element}}, parser::{parser::Parser, rule::Rule, source::{Cursor, Source, Token, VirtualSource}}};
use ariadne::{Label, Report, ReportKind};
use mlua::{Function, Lua};
use regex::Regex;
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum MarkerKind {
Open,
Close,
}
#[derive(Debug)]
pub struct ListMarker {
pub(self) location: Token,
pub(self) numbered: bool,
pub(self) kind: MarkerKind,
}
impl Element for ListMarker {
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Block }
fn element_name(&self) -> &'static str { "List Marker" }
fn compile(&self, compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> {
match compiler.target() {
Target::HTML => match (self.kind, self.numbered) {
(MarkerKind::Close, true) => Ok("</ol>".to_string()),
(MarkerKind::Close, false) => Ok("</ul>".to_string()),
(MarkerKind::Open, true) => Ok("<ol>".to_string()),
(MarkerKind::Open, false) => Ok("<ul>".to_string()),
},
_ => todo!(),
}
}
}
use super::paragraph::Paragraph;
#[derive(Debug)]
pub struct ListEntry {
pub(self) location: Token,
pub(self) numbering: Vec<(bool, usize)>,
pub(self) content: Vec<Box<dyn Element>>,
pub(self) bullet: Option<String>,
location: Token,
numbering: Vec<(bool, usize)>,
content: Vec<Box<dyn Element>>,
// TODO bullet_maker : FnMut<...>
}
impl Element for ListEntry {
fn location(&self) -> &Token { &self.location }
impl ListEntry {
pub fn new(location: Token, numbering: Vec<(bool, usize)>, content: Vec<Box<dyn Element>>) -> Self {
Self { location, numbering, content }
}
}
fn kind(&self) -> ElemKind { ElemKind::Block }
#[derive(Debug)]
pub struct List
{
location: Token,
entries: Vec<ListEntry>
}
fn element_name(&self) -> &'static str { "List Entry" }
impl List
{
pub fn new(location: Token) -> Self
{
Self
{
location,
entries: Vec::new()
}
}
fn compile(&self, compiler: &Compiler, document: &dyn Document, cursor: usize) -> Result<String, String> {
match compiler.target() {
pub fn push(&mut self, entry: ListEntry)
{
self.location.range = self.location.start()..entry.location.end();
self.entries.push(entry);
}
}
impl Element for List
{
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Block }
fn element_name(&self) -> &'static str { "List" }
fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String> {
match compiler.target()
{
Target::HTML => {
let mut result = String::new();
if let Some((numbered, number)) = self.numbering.last()
//TODO: Do something about indexing
let mut current_list: Vec<bool> = vec![];
let mut match_stack = |result: &mut String, target: &Vec<(bool, usize)>| {
// Find index after which current_list and target differ
let mut match_idx = 0usize;
for i in 0..current_list.len()
{
if i >= target.len() || current_list[i] != target[i].0 { break }
else { match_idx = i+1; }
}
// Close until same match
for _ in match_idx..current_list.len()
{
result.push_str(["</ul>", "</ol>"][current_list.pop().unwrap() as usize]);
}
// Open
for i in match_idx..target.len()
{
result.push_str(["<ul>", "<ol>"][target[i].0 as usize]);
current_list.push(target[i].0);
}
};
match self.entries.iter()
.try_for_each(|ent|
{
match_stack(&mut result, &ent.numbering);
result.push_str("<li>");
match ent.content.iter().enumerate()
.try_for_each(|(_idx, elem)| {
match elem.compile(compiler, document) {
Err(e) => Err(e),
Ok(s) => { result.push_str(s.as_str()); Ok(()) }
}
})
{
Err(e) => Err(e),
_ => {
result.push_str("</li>");
Ok(())
}
}
})
{
if *numbered {
result += format!("<li value=\"{number}\">").as_str();
}
else {
result += "<li>";
}
Err(e) => return Err(e),
_ => {}
}
for elem in &self.content {
result += elem.compile(compiler, document, cursor+result.len())?.as_str();
}
result += "</li>";
match_stack(&mut result, &Vec::<(bool, usize)>::new());
Ok(result)
}
_ => todo!(),
Target::LATEX => Err("Unimplemented compiler".to_string())
}
}
fn as_container(&self) -> Option<&dyn ContainerElement> { Some(self) }
}
}
impl ContainerElement for ListEntry {
fn contained(&self) -> &Vec<Box<dyn Element>> { &self.content }
fn push(&mut self, elem: Box<dyn Element>) -> Result<(), String> {
if elem.kind() == ElemKind::Block {
return Err("Cannot add block element inside a list".to_string());
/*
impl Element for ListEntry
{
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Inline }
fn element_name(&self) -> &'static str { "List" }
fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, compiler: &Compiler) -> Result<String, String> {
lazy_static! {
static ref STATE_NAME : &'static str = "list.state";
static ref LIST_OPEN : [&'static str; 2] = ["<ul>", "<ol>"];
static ref LIST_CLOSE : [&'static str; 2] = ["</ul>", "</ol>"];
}
self.content.push(elem);
Ok(())
// TODO: State.shouldpreserve?
// Called upon every element
//let state = compiler.get_state_mut::<ListState, _>(*STATE_NAME)
//.or_else(|| {
// compiler.insert_state(STATE_NAME.to_string(), Box::new(ListState(Vec::new())) as Box<dyn Any>);
// compiler.get_state_mut::<ListState, _>(*STATE_NAME)
//}).unwrap();
match compiler.target()
{
Target::HTML => {
let mut result = String::new();
//TODO: Do something about indexing
//&self.numbering.iter()
// .zip(&state.0)
// .for_each(|((wants_numbered, _), is_numbered)|
// {
//
// });
result.push_str("<li>");
match self.content.iter()
.try_for_each(|ent| match ent.compile(compiler) {
Err(e) => Err(e),
Ok(s) => Ok(result.push_str(s.as_str())),
})
{
Err(e) => return Err(e),
_ => {}
}
result.push_str("</li>");
//result.push_str(LIST_OPEN[self.numbered as usize]);
//self.entries.iter()
// .for_each(|(_index, entry)|
// result.push_str(format!("<li>{}</li>", compiler.compile(entry)).as_str()));
//result.push_str(LIST_CLOSE[self.numbered as usize]);
Ok(result)
}
Target::LATEX => Err("Unimplemented compiler".to_string())
}
}
}
*/
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::list")]
pub struct ListRule {
pub struct ListRule
{
start_re: Regex,
continue_re: Regex,
properties: PropertyParser,
continue_re: Regex
}
impl ListRule {
pub fn new() -> Self {
let mut props = HashMap::new();
props.insert(
"offset".to_string(),
Property::new(false, "Entry numbering offset".to_string(), None),
);
props.insert(
"bullet".to_string(),
Property::new(false, "Entry bullet".to_string(), None),
);
Self {
start_re: Regex::new(r"(?:^|\n)(?:[^\S\r\n]+)([*-]+)(?:\[((?:\\.|[^\\\\])*?)\])?(.*)")
.unwrap(),
continue_re: Regex::new(r"(?:^|\n)([^\S\r\n]+)([^\s].*)").unwrap(),
properties: PropertyParser { properties: props },
start_re: Regex::new(r"(?:^|\n)(?:[^\S\r\n]+)([*-]+).*").unwrap(),
continue_re: Regex::new(r"(?:^|\n)([^\S\r\n]+).*").unwrap(),
}
}
fn push_markers(
token: &Token,
state: &ParserState,
document: &dyn Document,
current: &Vec<(bool, usize)>,
target: &Vec<(bool, usize)>,
) {
let mut start_pos = 0;
for i in 0..std::cmp::min(target.len(), current.len()) {
if current[i].0 != target[i].0 {
break;
}
start_pos += 1;
}
// Close
for i in start_pos..current.len() {
state.push(
document,
Box::new(ListMarker {
location: token.clone(),
kind: MarkerKind::Close,
numbered: current[current.len() - 1 - (i - start_pos)].0,
}),
);
}
// Open
for i in start_pos..target.len() {
state.push(
document,
Box::new(ListMarker {
location: token.clone(),
kind: MarkerKind::Open,
numbered: target[i].0,
}),
);
}
}
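Both `push_markers` (master) and the older `match_stack` closure (batch) implement the same transition: find the common prefix between the previous entry's nesting and the new one, close the levels that no longer apply from the innermost out, then open the missing ones. A self-contained sketch of that prefix diff, emitting tag strings instead of `ListMarker` elements:

// `current` and `target` hold one bool per nesting level:
// true for a numbered (<ol>) level, false for an unordered (<ul>) one.
fn transition(current: &[bool], target: &[bool]) -> String {
    // Length of the common prefix that can stay open.
    let keep = current
        .iter()
        .zip(target.iter())
        .take_while(|(a, b)| a == b)
        .count();

    let mut out = String::new();
    // Close the levels that differ, innermost first.
    for &numbered in current[keep..].iter().rev() {
        out.push_str(if numbered { "</ol>" } else { "</ul>" });
    }
    // Open the new levels.
    for &numbered in &target[keep..] {
        out.push_str(if numbered { "<ol>" } else { "<ul>" });
    }
    out
}

fn main() {
    // "*" -> "*-": keep the outer <ul>, open an inner <ol>
    assert_eq!(transition(&[false], &[false, true]), "<ol>");
    // "*-" -> "*": close the inner <ol>
    assert_eq!(transition(&[false, true], &[false]), "</ol>");
    // "*-" -> "-": close both, reopen as a single <ol>
    assert_eq!(transition(&[false, true], &[true]), "</ol></ul><ol>");
}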
fn parse_properties(&self, m: Match) -> Result<(Option<usize>, Option<String>), String> {
let processed = process_escaped('\\', "]", m.as_str());
let pm = self.properties.parse(processed.as_str())?;
let offset = match pm.get("offset", |_, s| s.parse::<usize>()) {
Ok((_, val)) => Some(val),
Err(err) => match err {
PropertyMapError::ParseError(err) => {
return Err(format!("Failed to parse `offset`: {err}"))
}
PropertyMapError::NotFoundError(_) => None,
},
};
let bullet = pm
.get("bullet", |_, s| -> Result<String, ()> { Ok(s.to_string()) })
.map(|(_, s)| s)
.ok();
Ok((offset, bullet))
}
fn parse_depth(depth: &str, document: &dyn Document, offset: usize) -> Vec<(bool, usize)> {
fn parse_depth(depth: &str, document: &dyn Document) -> Vec<(bool, usize)>
{
let mut parsed = vec![];
let prev_entry = document
.last_element::<ListEntry>()
.and_then(|entry| Ref::filter_map(entry, |e| Some(&e.numbering)).ok());
// FIXME: Previous iteration used to recursively retrieve the list indent
let prev_entry = document.last_element::<List>()
.and_then(|list| Ref::filter_map(list, |m| m.entries.last() ).ok() )
.and_then(|entry| Ref::filter_map(entry, |e| Some(&e.numbering)).ok() );
let mut continue_match = true;
depth.chars().enumerate().for_each(|(idx, c)| {
let number = if offset == usize::MAX {
prev_entry
.as_ref()
.and_then(|v| {
if !continue_match {
return None;
}
let numbered = c == '-';
depth.chars().enumerate().for_each(|(idx, c)|
{
let number = prev_entry.as_ref()
.and_then(|v| {
if !continue_match { return None }
let numbered = c == '-';
match v.get(idx) {
None => None,
Some((prev_numbered, prev_idx)) => {
if *prev_numbered != numbered {
continue_match = false;
None
}
// New depth
else if idx + 1 == v.len() {
Some(prev_idx + 1)
}
// Increase from previous
else {
Some(*prev_idx)
} // Do nothing
}
match v.get(idx)
{
None => None,
Some((prev_numbered, prev_idx)) => {
if *prev_numbered != numbered { continue_match = false; None } // New depth
else if idx+1 == v.len() { Some(prev_idx+1) } // Increase from previous
else { Some(*prev_idx) } // Do nothing
}
})
.unwrap_or(1)
} else {
offset
};
}
})
.or(Some(0usize))
.unwrap();
match c {
match c
{
'*' => parsed.push((false, number)),
'-' => parsed.push((true, number)),
_ => panic!("Unimplemented"),
_ => panic!("Unimplemented")
}
});
parsed
return parsed;
}
}
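`parse_depth` converts the leading run of `*`/`-` into a `(numbered, counter)` pair per nesting level: levels that keep the same kind as the previous entry reuse its counter, the deepest matching level is incremented, and any mismatch restarts counting at 1 (or 0 on the batch branch). A hedged standalone sketch of that rule, leaving out the `offset` property handling from master:

// previous: numbering of the last list entry, if any.
fn parse_depth(depth: &str, previous: Option<&[(bool, usize)]>) -> Vec<(bool, usize)> {
    let mut parsed = Vec::new();
    let mut still_matching = true;

    for (idx, c) in depth.chars().enumerate() {
        let numbered = c == '-'; // '-' marks a numbered (<ol>) level
        let number = previous
            .and_then(|prev| {
                if !still_matching {
                    return None;
                }
                match prev.get(idx) {
                    None => None,
                    Some(&(prev_numbered, prev_idx)) => {
                        if prev_numbered != numbered {
                            still_matching = false;
                            None // kind changed: restart numbering below
                        } else if idx + 1 == prev.len() {
                            Some(prev_idx + 1) // deepest shared level: increment
                        } else {
                            Some(prev_idx) // parent level: keep its counter
                        }
                    }
                }
            })
            .unwrap_or(1);
        parsed.push((numbered, number));
    }
    parsed
}

fn main() {
    assert_eq!(parse_depth("*", None), vec![(false, 1)]);
    assert_eq!(parse_depth("*", Some(&[(false, 1)])), vec![(false, 2)]);
    // Mirrors the test below: "* New list" followed by "*- A"
    assert_eq!(
        parse_depth("*-", Some(&[(false, 1)])),
        vec![(false, 2), (true, 1)]
    );
}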
impl Rule for ListRule {
impl Rule for ListRule
{
fn name(&self) -> &'static str { "List" }
fn previous(&self) -> Option<&'static str> { Some("Raw") }
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
self.start_re
.find_at(cursor.source.content(), cursor.pos).map(|m| (m.start(), Box::new([false; 0]) as Box<dyn Any>))
fn next_match(&self, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
self.start_re.find_at(cursor.source.content(), cursor.pos)
.map_or(None,
|m| Some((m.start(), Box::new([false;0]) as Box<dyn Any>)) )
}
fn on_match<'a>(
&self,
state: &ParserState,
document: &'a dyn Document<'a>,
cursor: Cursor,
_match_data: Box<dyn Any>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
fn on_match<'a>(&self, parser: &dyn Parser, document: &'a dyn Document<'a>, cursor: Cursor, _match_data: Option<Box<dyn Any>>)
-> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
let mut reports = vec![];
let content = cursor.source.content();
let mut end_cursor = cursor.clone();
loop {
if let Some(captures) = self.start_re.captures_at(content, end_cursor.pos) {
if captures.get(0).unwrap().start() != end_cursor.pos {
break;
}
// Advance cursor
end_cursor = end_cursor.at(captures.get(0).unwrap().end());
let (end_cursor, numbering, source) = match self.start_re.captures_at(content, cursor.pos) {
None => panic!("Unknown error"),
Some(caps) => {
let mut end_pos = caps.get(0).unwrap().end();
// Properties
let mut offset = None;
let mut bullet = None;
if let Some(properties) = captures.get(2) {
match self.parse_properties(properties) {
Err(err) => {
reports.push(
Report::build(
ReportKind::Warning,
cursor.source.clone(),
properties.start(),
)
.with_message("Invalid List Entry Properties")
.with_label(
Label::new((cursor.source.clone(), properties.range()))
.with_message(err)
.with_color(state.parser.colors().warning),
)
.finish(),
);
break;
}
Ok(props) => (offset, bullet) = props,
}
}
// Get bullet from previous entry if it exists
if bullet.is_none() {
bullet = document
.last_element::<ListEntry>()
.and_then(|prev| prev.bullet.clone())
}
// Depth
let depth = ListRule::parse_depth(
captures.get(1).unwrap().as_str(),
document,
offset.unwrap_or(usize::MAX),
);
// Content
let entry_start = captures.get(0).unwrap().start();
let mut entry_content = captures.get(3).unwrap().as_str().to_string();
let mut spacing: Option<(Range<usize>, &str)> = None;
while let Some(captures) = self.continue_re.captures_at(content, end_cursor.pos) {
// Break if next element is another entry
if captures.get(0).unwrap().start() != end_cursor.pos
|| captures
.get(2)
.unwrap()
.as_str()
.find(['*', '-'])
== Some(0)
let mut spacing = None; // Spacing used to continue list entry
loop {
// If another entry starts on the next line, don't continue matching
match self.next_match(&cursor.at(end_pos))
{
break;
}
// Advance cursor
end_cursor = end_cursor.at(captures.get(0).unwrap().end());
// Spacing
let current_spacing = captures.get(1).unwrap().as_str();
if let Some(spacing) = &spacing {
if spacing.1 != current_spacing {
reports.push(
Report::build(
ReportKind::Warning,
cursor.source.clone(),
captures.get(1).unwrap().start(),
)
.with_message("Invalid list entry spacing")
.with_label(
Label::new((
cursor.source.clone(),
captures.get(1).unwrap().range(),
))
.with_message("Spacing for list entries do not match")
.with_color(state.parser.colors().warning),
)
.with_label(
Label::new((cursor.source.clone(), spacing.0.clone()))
.with_message("Previous spacing")
.with_color(state.parser.colors().warning),
)
.finish(),
);
Some((pos, _)) => {
if pos == end_pos { break }
}
} else {
spacing = Some((captures.get(1).unwrap().range(), current_spacing));
None => {},
}
entry_content += " ";
entry_content += captures.get(2).unwrap().as_str();
// Continue matching as current entry
match self.continue_re.captures_at(content, end_pos) {
None => break,
Some(continue_caps) => {
if continue_caps.get(0).unwrap().start() != end_pos { break }
// Get the spacing
let cap_spacing = continue_caps.get(1).unwrap();
match &spacing {
None => spacing = Some(cap_spacing.range()),
Some(spacing) => 'some: {
if content[cap_spacing.range()] == content[spacing.clone()] { break 'some }
reports.push(
Report::build(ReportKind::Warning, cursor.source.clone(), continue_caps.get(1).unwrap().start())
.with_message("Invalid list entry spacing")
.with_label(
Label::new((cursor.source.clone(), cap_spacing.range()))
.with_message("Spacing for list entries must match")
.with_color(parser.colors().warning))
.with_label(
Label::new((cursor.source.clone(), spacing.clone()))
.with_message("Previous spacing")
.with_color(parser.colors().warning))
.finish());
},
}
end_pos = continue_caps.get(0).unwrap().end();
}
}
}
// Parse entry content
let token = Token::new(entry_start..end_cursor.pos, end_cursor.source.clone());
let entry_src = Rc::new(VirtualSource::new(
token.clone(),
let start_pos = caps.get(1).unwrap().end();
let source = VirtualSource::new(
Token::new(start_pos..end_pos, cursor.source.clone()),
"List Entry".to_string(),
entry_content,
));
let parsed_content = match util::parse_paragraph(state, entry_src, document) {
Err(err) => {
reports.push(
Report::build(ReportKind::Warning, token.source(), token.range.start)
.with_message("Unable to Parse List Entry")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message(err)
.with_color(state.parser.colors().warning),
)
.finish(),
);
break;
}
Ok(mut paragraph) => std::mem::take(&mut paragraph.content),
};
if let Some(previous_depth) = document
.last_element::<ListEntry>()
.map(|ent| ent.numbering.clone())
{
ListRule::push_markers(&token, state, document, &previous_depth, &depth);
} else {
ListRule::push_markers(&token, state, document, &vec![], &depth);
}
state.push(
document,
Box::new(ListEntry {
location: Token::new(
entry_start..end_cursor.pos,
end_cursor.source.clone(),
),
numbering: depth,
content: parsed_content,
bullet,
}),
content.as_str()[start_pos..end_pos].to_string(),
);
} else {
break;
}
}
// Close all lists
let current = document
.last_element::<ListEntry>()
.map(|ent| ent.numbering.clone())
.unwrap();
let token = Token::new(end_cursor.pos..end_cursor.pos, end_cursor.source.clone());
ListRule::push_markers(&token, state, document, &current, &Vec::new());
(cursor.at(end_pos),
ListRule::parse_depth(caps.get(1).unwrap().as_str(), document),
source)
},
};
let parsed_entry = parser.parse(Rc::new(source), Some(document));
let mut parsed_paragraph = parsed_entry.last_element_mut::<Paragraph>().unwrap(); // Extract content from paragraph
let entry = ListEntry::new(
Token::new(cursor.pos..end_cursor.pos, cursor.source.clone()),
numbering,
std::mem::replace(&mut parsed_paragraph.content, Vec::new())
);
// Get previous list; if none, insert a new list
let mut list = match document.last_element_mut::<List>()
{
Some(last) => last,
None => {
parser.push(document,
Box::new(List::new(
Token::new(cursor.pos..end_cursor.pos, cursor.source.clone()))));
document.last_element_mut::<List>().unwrap()
}
};
list.push(entry);
(end_cursor, reports)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
#[test]
fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
* 1
*[offset=7] 2
continued
* 3
* New list
*-[bullet=(*)] A
*- B
* Back
*-* More nested
"#
.to_string(),
None,
));
let parser = LangParser::default();
let state = ParserState::new(&parser, None);
let (doc, _) = parser.parse(state, source, None);
validate_document!(doc.content().borrow(), 0,
ListMarker { numbered == false, kind == MarkerKind::Open };
ListEntry { numbering == vec![(false, 1)] } {
Text { content == "1" };
};
ListEntry { numbering == vec![(false, 7)] } {
Text { content == "2 continued" };
};
ListEntry { numbering == vec![(false, 8)] } {
Text { content == "3" };
};
ListMarker { numbered == false, kind == MarkerKind::Close };
Paragraph;
ListMarker { numbered == false, kind == MarkerKind::Open };
ListEntry { numbering == vec![(false, 1)] } {
Text { content == "New list" };
};
ListMarker { numbered == true, kind == MarkerKind::Open };
ListEntry { numbering == vec![(false, 2), (true, 1)], bullet == Some("(*)".to_string()) } {
Text { content == "A" };
};
ListEntry { numbering == vec![(false, 2), (true, 2)], bullet == Some("(*)".to_string()) } {
Text { content == "B" };
};
ListMarker { numbered == true, kind == MarkerKind::Close };
ListEntry { numbering == vec![(false, 2)] } {
Text { content == "Back" };
};
ListMarker { numbered == true, kind == MarkerKind::Open };
ListMarker { numbered == false, kind == MarkerKind::Open };
ListEntry { numbering == vec![(false, 3), (true, 1), (false, 1)] } {
Text { content == "More nested" };
};
ListMarker { numbered == false, kind == MarkerKind::Close };
ListMarker { numbered == true, kind == MarkerKind::Close };
ListMarker { numbered == false, kind == MarkerKind::Close };
);
}
// TODO
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
}

View file

@ -21,7 +21,7 @@ use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::document::element::ReferenceableElement;
use crate::document::references::validate_refname;
use crate::parser::parser::ParserState;
use crate::parser::parser::Parser;
use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
@ -35,7 +35,7 @@ use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser;
use super::paragraph::Paragraph;
use super::reference::InternalReference;
use super::reference::Reference;
#[derive(Debug, PartialEq, Eq)]
pub enum MediaType {
@ -70,16 +70,21 @@ impl Element for Media {
fn element_name(&self) -> &'static str { "Media" }
fn to_string(&self) -> String { format!("{self:#?}") }
fn as_container(&self) -> Option<&dyn ContainerElement> { Some(self) }
fn compile(&self, compiler: &Compiler, document: &dyn Document, cursor: usize) -> Result<String, String> {
fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String> {
match compiler.target() {
Target::HTML => {
let mut result = String::new();
result.push_str("<div class=\"media\">");
for medium in &self.media {
result += medium.compile(compiler, document, cursor+result.len())?.as_str();
match medium.compile(compiler, document) {
Ok(r) => result.push_str(r.as_str()),
Err(e) => return Err(e),
}
}
result.push_str("</div>");
@ -130,25 +135,25 @@ impl Element for Medium {
fn element_name(&self) -> &'static str { "Medium" }
fn to_string(&self) -> String { format!("{self:#?}") }
fn as_referenceable(&self) -> Option<&dyn ReferenceableElement> { Some(self) }
fn compile(&self, compiler: &Compiler, document: &dyn Document, cursor: usize) -> Result<String, String> {
fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String> {
match compiler.target() {
Target::HTML => {
let mut result = String::new();
// Reference
let elemref = document.get_reference(self.reference.as_str()).unwrap();
let refcount = compiler.reference_id(document, elemref);
let width = self
.width
.as_ref()
.map_or(String::new(), |w| format!(r#" style="width:{w};""#));
result.push_str(format!(r#"<div id="{}" class="medium"{width}>"#, self.refid(compiler, refcount)).as_str());
result.push_str(format!(r#"<div class="medium"{width}>"#).as_str());
result += match self.media_type {
MediaType::IMAGE => format!(r#"<a href="{0}"><img src="{0}"></a>"#, self.uri),
MediaType::VIDEO => format!(r#"<video controls{width}><source src="{0}"></video>"#, self.uri
MediaType::VIDEO => format!(
r#"<video controls{width}><source src="{0}"></video>"#,
self.uri
),
MediaType::AUDIO => {
format!(r#"<audio controls src="{0}"{width}></audio>"#, self.uri)
@ -158,17 +163,26 @@ impl Element for Medium {
let caption = self
.caption
.as_ref().map(|cap| format!(
.as_ref()
.and_then(|cap| {
Some(format!(
" {}",
Compiler::sanitize(compiler.target(), cap.as_str())
))
.unwrap_or_default();
})
.unwrap_or(String::new());
// Reference
let elemref = document.get_reference(self.reference.as_str()).unwrap();
let refcount = compiler.reference_id(document, elemref);
result.push_str(
format!(r#"<p class="medium-refname">({refcount}){caption}</p>"#).as_str(),
);
if let Some(paragraph) = self.description.as_ref() {
result += paragraph.compile(compiler, document, cursor+result.len())?.as_str();
match paragraph.compile(compiler, document) {
Ok(res) => result.push_str(res.as_str()),
Err(err) => return Err(err),
}
}
result.push_str("</div>");
@ -188,7 +202,7 @@ impl ReferenceableElement for Medium {
&self,
compiler: &Compiler,
_document: &dyn Document,
reference: &InternalReference,
reference: &Reference,
refid: usize,
) -> Result<String, String> {
match compiler.target() {
@ -200,11 +214,7 @@ impl ReferenceableElement for Medium {
// TODO Handle other kinds of media
match self.media_type {
MediaType::IMAGE => Ok(format!(
"<a class=\"medium-ref\" href=\"#medium-{refid}\">{caption}<img src=\"{}\"></a>",
self.uri
)),
MediaType::VIDEO => Ok(format!(
"<a class=\"medium-ref\" href=\"#medium-{refid}\">{caption}<video><source src=\"{0}\"></video></a>",
r#"<a class="medium-ref">{caption}<img src="{}"></a>"#,
self.uri
)),
_ => todo!(""),
@ -213,13 +223,8 @@ impl ReferenceableElement for Medium {
_ => todo!(""),
}
}
fn refid(&self, _compiler: &Compiler, refid: usize) -> String {
format!("medium-{refid}")
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::media")]
pub struct MediaRule {
re: [Regex; 1],
properties: PropertyParser,
@ -251,7 +256,7 @@ impl MediaRule {
.multi_line(true)
.build()
.unwrap()],
properties: PropertyParser { properties: props },
properties: PropertyParser::new(props),
}
}
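Throughout these rules, properties such as `width`, `caption`, `offset` or `kind` arrive as a `[key=value, ...]` block and are read back through `PropertyParser` (built as a struct literal on master and via `PropertyParser::new` on batch). A minimal, hypothetical stand-in for the parsing step, assuming plain `key=value` pairs; the real parser also knows declared properties, defaults and escape handling:

use std::collections::HashMap;

// Parse a simple "key=value, key=value" property list into a map.
fn parse_properties(input: &str) -> Result<HashMap<String, String>, String> {
    let mut map = HashMap::new();
    for pair in input.split(',').map(str::trim).filter(|s| !s.is_empty()) {
        let (key, value) = pair
            .split_once('=')
            .ok_or_else(|| format!("Missing `=` in property `{pair}`"))?;
        map.insert(key.trim().to_string(), value.trim().to_string());
    }
    Ok(map)
}

fn main() {
    let props = parse_properties("width=50%, caption=A figure").unwrap();
    assert_eq!(props.get("width").map(String::as_str), Some("50%"));
    assert_eq!(props.get("caption").map(String::as_str), Some("A figure"));
}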
@ -324,14 +329,13 @@ impl MediaRule {
impl RegexRule for MediaRule {
fn name(&self) -> &'static str { "Media" }
fn previous(&self) -> Option<&'static str> { Some("Graphviz") }
fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match<'a>(
&self,
_: usize,
state: &ParserState,
parser: &dyn Parser,
document: &'a (dyn Document<'a> + 'a),
token: Token,
matches: Captures,
@ -375,8 +379,7 @@ impl RegexRule for MediaRule {
};
// Properties
let properties = match self.parse_properties(state.parser.colors(), &token, &matches.get(3))
{
let properties = match self.parse_properties(parser.colors(), &token, &matches.get(3)) {
Ok(pm) => pm,
Err(report) => {
reports.push(report);
@ -400,10 +403,10 @@ impl RegexRule for MediaRule {
Label::new((token.source().clone(), token.range.clone()))
.with_message(format!(
"Property `type: {}` cannot be converted: {}",
prop.fg(state.parser.colors().info),
err.fg(state.parser.colors().error)
prop.fg(parser.colors().info),
err.fg(parser.colors().error)
))
.with_color(state.parser.colors().warning),
.with_color(parser.colors().warning),
)
.finish(),
);
@ -419,7 +422,7 @@ impl RegexRule for MediaRule {
token.start() + 1..token.end(),
))
.with_message(format!("{err}. Required because mediatype could not be detected"))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -433,13 +436,15 @@ impl RegexRule for MediaRule {
.get("width", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
.ok().map(|(_, s)| s);
.ok()
.and_then(|(_, s)| Some(s));
let caption = properties
.get("caption", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
.ok().map(|(_, value)| value);
.ok()
.and_then(|(_, value)| Some(value));
let description = match matches.get(4) {
Some(content) => {
@ -451,7 +456,7 @@ impl RegexRule for MediaRule {
if source.content().is_empty() {
None
} else {
match parse_paragraph(state, source, document) {
match parse_paragraph(parser, source, document) {
Ok(paragraph) => Some(*paragraph),
Err(err) => {
reports.push(
@ -462,7 +467,7 @@ impl RegexRule for MediaRule {
.with_message(format!(
"Could not parse description: {err}"
))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -477,7 +482,7 @@ impl RegexRule for MediaRule {
let mut group = match document.last_element_mut::<Media>() {
Some(group) => group,
None => {
state.push(
parser.push(
document,
Box::new(Media {
location: token.clone(),
@ -504,7 +509,7 @@ impl RegexRule for MediaRule {
.with_label(
Label::new((token.source().clone(), token.range.clone()))
.with_message(err)
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -512,12 +517,15 @@ impl RegexRule for MediaRule {
reports
}
fn lua_bindings<'lua>(&self, _lua: &'lua mlua::Lua) -> Vec<(String, mlua::Function<'lua>)> {
vec![]
}
}
#[cfg(test)]
mod tests {
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use super::*;
@ -546,7 +554,7 @@ mod tests {
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
let doc = parser.parse(source, None);
let borrow = doc.content().borrow();
let group = borrow.first().as_ref().unwrap().as_container().unwrap();

View file

@ -1,20 +1,17 @@
pub mod code;
pub mod comment;
pub mod graphviz;
pub mod import;
pub mod layout;
pub mod link;
pub mod list;
pub mod media;
pub mod paragraph;
pub mod raw;
pub mod reference;
pub mod script;
pub mod section;
pub mod style;
pub mod tex;
pub mod registrar;
pub mod text;
pub mod comment;
pub mod paragraph;
pub mod variable;
pub mod elemstyle;
pub mod customstyle;
pub mod blockquote;
pub mod import;
pub mod script;
pub mod list;
pub mod style;
pub mod section;
pub mod link;
pub mod code;
pub mod tex;
pub mod graphviz;
pub mod raw;
pub mod media;
pub mod reference;

View file

@ -3,6 +3,8 @@ use std::ops::Range;
use std::rc::Rc;
use ariadne::Report;
use mlua::Function;
use mlua::Lua;
use regex::Regex;
use crate::compiler::compiler::Compiler;
@ -11,7 +13,7 @@ use crate::document::document::Document;
use crate::document::element::ContainerElement;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::parser::parser::ParserState;
use crate::parser::parser::Parser;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Source;
@ -48,28 +50,43 @@ impl Element for Paragraph {
fn element_name(&self) -> &'static str { "Paragraph" }
fn compile(&self, compiler: &Compiler, document: &dyn Document, cursor: usize) -> Result<String, String> {
fn to_string(&self) -> String { format!("{:#?}", self) }
fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String> {
if self.content.is_empty() {
return Ok(String::new());
}
match compiler.target() {
Target::HTML => {
if self.content.is_empty() {
return Ok(String::new());
}
let mut result = String::new();
result.push_str("<p>");
//if prev.is_none() || prev.unwrap().downcast_ref::<Paragraph>().is_none()
{
result.push_str("<p>");
}
//else
//{ result.push_str(" "); }
for elems in &self.content {
result += elems.compile(compiler, document, cursor+result.len())?.as_str();
let err = self.content.iter().try_for_each(|elem| {
match elem.compile(compiler, document) {
Err(e) => return Err(e),
Ok(content) => {
result.push_str(content.as_str());
Ok(())
}
}
});
//if next.is_none() || next.unwrap().downcast_ref::<Paragraph>().is_none()
{
result.push_str("</p>");
}
result.push_str("</p>");
Ok(result)
match err {
Err(e) => Err(e),
Ok(()) => Ok(result),
}
}
_ => todo!("Unimplemented compiler"),
Target::LATEX => todo!("Unimplemented compiler"),
}
}
@ -83,15 +100,11 @@ impl ContainerElement for Paragraph {
if elem.location().source() == self.location().source() {
self.location.range = self.location.start()..elem.location().end();
}
if elem.kind() == ElemKind::Block {
return Err("Attempted to push block element inside a paragraph".to_string());
}
self.content.push(elem);
Ok(())
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::paragraph")]
pub struct ParagraphRule {
re: Regex,
}
@ -105,27 +118,27 @@ impl ParagraphRule {
}
impl Rule for ParagraphRule {
fn name(&self) -> &'static str { "Paragraph" }
fn previous(&self) -> Option<&'static str> { Some("Comment") }
fn name(&self) -> &'static str { "Paragraphing" }
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
fn next_match(&self, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
self.re
.find_at(cursor.source.content(), cursor.pos).map(|m| (m.start(), Box::new([false; 0]) as Box<dyn Any>))
.find_at(cursor.source.content(), cursor.pos)
.and_then(|m| Some((m.start(), Box::new([false; 0]) as Box<dyn Any>)))
}
fn on_match(
&self,
state: &ParserState,
parser: &dyn Parser,
document: &dyn Document,
cursor: Cursor,
_match_data: Box<dyn Any>,
_match_data: Option<Box<dyn Any>>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
let end_cursor = match self.re.captures_at(cursor.source.content(), cursor.pos) {
None => panic!("Unknown error"),
Some(capture) => cursor.at(capture.get(0).unwrap().end() - 1),
};
state.push(
parser.push(
document,
Box::new(Paragraph {
location: Token::new(cursor.pos..end_cursor.pos, cursor.source.clone()),
@ -135,49 +148,7 @@ impl Rule for ParagraphRule {
(end_cursor, Vec::new())
}
}
#[cfg(test)]
mod tests {
use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
fn parse() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
First paragraph
Second line
Second paragraph\
<- literal \\n
Last paragraph
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Text { content == "First paragraph Second line" };
};
Paragraph {
Text { content == "Second paragraph\n<- literal \\n" };
};
Paragraph {
Text { content == "Last paragraph " };
};
);
}
// TODO
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
}

View file

@ -3,7 +3,7 @@ use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState;
use crate::parser::parser::Parser;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
@ -27,10 +27,20 @@ use std::str::FromStr;
use std::sync::Arc;
#[derive(Debug)]
pub struct Raw {
pub location: Token,
pub kind: ElemKind,
pub content: String,
struct Raw {
pub(self) location: Token,
pub(self) kind: ElemKind,
pub(self) content: String,
}
impl Raw {
fn new(location: Token, kind: ElemKind, content: String) -> Self {
Self {
location,
kind,
content,
}
}
}
impl Element for Raw {
@ -39,12 +49,13 @@ impl Element for Raw {
fn element_name(&self) -> &'static str { "Raw" }
fn compile(&self, _compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> {
fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, _compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
Ok(self.content.clone())
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::raw")]
pub struct RawRule {
re: [Regex; 1],
properties: PropertyParser,
@ -66,21 +77,20 @@ impl RawRule {
Regex::new(r"\{\?(?:\[((?:\\.|[^\[\]\\])*?)\])?(?:((?:\\.|[^\\\\])*?)(\?\}))?")
.unwrap(),
],
properties: PropertyParser { properties: props },
properties: PropertyParser::new(props),
}
}
}
impl RegexRule for RawRule {
fn name(&self) -> &'static str { "Raw" }
fn previous(&self) -> Option<&'static str> { Some("Variable Substitution") }
fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match(
&self,
_index: usize,
state: &ParserState,
parser: &dyn Parser,
document: &dyn Document,
token: Token,
matches: Captures,
@ -97,10 +107,10 @@ impl RegexRule for RawRule {
Label::new((token.source().clone(), token.range.clone()))
.with_message(format!(
"Missing terminating `{}` after first `{}`",
"?}".fg(state.parser.colors().info),
"{?".fg(state.parser.colors().info)
"?}".fg(parser.colors().info),
"{?".fg(parser.colors().info)
))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -117,7 +127,7 @@ impl RegexRule for RawRule {
.with_label(
Label::new((token.source().clone(), content.range()))
.with_message("Raw code is empty")
.with_color(state.parser.colors().warning),
.with_color(parser.colors().warning),
)
.finish(),
);
@ -136,7 +146,7 @@ impl RegexRule for RawRule {
.with_label(
Label::new((token.source().clone(), token.range.clone()))
.with_message(format!("Raw code is missing properties: {e}"))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -154,7 +164,7 @@ impl RegexRule for RawRule {
.with_label(
Label::new((token.source().clone(), props.range()))
.with_message(e)
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -178,10 +188,10 @@ impl RegexRule for RawRule {
Label::new((token.source().clone(), token.range.clone()))
.with_message(format!(
"Property `kind: {}` cannot be converted: {}",
prop.fg(state.parser.colors().info),
err.fg(state.parser.colors().error)
prop.fg(parser.colors().info),
err.fg(parser.colors().error)
))
.with_color(state.parser.colors().warning),
.with_color(parser.colors().warning),
)
.finish(),
);
@ -198,9 +208,9 @@ impl RegexRule for RawRule {
))
.with_message(format!(
"Property `{}` is missing",
err.fg(state.parser.colors().info)
err.fg(parser.colors().info)
))
.with_color(state.parser.colors().warning),
.with_color(parser.colors().warning),
)
.finish(),
);
@ -209,7 +219,7 @@ impl RegexRule for RawRule {
},
};
state.push(
parser.push(
document,
Box::new(Raw {
location: token.clone(),
@ -221,7 +231,7 @@ impl RegexRule for RawRule {
reports
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
@ -244,7 +254,7 @@ impl RegexRule for RawRule {
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
ctx.state.push(
ctx.parser.push(
ctx.document,
Box::new(Raw {
location: ctx.location.clone(),
@ -267,56 +277,31 @@ impl RegexRule for RawRule {
#[cfg(test)]
mod tests {
use super::*;
use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::compiler::compiler::Target;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use crate::parser::source::SourceFile;
#[test]
fn parser() {
fn raw_tests() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
Break{?[kind=block] Raw?}NewParagraph{?<b>?}
Break{?[kind=block]<RAW>?}NewParagraph
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
let compiler = Compiler::new(Target::HTML, None);
let doc = parser.parse(source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph;
Raw { kind == ElemKind::Block, content == "Raw" };
Paragraph {
Text;
Raw { kind == ElemKind::Inline, content == "<b>" };
};
);
}
let borrow = doc.content().borrow();
let found = borrow
.iter()
.filter_map(|e| e.downcast_ref::<Raw>())
.collect::<Vec<_>>();
#[test]
fn lua() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
Break%<nml.raw.push("block", "Raw")>%NewParagraph%<nml.raw.push("inline", "<b>")>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph;
Raw { kind == ElemKind::Block, content == "Raw" };
Paragraph {
Text;
Raw { kind == ElemKind::Inline, content == "<b>" };
};
);
assert_eq!(found[0].compile(&compiler, &*doc), Ok("<RAW>".to_string()));
//assert_eq!(found[1].compile(&compiler, &*doc), Ok("<RAW>".to_string()));
}
}

View file

@ -2,67 +2,54 @@ use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use reference_style::ExternalReferenceStyle;
use regex::Captures;
use regex::Match;
use regex::Regex;
use runtime_format::FormatArgs;
use runtime_format::FormatKey;
use runtime_format::FormatKeyError;
use crate::compiler::compiler::Compiler;
use crate::compiler::compiler::Target;
use crate::document::document::CrossReference;
use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::document::references::validate_refname;
use crate::parser::parser::ParserState;
use crate::parser::parser::Parser;
use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::style::StyleHolder;
use crate::parser::util;
use crate::parser::util::Property;
use crate::parser::util::PropertyMap;
use crate::parser::util::PropertyParser;
#[derive(Debug)]
pub struct InternalReference {
pub struct Reference {
pub(self) location: Token,
pub(self) refname: String,
pub(self) caption: Option<String>,
}
impl InternalReference {
impl Reference {
pub fn caption(&self) -> Option<&String> { self.caption.as_ref() }
}
impl Element for InternalReference {
impl Element for Reference {
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Inline }
fn element_name(&self) -> &'static str { "Reference" }
fn compile(
&self,
compiler: &Compiler,
document: &dyn Document,
_cursor: usize,
) -> Result<String, String> {
fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String> {
match compiler.target() {
Target::HTML => {
let elemref = document
.get_reference(self.refname.as_str())
.ok_or(format!(
"Unable to find reference `{}` in current document",
self.refname
))?;
let elemref = document.get_reference(self.refname.as_str()).unwrap();
let elem = document.get_from_reference(&elemref).unwrap();
elem.compile_reference(
@ -77,89 +64,6 @@ impl Element for InternalReference {
}
}
#[derive(Debug)]
pub struct ExternalReference {
pub(self) location: Token,
pub(self) reference: CrossReference,
pub(self) caption: Option<String>,
pub(self) style: Rc<reference_style::ExternalReferenceStyle>,
}
struct FmtPair<'a>(Target, &'a ExternalReference);
impl FormatKey for FmtPair<'_> {
fn fmt(&self, key: &str, f: &mut std::fmt::Formatter<'_>) -> Result<(), FormatKeyError> {
match &self.1.reference {
CrossReference::Unspecific(refname) => match key {
"refname" => write!(f, "{}", Compiler::sanitize(self.0, refname))
.map_err(FormatKeyError::Fmt),
_ => Err(FormatKeyError::UnknownKey),
},
CrossReference::Specific(refdoc, refname) => match key {
"refdoc" => {
write!(f, "{}", Compiler::sanitize(self.0, refdoc)).map_err(FormatKeyError::Fmt)
}
"refname" => write!(f, "{}", Compiler::sanitize(self.0, refname))
.map_err(FormatKeyError::Fmt),
_ => Err(FormatKeyError::UnknownKey),
},
}
}
}
impl Element for ExternalReference {
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Inline }
fn element_name(&self) -> &'static str { "Reference" }
fn compile(
&self,
compiler: &Compiler,
_document: &dyn Document,
cursor: usize,
) -> Result<String, String> {
match compiler.target() {
Target::HTML => {
let mut result = "<a href=\"".to_string();
// Link position
let crossreference_pos = cursor + result.len();
if let Some(caption) = &self.caption {
result +=
format!("\">{}</a>", Compiler::sanitize(Target::HTML, caption)).as_str();
} else {
// Use style
let fmt_pair = FmtPair(compiler.target(), self);
let format_string = match &self.reference {
CrossReference::Unspecific(_) => Compiler::sanitize_format(
fmt_pair.0,
self.style.format_unspecific.as_str(),
),
CrossReference::Specific(_, _) => Compiler::sanitize_format(
fmt_pair.0,
self.style.format_specific.as_str(),
),
};
let args = FormatArgs::new(format_string.as_str(), &fmt_pair);
args.status().map_err(|err| {
format!("Failed to format ExternalReference style `{format_string}`: {err}")
})?;
result += format!("\">{}</a>", args.to_string()).as_str();
}
// Add crossreference
compiler.insert_crossreference(crossreference_pos, self.reference.clone());
Ok(result)
}
_ => todo!(""),
}
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::reference")]
pub struct ReferenceRule {
re: [Regex; 1],
properties: PropertyParser,
@ -177,8 +81,8 @@ impl ReferenceRule {
),
);
Self {
re: [Regex::new(r"§\{(.*?)\}(\[((?:\\.|[^\\\\])*?)\])?").unwrap()],
properties: PropertyParser { properties: props },
re: [Regex::new(r"§\{(.*)\}(\[((?:\\.|[^\\\\])*?)\])?").unwrap()],
properties: PropertyParser::new(props),
}
}
@ -225,67 +129,56 @@ impl ReferenceRule {
impl RegexRule for ReferenceRule {
fn name(&self) -> &'static str { "Reference" }
fn previous(&self) -> Option<&'static str> { Some("Text") }
fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match<'a>(
&self,
_: usize,
state: &ParserState,
parser: &dyn Parser,
document: &'a (dyn Document<'a> + 'a),
token: Token,
matches: Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![];
let (refdoc, refname) = if let Some(refname_match) = matches.get(1) {
if let Some(sep) = refname_match.as_str().find('#')
// External reference
{
let refdoc = refname_match.as_str().split_at(sep).0;
match validate_refname(document, refname_match.as_str().split_at(sep + 1).1, false)
{
Err(err) => {
reports.push(
Report::build(ReportKind::Error, token.source(), refname_match.start())
.with_message("Invalid Reference Refname")
.with_label(
Label::new((token.source().clone(), refname_match.range()))
.with_message(err),
)
.finish(),
);
return reports;
}
Ok(refname) => (Some(refdoc.to_string()), refname.to_string()),
}
} else
// Internal reference
{
match validate_refname(document, refname_match.as_str(), false) {
Err(err) => {
reports.push(
Report::build(ReportKind::Error, token.source(), refname_match.start())
.with_message("Invalid Reference Refname")
.with_label(
Label::new((token.source().clone(), refname_match.range()))
.with_message(err),
)
.finish(),
);
return reports;
}
Ok(refname) => (None, refname.to_string()),
let refname = match (
matches.get(1).unwrap(),
validate_refname(document, matches.get(1).unwrap().as_str(), false),
) {
(m, Ok(refname)) => {
if document.get_reference(refname).is_none() {
reports.push(
Report::build(ReportKind::Error, token.source(), m.start())
.with_message("Unknown Reference Refname")
.with_label(
Label::new((token.source().clone(), m.range())).with_message(
format!(
"Could not find element with reference: `{}`",
refname.fg(parser.colors().info)
),
),
)
.finish(),
);
return reports;
}
refname.to_string()
}
(m, Err(err)) => {
reports.push(
Report::build(ReportKind::Error, token.source(), m.start())
.with_message("Invalid Reference Refname")
.with_label(
Label::new((token.source().clone(), m.range())).with_message(err),
)
.finish(),
);
return reports;
}
} else {
panic!("Unknown error")
};
// Properties
let properties = match self.parse_properties(state.parser.colors(), &token, &matches.get(3))
{
let properties = match self.parse_properties(parser.colors(), &token, &matches.get(3)) {
Ok(pm) => pm,
Err(report) => {
reports.push(report);
@ -298,193 +191,21 @@ impl RegexRule for ReferenceRule {
Ok(value.clone())
})
.ok()
.map(|(_, s)| s);
.and_then(|(_, s)| Some(s));
if let Some(refdoc) = refdoc {
// Get style
let style = state
.shared
.styles
.borrow()
.current(reference_style::STYLE_KEY)
.downcast_rc::<reference_style::ExternalReferenceStyle>()
.unwrap();
// §{#refname}
if refdoc.is_empty() {
state.push(
document,
Box::new(ExternalReference {
location: token,
reference: CrossReference::Unspecific(refname),
caption,
style,
}),
);
// §{docname#refname}
} else {
state.push(
document,
Box::new(ExternalReference {
location: token,
reference: CrossReference::Specific(refdoc, refname),
caption,
style,
}),
);
}
} else {
state.push(
document,
Box::new(InternalReference {
location: token,
refname,
caption,
}),
);
}
parser.push(
document,
Box::new(Reference {
location: token,
refname,
caption,
}),
);
reports
}
fn register_styles(&self, holder: &mut StyleHolder) {
holder.set_current(Rc::new(ExternalReferenceStyle::default()));
}
}
mod reference_style {
use serde::Deserialize;
use serde::Serialize;
use crate::impl_elementstyle;
pub static STYLE_KEY: &str = "style.external_reference";
#[derive(Debug, Serialize, Deserialize)]
pub struct ExternalReferenceStyle {
pub format_unspecific: String,
pub format_specific: String,
}
impl Default for ExternalReferenceStyle {
fn default() -> Self {
Self {
format_unspecific: "(#{refname})".into(),
format_specific: "({refdoc}#{refname})".into(),
}
}
}
impl_elementstyle!(ExternalReferenceStyle, STYLE_KEY);
}
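The default styles above format an external reference as `(#{refname})` or `({refdoc}#{refname})`, with the keys resolved through `runtime_format` and the `FmtPair` adapter. A hand-rolled sketch of the same placeholder substitution, deliberately independent of the runtime-format API so nothing about that crate is assumed beyond the key names:

// Substitute the {refdoc} / {refname} placeholders used by the
// external-reference styles above.
fn format_reference(style: &str, refdoc: Option<&str>, refname: &str) -> String {
    let mut out = style.replace("{refname}", refname);
    if let Some(doc) = refdoc {
        out = out.replace("{refdoc}", doc);
    }
    out
}

fn main() {
    assert_eq!(format_reference("(#{refname})", None, "ref"), "(#ref)");
    assert_eq!(
        format_reference("({refdoc}#{refname})", Some("DocA"), "ref"),
        "(DocA#ref)"
    );
}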
#[cfg(test)]
mod tests {
use crate::compiler::process::process_from_memory;
use crate::elements::paragraph::Paragraph;
use crate::elements::section::Section;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
pub fn parse_internal() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
#{ref} Referenceable section
§{ref}[caption=Section]
§{ref}[caption=Another]
§{ref2}[caption=Before]
#{ref2} Another section
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Section;
Paragraph {
InternalReference { refname == "ref", caption == Some("Section".to_string()) };
InternalReference { refname == "ref", caption == Some("Another".to_string()) };
InternalReference { refname == "ref2", caption == Some("Before".to_string()) };
};
Paragraph;
Section;
);
}
#[test]
pub fn parse_external() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
§{DocA#ref}[caption=Section]
§{DocB#ref}
§{#ref}[caption='ref' from any document]
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
ExternalReference { reference == CrossReference::Specific("DocA".into(), "ref".into()), caption == Some("Section".to_string()) };
ExternalReference { reference == CrossReference::Specific("DocB".into(), "ref".into()), caption == None::<String> };
ExternalReference { reference == CrossReference::Unspecific("ref".into()), caption == Some("'ref' from any document".to_string()) };
};
);
}
#[test]
pub fn test_external() {
let result = process_from_memory(
Target::HTML,
vec![
r#"
@html.page_title = 0
@compiler.output = a.html
#{ref} Referenceable section
"#
.into(),
r#"
@html.page_title = 1
@compiler.output = b.html
§{#ref}
§{a#ref}
#{ref2} Another Referenceable section
"#
.into(),
r#"
@html.page_title = 2
@@style.external_reference = {
"format_unspecific": "[UNSPECIFIC {refname}]",
"format_specific": "[SPECIFIC {refdoc}:{refname}]"
}
§{#ref}[caption=from 0]
§{#ref}
§{#ref2}[caption=from 1]
§{b#ref2}
"#
.into(),
],
)
.unwrap();
assert!(result[1].0.borrow().body.starts_with("<div class=\"content\"><p><a href=\"a.html#Referenceable_section\">(#ref)</a><a href=\"a.html#Referenceable_section\">(a#ref)</a></p>"));
assert!(result[2].0.borrow().body.starts_with("<div class=\"content\"><p><a href=\"a.html#Referenceable_section\">from 0</a><a href=\"a.html#Referenceable_section\">[UNSPECIFIC ref]</a><a href=\"b.html#Another_Referenceable_section\">from 1</a><a href=\"b.html#Another_Referenceable_section\">[SPECIFIC b:ref2]</a></p>"));
fn lua_bindings<'lua>(&self, _lua: &'lua mlua::Lua) -> Vec<(String, mlua::Function<'lua>)> {
vec![]
}
}

40
src/elements/registrar.rs Normal file
View file

@ -0,0 +1,40 @@
use crate::parser::parser::Parser;
use super::code::CodeRule;
use super::comment::CommentRule;
use super::graphviz::GraphRule;
use super::import::ImportRule;
use super::link::LinkRule;
use super::list::ListRule;
use super::media::MediaRule;
use super::paragraph::ParagraphRule;
use super::raw::RawRule;
use super::script::ScriptRule;
use super::section::SectionRule;
use super::style::StyleRule;
use super::tex::TexRule;
use super::text::TextRule;
use super::variable::VariableRule;
use super::variable::VariableSubstitutionRule;
use super::reference::ReferenceRule;
pub fn register<P: Parser>(parser: &mut P) {
parser.add_rule(Box::new(CommentRule::new()), None).unwrap();
parser.add_rule(Box::new(ParagraphRule::new()), None).unwrap();
parser.add_rule(Box::new(ImportRule::new()), None).unwrap();
parser.add_rule(Box::new(ScriptRule::new()), None).unwrap();
parser.add_rule(Box::new(VariableRule::new()), None).unwrap();
parser.add_rule(Box::new(VariableSubstitutionRule::new()), None).unwrap();
parser.add_rule(Box::new(RawRule::new()), None).unwrap();
parser.add_rule(Box::new(ListRule::new()), None).unwrap();
parser.add_rule(Box::new(CodeRule::new()), None).unwrap();
parser.add_rule(Box::new(TexRule::new()), None).unwrap();
parser.add_rule(Box::new(GraphRule::new()), None).unwrap();
parser.add_rule(Box::new(MediaRule::new()), None).unwrap();
parser.add_rule(Box::new(StyleRule::new()), None).unwrap();
parser.add_rule(Box::new(SectionRule::new()), None).unwrap();
parser.add_rule(Box::new(LinkRule::new()), None).unwrap();
parser.add_rule(Box::new(TextRule::default()), None).unwrap();
parser.add_rule(Box::new(ReferenceRule::new()), None).unwrap();
}
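On the batch branch every rule is added here in one explicit sequence, while master drops this file and instead derives ordering from each rule's `previous()` hint together with the `auto_registry` attribute. Below is a small sketch of one plausible way such hints can be turned into an ordered rule list; it illustrates the idea only, not the crate's actual insertion logic, and the rule names and hints are merely examples:

// Order rule names so that each rule lands right after the rule named
// by its `previous()` hint; rules without a (known) hint are appended.
fn order_rules(rules: Vec<(&'static str, Option<&'static str>)>) -> Vec<&'static str> {
    let mut ordered: Vec<&'static str> = Vec::new();
    for (name, previous) in rules {
        match previous.and_then(|p| ordered.iter().position(|r| *r == p)) {
            Some(idx) => ordered.insert(idx + 1, name),
            None => ordered.push(name),
        }
    }
    ordered
}

fn main() {
    let ordered = order_rules(vec![
        ("Comment", None),
        ("Paragraph", Some("Comment")),
        ("Text", Some("Paragraph")),
        ("Link", Some("Comment")),
    ]);
    assert_eq!(ordered, vec!["Comment", "Link", "Paragraph", "Text"]);
}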

View file

@ -1,7 +1,7 @@
use crate::document::document::Document;
use crate::lua::kernel::Kernel;
use crate::lua::kernel::KernelContext;
use crate::parser::parser::ParserState;
use crate::parser::parser::Parser;
use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
@ -12,6 +12,7 @@ use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Function;
use mlua::Lua;
use regex::Captures;
use regex::Regex;
@ -20,7 +21,6 @@ use std::rc::Rc;
use super::text::Text;
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::script")]
pub struct ScriptRule {
re: [Regex; 2],
eval_kinds: [(&'static str, &'static str); 3],
@ -78,14 +78,13 @@ impl ScriptRule {
impl RegexRule for ScriptRule {
fn name(&self) -> &'static str { "Script" }
fn previous(&self) -> Option<&'static str> { Some("Import") }
fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match<'a>(
&self,
index: usize,
state: &ParserState,
parser: &dyn Parser,
document: &'a dyn Document<'a>,
token: Token,
matches: Captures,
@ -94,33 +93,26 @@ impl RegexRule for ScriptRule {
let kernel_name = match matches.get(1) {
None => "main".to_string(),
Some(name) => {
match ScriptRule::validate_kernel_name(state.parser.colors(), name.as_str()) {
Ok(name) => name,
Err(e) => {
reports.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid kernel name")
.with_label(
Label::new((token.source(), name.range()))
.with_message(e)
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
Some(name) => match ScriptRule::validate_kernel_name(parser.colors(), name.as_str()) {
Ok(name) => name,
Err(e) => {
reports.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid kernel name")
.with_label(
Label::new((token.source(), name.range()))
.with_message(e)
.with_color(parser.colors().error),
)
.finish(),
);
return reports;
}
}
};
let mut kernels_borrow = state.shared.kernels.borrow_mut();
let kernel = match kernels_borrow.get(kernel_name.as_str()) {
Some(kernel) => kernel,
None => {
kernels_borrow.insert(kernel_name.clone(), Kernel::new(state.parser));
kernels_borrow.get(kernel_name.as_str()).unwrap()
}
},
};
let kernel = parser
.get_kernel(kernel_name.as_str())
.unwrap_or_else(|| parser.insert_kernel(kernel_name.to_string(), Kernel::new(parser)));
let kernel_data = matches
.get(if index == 0 { 2 } else { 3 })
@ -135,7 +127,7 @@ impl RegexRule for ScriptRule {
.with_label(
Label::new((token.source(), token.start() + 1..token.end()))
.with_message("Kernel code is empty")
.with_color(state.parser.colors().warning),
.with_color(parser.colors().warning),
)
.finish(),
);
@ -172,9 +164,9 @@ impl RegexRule for ScriptRule {
Label::new((source.clone(), 0..source.content().len()))
.with_message(format!(
"Kernel execution failed:\n{}",
e
e.to_string()
))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -186,7 +178,7 @@ impl RegexRule for ScriptRule {
// Validate kind
let kind = match matches.get(2) {
None => 0,
Some(kind) => match self.validate_kind(state.parser.colors(), kind.as_str()) {
Some(kind) => match self.validate_kind(parser.colors(), kind.as_str()) {
Ok(kind) => kind,
Err(msg) => {
reports.push(
@ -195,7 +187,7 @@ impl RegexRule for ScriptRule {
.with_label(
Label::new((token.source(), kind.range()))
.with_message(msg)
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -215,9 +207,9 @@ impl RegexRule for ScriptRule {
Label::new((source.clone(), 0..source.content().len()))
.with_message(format!(
"Kernel evaluation failed:\n{}",
e
e.to_string()
))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -231,7 +223,7 @@ impl RegexRule for ScriptRule {
// Eval to text
{
if !result.is_empty() {
state.push(
parser.push(
document,
Box::new(Text::new(
Token::new(1..source.content().len(), source.clone()),
@ -248,11 +240,7 @@ impl RegexRule for ScriptRule {
result,
)) as Rc<dyn Source>;
state.with_state(|new_state| {
new_state
.parser
.parse_into(new_state, parse_source, document);
})
parser.parse_into(parse_source, document);
}
}
Err(e) => {
@ -263,9 +251,9 @@ impl RegexRule for ScriptRule {
Label::new((source.clone(), 0..source.content().len()))
.with_message(format!(
"Kernel evaluation failed:\n{}",
e
e.to_string()
))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -279,69 +267,13 @@ impl RegexRule for ScriptRule {
let ctx = KernelContext {
location: Token::new(0..source.content().len(), source.clone()),
state,
parser,
document,
};
kernel.run_with_context(ctx, execute)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::elements::link::Link;
use crate::elements::list::ListEntry;
use crate::elements::list::ListMarker;
use crate::elements::paragraph::Paragraph;
use crate::elements::style::Style;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
#[test]
fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
Simple evals:
* %< 1+1>%
* %<" 1+1>% = 2
* %<! "**bold**">%
Definition:
@<
function make_ref(name, ref)
return "[" .. name .. "](#" .. ref .. ")"
end
>@
Evaluation: %<! make_ref("hello", "id")>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph;
ListMarker;
ListEntry {};
ListEntry {
Text { content == "2" };
Text { content == " = 2" };
};
ListEntry {
Style;
Text { content == "bold" };
Style;
};
ListMarker;
Paragraph {
Text; Text;
Link { url == "#id" } { Text { content == "hello" }; };
};
);
}
// TODO
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
}

View file

@ -5,11 +5,10 @@ use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::document::element::ReferenceableElement;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState;
use crate::parser::parser::Parser;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::style::StyleHolder;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
@ -18,91 +17,35 @@ use mlua::Error::BadArgument;
use mlua::Function;
use mlua::Lua;
use regex::Regex;
use section_style::SectionLinkPos;
use section_style::SectionStyle;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
use super::reference::InternalReference;
#[derive(Debug)]
pub struct Section {
pub(self) location: Token,
/// Title of the section
pub(self) title: String,
/// Depth i.e number of '#'
pub(self) depth: usize,
/// [`section_kind`]
pub(self) kind: u8,
/// Section reference name
pub(self) reference: Option<String>,
/// Style of the section
pub(self) style: Rc<section_style::SectionStyle>,
pub(self) title: String, // Section title
pub(self) depth: usize, // Section depth
pub(self) kind: u8, // Section kind, e.g. numbered, unnumbered, ...
pub(self) reference: Option<String>, // Section reference name
}
impl Element for Section {
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Block }
fn element_name(&self) -> &'static str { "Section" }
fn compile(&self, compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> {
fn to_string(&self) -> String { format!("{self:#?}") }
fn as_referenceable(&self) -> Option<&dyn ReferenceableElement> { Some(self) }
fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
match compiler.target() {
Target::HTML => {
// Section numbering
let number = if (self.kind & section_kind::NO_NUMBER) != section_kind::NO_NUMBER {
let numbering = compiler.section_counter(self.depth);
let mut result = String::new();
for num in numbering.iter() {
result = result + num.to_string().as_str() + ".";
}
result += " ";
result
} else {
String::new()
};
if self.style.link_pos == SectionLinkPos::None {
return Ok(format!(
r#"<h{0} id="{1}">{number}{2}</h{0}>"#,
self.depth,
Compiler::refname(compiler.target(), self.title.as_str()),
Compiler::sanitize(compiler.target(), self.title.as_str())
));
}
let refname = Compiler::refname(compiler.target(), self.title.as_str());
let link = format!(
"{}<a class=\"section-link\" href=\"#{refname}\">{}</a>{}",
Compiler::sanitize(compiler.target(), self.style.link[0].as_str()),
Compiler::sanitize(compiler.target(), self.style.link[1].as_str()),
Compiler::sanitize(compiler.target(), self.style.link[2].as_str())
);
if self.style.link_pos == SectionLinkPos::After {
Ok(format!(
r#"<h{0} id="{1}">{number}{2}{link}</h{0}>"#,
self.depth,
Compiler::refname(compiler.target(), self.title.as_str()),
Compiler::sanitize(compiler.target(), self.title.as_str())
))
} else
// Before
{
Ok(format!(
r#"<h{0} id="{1}">{link}{number}{2}</h{0}>"#,
self.depth,
Compiler::refname(compiler.target(), self.title.as_str()),
Compiler::sanitize(compiler.target(), self.title.as_str())
))
}
}
Target::HTML => Ok(format!(
"<h{0}>{1}</h{0}>",
self.depth,
Compiler::sanitize(compiler.target(), self.title.as_str())
)),
Target::LATEX => Err("Unimplemented compiler".to_string()),
}
}
fn as_referenceable(&self) -> Option<&dyn ReferenceableElement> { Some(self) }
}
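For orientation, a hedged sketch of the HTML the link-aware compile() branch above would emit for a depth-2, numbered section titled "My Title" under the default SectionStyle (link_pos = Before, link = ["", "🔗", " "]); the id depends on Compiler::refname, assumed here to yield "My_Title", and the numbering on the compiler's section counter:
// Illustrative only, assuming refname("My Title") == "My_Title" and a section counter of [1, 2]:
// <h2 id="My_Title"><a class="section-link" href="#My_Title">🔗</a> 1.2. My Title</h2>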
impl ReferenceableElement for Section {
@ -113,35 +56,14 @@ impl ReferenceableElement for Section {
fn compile_reference(
&self,
compiler: &Compiler,
_document: &dyn Document,
reference: &InternalReference,
_refid: usize,
document: &dyn Document,
reference: &super::reference::Reference,
refid: usize,
) -> Result<String, String> {
match compiler.target() {
Target::HTML => {
let caption = reference.caption().map_or(
format!(
"({})",
Compiler::sanitize(compiler.target(), self.title.as_str())
),
|cap| cap.clone(),
);
Ok(format!(
"<a class=\"section-reference\" href=\"#{}\">{caption}</a>",
Compiler::refname(compiler.target(), self.title.as_str())
))
}
_ => todo!(""),
}
}
fn refid(&self, compiler: &Compiler, _refid: usize) -> String {
Compiler::refname(compiler.target(), self.title.as_str())
todo!()
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::section")]
pub struct SectionRule {
re: [Regex; 1],
}
@ -162,14 +84,13 @@ pub mod section_kind {
impl RegexRule for SectionRule {
fn name(&self) -> &'static str { "Section" }
fn previous(&self) -> Option<&'static str> { Some("Custom Style") }
fn regexes(&self) -> &[Regex] { &self.re }
fn on_regex_match(
&self,
_: usize,
state: &ParserState,
parser: &dyn Parser,
document: &dyn Document,
token: Token,
matches: regex::Captures,
@ -184,9 +105,9 @@ impl RegexRule for SectionRule {
.with_label(
Label::new((token.source(), depth.range()))
.with_message(format!("Section is of depth {}, which is greather than {} (maximum depth allowed)",
depth.len().fg(state.parser.colors().info),
6.fg(state.parser.colors().info)))
.with_color(state.parser.colors().error))
depth.len().fg(parser.colors().info),
6.fg(parser.colors().info)))
.with_color(parser.colors().error))
.finish());
return result;
}
@ -197,37 +118,37 @@ impl RegexRule for SectionRule {
};
// [Optional] Reference name
let section_refname =
matches.get(2).map_or_else(
|| None,
|refname| {
// Check for duplicate reference
if let Some(elem_reference) = document.get_reference(refname.as_str()) {
let elem = document.get_from_reference(&elem_reference).unwrap();
result.push(
let section_refname = matches.get(2).map_or_else(
|| None,
|refname| {
/* TODO: Wait for reference rework
// Check for duplicate reference
if let Some((ref_doc, reference)) = document.get_reference(refname.as_str())
{
result.push(
Report::build(ReportKind::Warning, token.source(), refname.start())
.with_message("Duplicate reference name")
.with_label(
Label::new((token.source(), refname.range()))
.with_message(format!("Reference with name `{}` is already defined in `{}`",
refname.as_str().fg(state.parser.colors().highlight),
elem.location().source().name().as_str().fg(state.parser.colors().highlight)))
refname.as_str().fg(parser.colors().highlight),
ref_doc.source().name().as_str().fg(parser.colors().highlight)))
.with_message(format!("`{}` conflicts with previously defined reference to {}",
refname.as_str().fg(state.parser.colors().highlight),
elem.element_name().fg(state.parser.colors().highlight)))
.with_color(state.parser.colors().warning))
refname.as_str().fg(parser.colors().highlight),
reference.element_name().fg(parser.colors().highlight)))
.with_color(parser.colors().warning))
.with_label(
Label::new((elem.location().source(), elem.location().start()..elem.location().end() ))
Label::new((ref_doc.source(), reference.location().start()+1..reference.location().end() ))
.with_message(format!("`{}` previously defined here",
refname.as_str().fg(state.parser.colors().highlight)))
.with_color(state.parser.colors().warning))
.with_note("Previous reference was overwritten".to_string())
refname.as_str().fg(parser.colors().highlight)))
.with_color(parser.colors().warning))
.with_note(format!("Previous reference was overwritten"))
.finish());
}
Some(refname.as_str().to_string())
},
);
}
*/
Some(refname.as_str().to_string())
},
);
// Section kind
let section_kind = match matches.get(3) {
@ -243,11 +164,11 @@ impl RegexRule for SectionRule {
.with_label(
Label::new((token.source(), kind.range()))
.with_message(format!("Section numbering kind must be a combination of `{}` for unnumbered, and `{}` for non-listing; got `{}`",
"*".fg(state.parser.colors().info),
"+".fg(state.parser.colors().info),
kind.as_str().fg(state.parser.colors().highlight)))
.with_color(state.parser.colors().error))
.with_help("Leave empty for a numbered listed section".to_string())
"*".fg(parser.colors().info),
"+".fg(parser.colors().info),
kind.as_str().fg(parser.colors().highlight)))
.with_color(parser.colors().error))
.with_help(format!("Leave empty for a numbered listed section"))
.finish());
return result;
}
@ -274,7 +195,7 @@ impl RegexRule for SectionRule {
.with_label(
Label::new((token.source(), name.range()))
.with_message("Sections require a name before line end")
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -289,8 +210,8 @@ impl RegexRule for SectionRule {
.with_label(
Label::new((token.source(), name.range()))
.with_message("Sections require at least one whitespace before the section's name")
.with_color(state.parser.colors().warning))
.with_help(format!("Add a space before `{}`", section_name.fg(state.parser.colors().highlight)))
.with_color(parser.colors().warning))
.with_help(format!("Add a space before `{}`", section_name.fg(parser.colors().highlight)))
.finish());
return result;
}
@ -300,16 +221,7 @@ impl RegexRule for SectionRule {
_ => panic!("Empty section name"),
};
// Get style
let style = state
.shared
.styles
.borrow()
.current(section_style::STYLE_KEY)
.downcast_rc::<SectionStyle>()
.unwrap();
state.push(
parser.push(
document,
Box::new(Section {
location: token.clone(),
@ -317,21 +229,20 @@ impl RegexRule for SectionRule {
depth: section_depth,
kind: section_kind,
reference: section_refname,
style,
}),
);
result
return result;
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
"push".to_string(),
lua.create_function(
|_, (title, depth, kind, reference): (String, usize, Option<String>, Option<String>)| {
let kind = match kind.as_deref().unwrap_or("") {
|_, (title, depth, kind, reference): (String, usize, String, Option<String>)| {
let kind = match kind.as_str() {
"*+" | "+*" => section_kind::NO_NUMBER | section_kind::NO_TOC,
"*" => section_kind::NO_NUMBER,
"+" => section_kind::NO_TOC,
@ -341,24 +252,16 @@ impl RegexRule for SectionRule {
to: Some("push".to_string()),
pos: 3,
name: Some("kind".to_string()),
cause: Arc::new(mlua::Error::external("Unknown section kind specified".to_string())),
cause: Arc::new(mlua::Error::external(format!(
"Unknown section kind specified"
))),
})
}
};
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
// Get style
let style = ctx
.state
.shared
.styles
.borrow()
.current(section_style::STYLE_KEY)
.downcast_rc::<SectionStyle>()
.unwrap();
ctx.state.push(
ctx.parser.push(
ctx.document,
Box::new(Section {
location: ctx.location.clone(),
@ -366,7 +269,6 @@ impl RegexRule for SectionRule {
depth,
kind,
reference,
style,
}),
);
})
@ -380,137 +282,4 @@ impl RegexRule for SectionRule {
bindings
}
fn register_styles(&self, holder: &mut StyleHolder) {
holder.set_current(Rc::new(SectionStyle::default()));
}
}
mod section_style {
use serde::Deserialize;
use serde::Serialize;
use crate::impl_elementstyle;
pub static STYLE_KEY: &str = "style.section";
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
pub enum SectionLinkPos {
Before,
After,
None,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct SectionStyle {
pub link_pos: SectionLinkPos,
pub link: [String; 3],
}
impl Default for SectionStyle {
fn default() -> Self {
Self {
link_pos: SectionLinkPos::Before,
link: ["".into(), "🔗".into(), " ".into()],
}
}
}
impl_elementstyle!(SectionStyle, STYLE_KEY);
}
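A hedged sketch of how this style block deserializes, assuming a JSON deserializer such as serde_json is available (the `@@style.section` test below exercises the same mapping):
// Sketch only: SectionLinkPos unit variants deserialize from plain strings.
let style: SectionStyle = serde_json::from_str(
	r#"{ "link_pos": "After", "link": ["[", "¶", "]"] }"#,
).unwrap();
assert_eq!(style.link_pos, SectionLinkPos::After);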
#[cfg(test)]
mod tests {
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
# 1
##+ 2
###* 3
####+* 4
#####*+ 5
######{refname} 6
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Section { depth == 1, title == "1" };
Section { depth == 2, title == "2", kind == section_kind::NO_TOC };
Section { depth == 3, title == "3", kind == section_kind::NO_NUMBER };
Section { depth == 4, title == "4", kind == section_kind::NO_NUMBER | section_kind::NO_TOC };
Section { depth == 5, title == "5", kind == section_kind::NO_NUMBER | section_kind::NO_TOC };
Section { depth == 6, title == "6", reference == Some("refname".to_string()) };
);
}
#[test]
fn lua() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
%<
nml.section.push("1", 1, "", nil)
nml.section.push("2", 2, "+", nil)
nml.section.push("3", 3, "*", nil)
nml.section.push("4", 4, "+*", nil)
nml.section.push("5", 5, "*+", nil)
nml.section.push("6", 6, "", "refname")
>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Section { depth == 1, title == "1" };
Section { depth == 2, title == "2", kind == section_kind::NO_TOC };
Section { depth == 3, title == "3", kind == section_kind::NO_NUMBER };
Section { depth == 4, title == "4", kind == section_kind::NO_NUMBER | section_kind::NO_TOC };
Section { depth == 5, title == "5", kind == section_kind::NO_NUMBER | section_kind::NO_TOC };
Section { depth == 6, title == "6", reference == Some("refname".to_string()) };
);
}
#[test]
fn style() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
@@style.section = {
"link_pos": "None",
"link": ["a", "b", "c"]
}
"#
.to_string(),
None,
));
let parser = LangParser::default();
let state = ParserState::new(&parser, None);
let (_, state) = parser.parse(state, source, None);
let style = state.shared
.styles
.borrow()
.current(section_style::STYLE_KEY)
.downcast_rc::<SectionStyle>()
.unwrap();
assert_eq!(style.link_pos, SectionLinkPos::None);
assert_eq!(style.link, ["a".to_string(), "b".to_string(), "c".to_string()]);
}
}

View file

@ -1,27 +1,10 @@
use crate::compiler::compiler::Compiler;
use crate::compiler::compiler::Target;
use crate::document::document::Document;
use crate::document::document::DocumentAccessors;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::state::RuleState;
use mlua::{Function, Lua};
use regex::{Captures, Regex};
use crate::{compiler::compiler::{Compiler, Target}, document::{document::{DocumentAccessors, Document}, element::{ElemKind, Element}}, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}, state::State}};
use ariadne::{Fmt, Label, Report, ReportKind};
use crate::parser::state::Scope;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Function;
use regex::Captures;
use regex::Regex;
use std::cell::RefCell;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
use std::{cell::RefCell, ops::Range, rc::Rc};
use lazy_static::lazy_static;
use super::paragraph::Paragraph;
@ -32,107 +15,104 @@ pub struct Style {
close: bool,
}
impl Style {
impl Style
{
pub fn new(location: Token, kind: usize, close: bool) -> Self {
Self {
location,
kind,
close,
}
Self { location, kind, close }
}
}
impl Element for Style {
impl Element for Style
{
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Inline }
fn element_name(&self) -> &'static str { "Style" }
fn compile(&self, compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> {
match compiler.target() {
fn element_name(&self) -> &'static str { "Section" }
fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
match compiler.target()
{
Target::HTML => {
Ok([
// Bold
"<b>", "</b>", // Italic
"<i>", "</i>", // Underline
"<u>", "</u>", // Code
"<b>", "</b>",
// Italic
"<i>", "</i>",
// Underline
"<u>", "</u>",
// Code
"<em>", "</em>",
][self.kind * 2 + self.close as usize]
.to_string())
][self.kind*2 + self.close as usize].to_string())
}
Target::LATEX => Err("Unimplemented compiler".to_string()),
Target::LATEX => Err("Unimplemented compiler".to_string())
}
}
}
struct StyleState {
toggled: [Option<Token>; 4],
struct StyleState
{
toggled: [Option<Token>; 4]
}
impl StyleState {
const NAMES: [&'static str; 4] = ["Bold", "Italic", "Underline", "Code"];
const NAMES : [&'static str; 4] = ["Bold", "Italic", "Underline", "Code"];
fn new() -> Self {
Self {
toggled: [None, None, None, None],
}
Self { toggled: [None, None, None, None] }
}
}
impl RuleState for StyleState {
impl State for StyleState
{
fn scope(&self) -> Scope { Scope::PARAGRAPH }
fn on_remove<'a>(
&self,
state: &ParserState,
document: &dyn Document,
) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![];
fn on_remove<'a>(&self, parser: &dyn Parser, document: &dyn Document) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>> {
let mut result = Vec::new();
self.toggled
.iter()
.zip(StyleState::NAMES)
.for_each(|(token, name)| {
if token.is_none() {
return;
} // Style not enabled
let token = token.as_ref().unwrap();
.for_each(|(token, name)|
{
if token.is_none() { return } // Style not enabled
let token = token.as_ref().unwrap();
let paragraph = document.last_element::<Paragraph>().unwrap();
let paragraph_end = paragraph
.content
.last().map(|last| (
last.location().source(),
last.location().end() - 1..last.location().end(),
))
.unwrap();
//let range = range.as_ref().unwrap();
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Unterminated Style")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_order(1)
.with_message(format!(
"Style {} starts here",
name.fg(state.parser.colors().info)
))
.with_color(state.parser.colors().error),
)
.with_label(
Label::new(paragraph_end)
.with_order(1)
.with_message("Paragraph ends here".to_string())
.with_color(state.parser.colors().error),
)
.with_note("Styles cannot span multiple documents (i.e @import)")
.finish(),
);
});
//let active_range = range.start .. paragraph.location().end()-1;
reports
let paragraph = document.last_element::<Paragraph>().unwrap();
let paragraph_end = paragraph.content.last()
.and_then(|last| Some((last.location().source(), last.location().end()-1 .. last.location().end())))
.unwrap();
// TODO: Allow style to span multiple documents if they don't break paragraph.
result.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Unterminated style")
//.with_label(
// Label::new((document.source(), active_range.clone()))
// .with_order(0)
// .with_message(format!("Style {} is not terminated before the end of paragraph",
// name.fg(parser.colors().info)))
// .with_color(parser.colors().error))
.with_label(
Label::new((token.source(), token.range.clone()))
.with_order(1)
.with_message(format!("Style {} starts here",
name.fg(parser.colors().info)))
.with_color(parser.colors().info))
.with_label(
Label::new(paragraph_end)
.with_order(1)
.with_message(format!("Paragraph ends here"))
.with_color(parser.colors().info))
.with_note("Styles cannot span multiple documents (i.e @import)")
.finish());
});
return result;
}
}
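A hedged sketch of input that should trigger the on_remove report above, namely a style left open when its paragraph ends; the parse call uses the single-argument signature shown on this branch, and names such as LangParser and SourceFile follow the existing tests:
// Sketch only: the `*` opens an italic style that is never closed before the
// paragraph break, so on_remove is expected to emit the "Unterminated style" report.
let source = Rc::new(SourceFile::with_content(
	"".to_string(),
	"Some *italic that never closes\n\nNext paragraph".to_string(),
	None,
));
let parser = LangParser::default();
let _doc = parser.parse(source, None);
assert!(parser.has_error()); // expected to hold if emitted reports flag the parser as errored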
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::style")]
pub struct StyleRule {
re: [Regex; 4],
}
@ -148,38 +128,31 @@ impl StyleRule {
// Underline
Regex::new(r"__").unwrap(),
// Code
Regex::new(r"`").unwrap(),
],
Regex::new(r"`").unwrap()
]
}
}
}
static STATE_NAME: &str = "elements.style";
lazy_static! {
static ref STATE_NAME : String = "elements.style".to_string();
}
impl RegexRule for StyleRule {
impl RegexRule for StyleRule
{
fn name(&self) -> &'static str { "Style" }
fn previous(&self) -> Option<&'static str> { Some("Layout") }
fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match(
&self,
index: usize,
state: &ParserState,
document: &dyn Document,
token: Token,
_matches: Captures,
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>> {
let query = state.shared.rule_state.borrow().get(STATE_NAME);
let style_state = match query {
fn on_regex_match(&self, index: usize, parser: &dyn Parser, document: &dyn Document, token: Token, _matches: Captures) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>> {
let result = vec![];
let query = parser.state().query(&STATE_NAME);
let state = match query
{
Some(state) => state,
None => {
// Insert as a new state
match state
.shared
.rule_state
.borrow_mut()
.insert(STATE_NAME.into(), Rc::new(RefCell::new(StyleState::new())))
None => { // Insert as a new state
match parser.state_mut().insert(STATE_NAME.clone(), Rc::new(RefCell::new(StyleState::new())))
{
Err(_) => panic!("Unknown error"),
Ok(state) => state,
@ -187,181 +160,28 @@ impl RegexRule for StyleRule {
}
};
if let Some(style_state) = style_state.borrow_mut().downcast_mut::<StyleState>() {
style_state.toggled[index] = style_state.toggled[index]
.clone()
.map_or(Some(token.clone()), |_| None);
state.push(
document,
Box::new(Style::new(
if let Some(style_state) = state
.borrow_mut()
.as_any_mut()
.downcast_mut::<StyleState>()
{
style_state.toggled[index] = style_state.toggled[index].clone().map_or(Some(token.clone()), |_| None);
parser.push(document, Box::new(
Style::new(
token.clone(),
index,
style_state.toggled[index].is_none(),
)),
);
} else {
panic!("Invalid state at `{STATE_NAME}`");
!style_state.toggled[index].is_some()
)
));
}
else
{
panic!("Invalid state at `{}`", STATE_NAME.as_str());
}
vec![]
return result;
}
fn register_bindings<'lua>(&self, lua: &'lua mlua::Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
"toggle".to_string(),
lua.create_function(|_, style: String| {
let kind = match style.as_str() {
"bold" | "Bold" => 0,
"italic" | "Italic" => 1,
"underline" | "Underline" => 2,
"emphasis" | "Emphasis" => 3,
_ => {
return Err(mlua::Error::BadArgument {
to: Some("toggle".to_string()),
pos: 1,
name: Some("style".to_string()),
cause: Arc::new(mlua::Error::external("Unknown style specified".to_string())),
})
}
};
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
let query = ctx.state.shared.rule_state.borrow().get(STATE_NAME);
let style_state = match query {
Some(state) => state,
None => {
// Insert as a new state
match ctx.state.shared.rule_state.borrow_mut().insert(
STATE_NAME.into(),
Rc::new(RefCell::new(StyleState::new())),
) {
Err(_) => panic!("Unknown error"),
Ok(state) => state,
}
}
};
if let Some(style_state) =
style_state.borrow_mut().downcast_mut::<StyleState>()
{
style_state.toggled[kind] = style_state.toggled[kind]
.clone()
.map_or(Some(ctx.location.clone()), |_| None);
ctx.state.push(
ctx.document,
Box::new(Style::new(
ctx.location.clone(),
kind,
style_state.toggled[kind].is_none(),
)),
);
} else {
panic!("Invalid state at `{STATE_NAME}`");
};
})
});
Ok(())
})
.unwrap(),
));
bindings
}
}
#[cfg(test)]
mod tests {
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
Some *style
terminated here*
**BOLD + *italic***
__`UNDERLINE+EM`__
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Text;
Style { kind == 1, close == false };
Text;
Style { kind == 1, close == true };
};
Paragraph {
Style { kind == 0, close == false }; // **
Text;
Style { kind == 1, close == false }; // *
Text;
Style { kind == 0, close == true }; // **
Style { kind == 1, close == true }; // *
Style { kind == 2, close == false }; // __
Style { kind == 3, close == false }; // `
Text;
Style { kind == 3, close == true }; // `
Style { kind == 2, close == true }; // __
};
);
}
#[test]
fn lua() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
Some %<nml.style.toggle("italic")>%style
terminated here%<nml.style.toggle("Italic")>%
%<nml.style.toggle("Bold")>%NOLD + %<nml.style.toggle("italic")>%italic%<nml.style.toggle("bold") nml.style.toggle("italic")>%
%<nml.style.toggle("Underline") nml.style.toggle("Emphasis")>%UNDERLINE+EM%<nml.style.toggle("emphasis")>%%<nml.style.toggle("underline")>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Text;
Style { kind == 1, close == false };
Text;
Style { kind == 1, close == true };
};
Paragraph {
Style { kind == 0, close == false }; // **
Text;
Style { kind == 1, close == false }; // *
Text;
Style { kind == 0, close == true }; // **
Style { kind == 1, close == true }; // *
Style { kind == 2, close == false }; // __
Style { kind == 3, close == false }; // `
Text;
Style { kind == 3, close == true }; // `
Style { kind == 2, close == true }; // __
};
);
}
// TODO
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
}

View file

@ -6,7 +6,6 @@ use std::process::Command;
use std::process::Stdio;
use std::rc::Rc;
use std::str::FromStr;
use std::sync::Arc;
use std::sync::Once;
use ariadne::Fmt;
@ -28,8 +27,7 @@ use crate::compiler::compiler::Target;
use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState;
use crate::parser::parser::Parser;
use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
@ -114,7 +112,10 @@ impl FormattedTex {
}
let mut result = String::new();
if let Err(e) = process.stdout.unwrap().read_to_string(&mut result) { panic!("Unable to read `latex2svg` stdout: {}", e) }
match process.stdout.unwrap().read_to_string(&mut result) {
Err(e) => panic!("Unable to read `latex2svg` stdout: {}", e),
Ok(_) => {}
}
println!("Done!");
Ok(result)
@ -150,18 +151,15 @@ impl Element for Tex {
fn element_name(&self) -> &'static str { "LaTeX" }
fn compile(
&self,
compiler: &Compiler,
document: &dyn Document,
_cursor: usize,
) -> Result<String, String> {
fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String> {
match compiler.target() {
Target::HTML => {
static CACHE_INIT: Once = Once::new();
CACHE_INIT.call_once(|| {
if let Some(con) = compiler.cache() {
if let Err(e) = FormattedTex::init(con) {
if let Some(mut con) = compiler.cache() {
if let Err(e) = FormattedTex::init(&mut con) {
eprintln!("Unable to create cache table: {e}");
}
}
@ -191,8 +189,8 @@ impl Element for Tex {
Tex::format_latex(&fontsize, &preamble, &format!("{prepend}{}", self.tex))
};
let result = if let Some(con) = compiler.cache() {
match latex.cached(con, |s| s.latex_to_svg(&exec, &fontsize)) {
let mut result = if let Some(mut con) = compiler.cache() {
match latex.cached(&mut con, |s| s.latex_to_svg(&exec, &fontsize)) {
Ok(s) => Ok(s),
Err(e) => match e {
CachedError::SqlErr(e) => {
@ -225,7 +223,6 @@ impl Element for Tex {
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::tex")]
pub struct TexRule {
re: [Regex; 2],
properties: PropertyParser,
@ -256,7 +253,7 @@ impl TexRule {
.unwrap(),
Regex::new(r"\$(?:\[((?:\\.|[^\\\\])*?)\])?(?:((?:\\.|[^\\\\])*?)\$)?").unwrap(),
],
properties: PropertyParser { properties: props },
properties: PropertyParser::new(props),
}
}
@ -303,14 +300,13 @@ impl TexRule {
impl RegexRule for TexRule {
fn name(&self) -> &'static str { "Tex" }
fn previous(&self) -> Option<&'static str> { Some("Code") }
fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match(
&self,
index: usize,
state: &ParserState,
parser: &dyn Parser,
document: &dyn Document,
token: Token,
matches: Captures,
@ -327,10 +323,10 @@ impl RegexRule for TexRule {
Label::new((token.source().clone(), token.range.clone()))
.with_message(format!(
"Missing terminating `{}` after first `{}`",
["|$", "$"][index].fg(state.parser.colors().info),
["$|", "$"][index].fg(state.parser.colors().info)
["|$", "$"][index].fg(parser.colors().info),
["$|", "$"][index].fg(parser.colors().info)
))
.with_color(state.parser.colors().error),
.with_color(parser.colors().error),
)
.finish(),
);
@ -350,7 +346,7 @@ impl RegexRule for TexRule {
.with_label(
Label::new((token.source().clone(), content.range()))
.with_message("Tex code is empty")
.with_color(state.parser.colors().warning),
.with_color(parser.colors().warning),
)
.finish(),
);
@ -360,8 +356,7 @@ impl RegexRule for TexRule {
};
// Properties
let properties = match self.parse_properties(state.parser.colors(), &token, &matches.get(1))
{
let properties = match self.parse_properties(parser.colors(), &token, &matches.get(1)) {
Ok(pm) => pm,
Err(report) => {
reports.push(report);
@ -383,16 +378,16 @@ impl RegexRule for TexRule {
Label::new((token.source().clone(), token.range.clone()))
.with_message(format!(
"Property `kind: {}` cannot be converted: {}",
prop.fg(state.parser.colors().info),
err.fg(state.parser.colors().error)
prop.fg(parser.colors().info),
err.fg(parser.colors().error)
))
.with_color(state.parser.colors().warning),
.with_color(parser.colors().warning),
)
.finish(),
);
return reports;
}
PropertyMapError::NotFoundError(_) => {
PropertyMapError::NotFoundError(err) => {
if index == 1 {
TexKind::Inline
} else {
@ -407,17 +402,19 @@ impl RegexRule for TexRule {
.get("caption", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
.ok().map(|(_, value)| value);
.ok()
.and_then(|(_, value)| Some(value));
// Environ
let tex_env = properties
.get("env", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
.ok().map(|(_, value)| value)
.ok()
.and_then(|(_, value)| Some(value))
.unwrap();
state.push(
parser.push(
document,
Box::new(Tex {
mathmode: index == 1,
@ -432,103 +429,14 @@ impl RegexRule for TexRule {
reports
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
"push_math".to_string(),
lua.create_function(
|_, (kind, tex, env, caption): (String, String, Option<String>, Option<String>)| {
let mut result = Ok(());
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
let kind = match TexKind::from_str(kind.as_str()) {
Ok(kind) => kind,
Err(err) => {
result = Err(mlua::Error::BadArgument {
to: Some("push".to_string()),
pos: 2,
name: Some("kind".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Unable to get tex kind: {err}"
))),
});
return;
}
};
ctx.state.push(
ctx.document,
Box::new(Tex {
location: ctx.location.clone(),
mathmode: true,
kind,
env: env.unwrap_or("main".to_string()),
tex,
caption,
}),
);
})
});
result
},
)
.unwrap(),
));
bindings.push((
"push".to_string(),
lua.create_function(
|_, (kind, tex, env, caption): (String, String, Option<String>, Option<String>)| {
let mut result = Ok(());
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
let kind = match TexKind::from_str(kind.as_str()) {
Ok(kind) => kind,
Err(err) => {
result = Err(mlua::Error::BadArgument {
to: Some("push".to_string()),
pos: 2,
name: Some("kind".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Unable to get tex kind: {err}"
))),
});
return;
}
};
ctx.state.push(
ctx.document,
Box::new(Tex {
location: ctx.location.clone(),
mathmode: false,
kind,
env: env.unwrap_or("main".to_string()),
tex,
caption,
}),
);
})
});
result
},
)
.unwrap(),
));
bindings
}
// TODO
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
}
#[cfg(test)]
mod tests {
use crate::elements::paragraph::Paragraph;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
@ -538,26 +446,28 @@ mod tests {
"".to_string(),
r#"
$[kind=block, caption=Some\, text\\] 1+1=2 $
$|[env=another] Non Math \LaTeX |$
$|[env=another] Non Math \LaTeX|$
$[kind=block,env=another] e^{i\pi}=-1$
%<nml.tex.push_math("block", "1+1=2", nil, "Some, text\\")>%
%<nml.tex.push("block", "Non Math \\LaTeX", "another", nil)>%
%<nml.tex.push_math("block", "e^{i\\pi}=-1", "another", nil)>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
let doc = parser.parse(source, None);
validate_document!(doc.content().borrow(), 0,
Tex { mathmode == true, tex == "1+1=2", env == "main", caption == Some("Some, text\\".to_string()) };
Tex { mathmode == false, tex == "Non Math \\LaTeX", env == "another" };
Tex { mathmode == true, tex == "e^{i\\pi}=-1", env == "another" };
Tex { mathmode == true, tex == "1+1=2", env == "main", caption == Some("Some, text\\".to_string()) };
Tex { mathmode == false, tex == "Non Math \\LaTeX", env == "another" };
Tex { mathmode == true, tex == "e^{i\\pi}=-1", env == "another" };
);
let borrow = doc.content().borrow();
let found = borrow
.iter()
.filter_map(|e| e.downcast_ref::<Tex>())
.collect::<Vec<_>>();
assert_eq!(found[0].tex, "1+1=2");
assert_eq!(found[0].env, "main");
assert_eq!(found[0].caption, Some("Some, text\\".to_string()));
assert_eq!(found[1].tex, "Non Math \\LaTeX");
assert_eq!(found[1].env, "another");
assert_eq!(found[2].tex, "e^{i\\pi}=-1");
assert_eq!(found[2].env, "another");
}
#[test]
@ -568,25 +478,31 @@ $[kind=block,env=another] e^{i\pi}=-1$
$[ caption=Some\, text\\] 1+1=2 $
$|[env=another, kind=inline , caption = Enclosed \]. ] Non Math \LaTeX|$
$[env=another] e^{i\pi}=-1$
%<nml.tex.push_math("inline", "1+1=2", "main", "Some, text\\")>%
%<nml.tex.push("inline", "Non Math \\LaTeX", "another", "Enclosed ].")>%
%<nml.tex.push_math("inline", "e^{i\\pi}=-1", "another", nil)>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
let doc = parser.parse(source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Tex { mathmode == true, tex == "1+1=2", env == "main", caption == Some("Some, text\\".to_string()) };
Tex { mathmode == false, tex == "Non Math \\LaTeX", env == "another", caption == Some("Enclosed ].".to_string()) };
Tex { mathmode == true, tex == "e^{i\\pi}=-1", env == "another" };
Tex { mathmode == true, tex == "1+1=2", env == "main", caption == Some("Some, text\\".to_string()) };
Tex { mathmode == false, tex == "Non Math \\LaTeX", env == "another", caption == Some("Enclosed ].".to_string()) };
Tex { mathmode == true, tex == "e^{i\\pi}=-1", env == "another" };
};
);
let borrow = doc.content().borrow();
let found = borrow
.first()
.unwrap()
.as_container()
.unwrap()
.contained()
.iter()
.filter_map(|e| e.downcast_ref::<Tex>())
.collect::<Vec<_>>();
assert_eq!(found[0].tex, "1+1=2");
assert_eq!(found[0].env, "main");
assert_eq!(found[0].caption, Some("Some, text\\".to_string()));
assert_eq!(found[1].tex, "Non Math \\LaTeX");
assert_eq!(found[1].env, "another");
assert_eq!(found[1].caption, Some("Enclosed ].".to_string()));
assert_eq!(found[2].tex, "e^{i\\pi}=-1");
assert_eq!(found[2].env, "another");
}
}

View file

@ -11,7 +11,7 @@ use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState;
use crate::parser::parser::Parser;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Source;
@ -19,15 +19,15 @@ use crate::parser::source::Token;
#[derive(Debug)]
pub struct Text {
pub location: Token,
pub content: String,
pub(self) location: Token,
pub(self) content: String,
}
impl Text {
pub fn new(location: Token, content: String) -> Text {
Text {
location,
content,
location: location,
content: content,
}
}
}
@ -36,45 +36,39 @@ impl Element for Text {
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Inline }
fn element_name(&self) -> &'static str { "Text" }
fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> {
fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
Ok(Compiler::sanitize(compiler.target(), self.content.as_str()))
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::text")]
#[derive(Default)]
pub struct TextRule;
impl TextRule {
pub fn new() -> Self { Self {} }
}
impl Rule for TextRule {
fn name(&self) -> &'static str { "Text" }
fn previous(&self) -> Option<&'static str> { Some("Link") }
fn next_match(&self, _state: &ParserState, _cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
None
}
fn next_match(&self, _cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> { None }
fn on_match(
&self,
_state: &ParserState,
_parser: &dyn Parser,
_document: &dyn Document,
_cursor: Cursor,
_match_data: Box<dyn Any>,
_match_data: Option<Box<dyn Any>>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
panic!("Text cannot match");
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
"push".to_string(),
lua.create_function(|_, content: String| {
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
ctx.state.push(
ctx.parser.push(
ctx.document,
Box::new(Text {
location: ctx.location.clone(),

View file

@ -1,110 +1,78 @@
use crate::document::document::Document;
use crate::document::variable::BaseVariable;
use crate::document::variable::PathVariable;
use crate::document::variable::Variable;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Function;
use mlua::Lua;
use mlua::{Function, Lua};
use regex::Regex;
use std::ops::Range;
use std::rc::Rc;
use std::str::FromStr;
use crate::{document::document::Document, parser::{parser::{Parser, ReportColors}, rule::RegexRule, source::{Source, Token}}};
use ariadne::{Report, Fmt, Label, ReportKind};
use crate::document::variable::{BaseVariable, PathVariable, Variable};
use std::{ops::Range, rc::Rc};
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum VariableKind {
Regular,
Path,
}
impl FromStr for VariableKind {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"regular" | "" => Ok(VariableKind::Regular),
"path" | "'" => Ok(VariableKind::Path),
_ => Err(format!("Uknnown variable kind: `{s}`")),
}
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::variable")]
pub struct VariableRule {
re: [Regex; 1],
kinds: Vec<(String, String)>,
kinds: Vec<(String, String)>,
}
impl VariableRule {
pub fn new() -> Self {
Self {
re: [Regex::new(r"(?:^|\n)@([^[:alpha:]])?(.*?)=((?:\\\n|.)*)").unwrap()],
kinds: vec![("".into(), "Regular".into()), ("'".into(), "Path".into())],
}
re: [Regex::new(r"(?:^|\n)@([^[:alpha:]])?(.*?)=((?:\\\n|.)*)").unwrap()],
kinds: vec![
("".into(), "Regular".into()),
("'".into(), "Path".into())
]
}
}
pub fn make_variable(
&self,
colors: &ReportColors,
location: Token,
kind: usize,
name: String,
value: String,
) -> Result<Rc<dyn Variable>, String> {
match self.kinds[kind].0.as_str() {
"" => Ok(Rc::new(BaseVariable::new(location, name, value))),
"'" => {
match std::fs::canonicalize(value.as_str()) // TODO: not canonicalize
pub fn make_variable(&self, colors: &ReportColors, location: Token, kind: usize, name: String, value: String) -> Result<Rc<dyn Variable>, String>
{
match self.kinds[kind].0.as_str()
{
"" => {
Ok(Rc::new(BaseVariable::new(location, name, value)))
}
"'" => {
match std::fs::canonicalize(value.as_str()) // TODO: not canonicalize
{
Ok(path) => Ok(Rc::new(PathVariable::new(location, name, path))),
Err(e) => Err(format!("Unable to canonicalize path `{}`: {}",
value.fg(colors.highlight),
e))
e.to_string()))
}
}
_ => panic!("Unhandled variable kind"),
}
}
}
_ => panic!("Unhandled variable kind")
}
}
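Two hedged examples of declarations matched by the regex above, to make the kind dispatch in make_variable concrete:
// `@name=value`  : group 1 is empty, kind "" (Regular), stored as a BaseVariable
// `@'name=value` : group 1 captures `'`, kind "'" (Path), value canonicalized into a PathVariable
//
//   @title=My Document
//   @'logo=assets/logo.png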
// Trim and check variable name for validity
pub fn validate_name<'a>(
colors: &ReportColors,
original_name: &'a str,
) -> Result<&'a str, String> {
let name = original_name.trim_start().trim_end();
if name.contains("%") {
return Err(format!("Name cannot contain '{}'", "%".fg(colors.info)));
}
Ok(name)
}
// Trim and check variable name for validity
pub fn validate_name<'a>(colors: &ReportColors, original_name: &'a str) -> Result<&'a str, String>
{
let name = original_name.trim_start().trim_end();
if name.contains("%")
{
return Err(format!("Name cannot contain '{}'",
"%".fg(colors.info)));
}
return Ok(name);
}
pub fn validate_value(original_value: &str) -> Result<String, String> {
pub fn validate_value(_colors: &ReportColors, original_value: &str) -> Result<String, String>
{
let mut escaped = 0usize;
let mut result = String::new();
for c in original_value.trim_start().trim_end().chars() {
if c == '\\' {
escaped += 1
} else if c == '\n' {
if c == '\\' { escaped += 1 }
else if c == '\n' {
match escaped {
0 => return Err("Unknown error wile capturing value".to_string()),
0 => return Err("Unknown error wile capturing variable".to_string()),
// Remove '\n'
1 => {}
1 => {},
// Insert '\n'
_ => {
result.push(c);
(0..escaped - 2).for_each(|_| result.push('\\'));
(0..escaped-2).for_each(|_| result.push('\\'));
}
}
escaped = 0;
} else {
}
else {
(0..escaped).for_each(|_| result.push('\\'));
escaped = 0;
result.push(c);
@ -113,310 +81,241 @@ impl VariableRule {
(0..escaped).for_each(|_| result.push('\\'));
Ok(result)
}
}
}
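A hedged illustration of the escaping rules implemented by validate_value above (single-argument form): one backslash before a newline acts as a line continuation, a doubled backslash keeps a literal newline in the value.
// Sketch only, expected behaviour of the escape handling above.
assert_eq!(
	VariableRule::validate_value("line one\\\nline two"),
	Ok("line oneline two".to_string())
);
assert_eq!(
	VariableRule::validate_value("line one\\\\\nline two"),
	Ok("line one\nline two".to_string())
);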
impl RegexRule for VariableRule {
fn name(&self) -> &'static str { "Variable" }
fn previous(&self) -> Option<&'static str> { Some("Element Style") }
fn regexes(&self) -> &[Regex] { &self.re }
fn on_regex_match(
&self,
_: usize,
state: &ParserState,
document: &dyn Document,
token: Token,
matches: regex::Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
fn on_regex_match<'a>(&self, _: usize, parser: &dyn Parser, document: &'a dyn Document, token: Token, matches: regex::Captures) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>
{
let mut result = vec![];
// [Optional] variable kind
let var_kind = match matches.get(1) {
// [Optional] variable kind
let var_kind = match matches.get(1)
{
Some(kind) => {
// Find kind
let r = self
.kinds
.iter()
.enumerate()
.find(|(_i, (ref char, ref _name))| char == kind.as_str());
// Find kind
let r = self.kinds.iter().enumerate().find(|(_i, (ref char, ref _name))| {
char == kind.as_str() });
// Unknown kind specified
if r.is_none() {
result.push(
Report::build(ReportKind::Error, token.source(), kind.start())
.with_message("Unknown variable kind")
.with_label(
Label::new((token.source(), kind.range()))
.with_message(format!(
"Variable kind `{}` is unknown",
kind.as_str().fg(state.parser.colors().highlight)
))
.with_color(state.parser.colors().error),
)
.with_help(format!(
"Leave empty for regular variables. Available variable kinds:{}",
self.kinds.iter().skip(1).fold(
"".to_string(),
|acc, (char, name)| {
acc + format!(
"\n - `{}` : {}",
char.fg(state.parser.colors().highlight),
name.fg(state.parser.colors().info)
)
.as_str()
}
)
))
.finish(),
);
// Unknown kind specified
if r.is_none()
{
result.push(
Report::build(ReportKind::Error, token.source(), kind.start())
.with_message("Unknown variable kind")
.with_label(
Label::new((token.source(), kind.range()))
.with_message(format!("Variable kind `{}` is unknown",
kind.as_str().fg(parser.colors().highlight)))
.with_color(parser.colors().error))
.with_help(format!("Leave empty for regular variables. Available variable kinds:{}",
self.kinds.iter().skip(1).fold("".to_string(), |acc, (char, name)| {
acc + format!("\n - `{}` : {}",
char.fg(parser.colors().highlight),
name.fg(parser.colors().info)).as_str()
})))
.finish());
return result;
}
return result;
}
r.unwrap().0
r.unwrap().0
}
None => 0,
};
let var_name = match matches.get(2) {
Some(name) => match VariableRule::validate_name(state.parser.colors(), name.as_str()) {
Ok(var_name) => var_name,
Err(msg) => {
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid variable name")
.with_label(
Label::new((token.source(), name.range()))
.with_message(format!(
"Variable name `{}` is not allowed. {msg}",
name.as_str().fg(state.parser.colors().highlight)
))
.with_color(state.parser.colors().error),
)
.finish(),
);
let var_name = match matches.get(2)
{
Some(name) => {
match VariableRule::validate_name(&parser.colors(), name.as_str())
{
Ok(var_name) => var_name,
Err(msg) => {
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid variable name")
.with_label(
Label::new((token.source(), name.range()))
.with_message(format!("Variable name `{}` is not allowed. {msg}",
name.as_str().fg(parser.colors().highlight)))
.with_color(parser.colors().error))
.finish());
return result;
}
},
_ => panic!("Unknown variable name"),
};
return result;
},
}
},
_ => panic!("Unknown variable name")
};
let var_value = match matches.get(3) {
Some(value) => match VariableRule::validate_value(value.as_str()) {
Ok(var_value) => var_value,
Err(msg) => {
result.push(
Report::build(ReportKind::Error, token.source(), value.start())
.with_message("Invalid variable value")
.with_label(
Label::new((token.source(), value.range()))
.with_message(format!(
"Variable value `{}` is not allowed. {msg}",
value.as_str().fg(state.parser.colors().highlight)
))
.with_color(state.parser.colors().error),
)
.finish(),
);
let var_value = match matches.get(3)
{
Some(value) => {
match VariableRule::validate_value(&parser.colors(), value.as_str())
{
Ok(var_value) => var_value,
Err(msg ) => {
result.push(
Report::build(ReportKind::Error, token.source(), value.start())
.with_message("Invalid variable value")
.with_label(
Label::new((token.source(), value.range()))
.with_message(format!("Variable value `{}` is not allowed. {msg}",
value.as_str().fg(parser.colors().highlight)))
.with_color(parser.colors().error))
.finish());
return result;
}
},
_ => panic!("Invalid variable value"),
};
return result;
}
}
}
_ => panic!("Invalid variable value")
};
match self.make_variable(
state.parser.colors(),
token.clone(),
var_kind,
var_name.to_string(),
var_value,
) {
Ok(variable) => document.add_variable(variable),
Err(msg) => {
let m = matches.get(0).unwrap();
result.push(
Report::build(ReportKind::Error, token.source(), m.start())
.with_message("Unable to create variable")
.with_label(
Label::new((token.source(), m.start() + 1..m.end()))
.with_message(format!(
"Unable to create variable `{}`. {}",
var_name.fg(state.parser.colors().highlight),
msg
))
.with_color(state.parser.colors().error),
)
.finish(),
);
match self.make_variable(&parser.colors(), token.clone(), var_kind, var_name.to_string(), var_value)
{
Ok(variable) => document.add_variable(variable),
Err(msg) => {
let m = matches.get(0).unwrap();
result.push(
Report::build(ReportKind::Error, token.source(), m.start())
.with_message("Unable to create variable")
.with_label(
Label::new((token.source(), m.start()+1 .. m.end() ))
.with_message(format!("Unable to create variable `{}`. {}",
var_name.fg(parser.colors().highlight),
msg))
.with_color(parser.colors().error))
.finish());
return result;
}
}
return result;
}
}
result
return result;
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
"insert".to_string(),
lua.create_function(|_, (name, value): (String, String)| {
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
let var = Rc::new(BaseVariable::new(ctx.location.clone(), name, value));
ctx.document.add_variable(var);
})
});
Ok(())
})
.unwrap(),
));
bindings.push((
"get".to_string(),
lua.create_function(|_, name: String| {
let mut value: Option<String> = None;
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
if let Some(var) = ctx.document.get_variable(name.as_str())
{
value = Some(var.to_string());
}
})
});
Ok(value)
})
.unwrap(),
));
bindings
}
// TODO
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::variable")]
pub struct VariableSubstitutionRule {
pub struct VariableSubstitutionRule
{
re: [Regex; 1],
}
impl VariableSubstitutionRule {
pub fn new() -> Self {
Self {
re: [Regex::new(r"%(.*?)%").unwrap()],
}
re: [Regex::new(r"%(.*?)%").unwrap()],
}
}
}
impl RegexRule for VariableSubstitutionRule {
fn name(&self) -> &'static str { "Variable Substitution" }
fn previous(&self) -> Option<&'static str> { Some("Variable") }
impl RegexRule for VariableSubstitutionRule
{
fn name(&self) -> &'static str { "Variable Substitution" }
fn regexes(&self) -> &[regex::Regex] { &self.re }
fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match<'a>(
&self,
_index: usize,
state: &ParserState,
document: &'a dyn Document<'a>,
token: Token,
matches: regex::Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
fn on_regex_match<'a>(&self, _index: usize, parser: &dyn Parser, document: &'a dyn Document<'a>, token: Token, matches: regex::Captures) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
let mut result = vec![];
let variable = match matches.get(1) {
Some(name) => {
// Empty name
if name.as_str().is_empty() {
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Empty variable name")
.with_label(
Label::new((token.source(), matches.get(0).unwrap().range()))
.with_message("Missing variable name for substitution".to_string())
.with_color(state.parser.colors().error),
)
.finish(),
);
let variable = match matches.get(1)
{
Some(name) => {
// Empty name
if name.as_str().is_empty()
{
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Empty variable name")
.with_label(
Label::new((token.source(), matches.get(0).unwrap().range()))
.with_message(format!("Missing variable name for substitution"))
.with_color(parser.colors().error))
.finish());
return result;
}
// Leading spaces
else if name.as_str().trim_start() != name.as_str() {
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid variable name")
.with_label(
Label::new((token.source(), name.range()))
.with_message("Variable names contains leading spaces".to_string())
.with_color(state.parser.colors().error),
)
.with_help("Remove leading spaces")
.finish(),
);
return result;
}
// Leading spaces
else if name.as_str().trim_start() != name.as_str()
{
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid variable name")
.with_label(
Label::new((token.source(), name.range()))
.with_message(format!("Variable names contains leading spaces"))
.with_color(parser.colors().error))
.with_help("Remove leading spaces")
.finish());
return result;
}
// Trailing spaces
else if name.as_str().trim_end() != name.as_str() {
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid variable name")
.with_label(
Label::new((token.source(), name.range()))
.with_message("Variable names contains trailing spaces".to_string())
.with_color(state.parser.colors().error),
)
.with_help("Remove trailing spaces")
.finish(),
);
return result;
}
// Trailing spaces
else if name.as_str().trim_end() != name.as_str()
{
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid variable name")
.with_label(
Label::new((token.source(), name.range()))
.with_message(format!("Variable names contains trailing spaces"))
.with_color(parser.colors().error))
.with_help("Remove trailing spaces")
.finish());
return result;
}
return result;
}
// Invalid name
if let Err(msg) = VariableRule::validate_name(state.parser.colors(), name.as_str()) {
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid variable name")
.with_label(
Label::new((token.source(), name.range()))
.with_message(msg)
.with_color(state.parser.colors().error),
)
.finish(),
);
return result;
}
// Get variable
match document.get_variable(name.as_str()) {
None => {
match VariableRule::validate_name(&parser.colors(), name.as_str())
{
Err(msg) =>
{
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Unknown variable name")
.with_label(
Label::new((token.source(), name.range()))
.with_message(format!(
"Unable to find variable with name: `{}`",
name.as_str().fg(state.parser.colors().highlight)
))
.with_color(state.parser.colors().error),
)
.finish(),
);
.with_message("Invalid variable name")
.with_label(
Label::new((token.source(), name.range()))
.with_message(msg)
.with_color(parser.colors().error))
.finish());
return result;
}
Some(var) => var,
}
}
_ => panic!("Unknown error"),
};
_ => {},
}
// Get variable
match document.get_variable(name.as_str())
{
None => {
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Unknown variable name")
.with_label(
Label::new((token.source(), name.range()))
.with_message(format!("Unable to find variable with name: `{}`",
name.as_str().fg(parser.colors().highlight)))
.with_color(parser.colors().error))
.finish());
return result;
}
Some(var) => var,
}
},
_ => panic!("Unknown error")
};
variable.parse(state, token, document);
variable.parse(token, parser, document);
result
}
return result;
}
// TODO
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
}

View file

@ -1,6 +1,6 @@
use std::{cell::{Ref, RefCell, RefMut}, collections::HashMap, rc::Rc};
use std::{cell::{RefCell, RefMut}, collections::HashMap, rc::Rc};
use crate::{document::{customstyle::{CustomStyle, CustomStyleHolder}, document::Document, element::Element, layout::{LayoutHolder, LayoutType}, style::{ElementStyle, StyleHolder}}, lua::kernel::{Kernel, KernelHolder}, parser::{parser::{Parser, ReportColors}, rule::Rule, source::{Cursor, Source}, state::StateHolder}};
use crate::{document::{document::Document, element::Element}, lua::kernel::{Kernel, KernelHolder}, parser::{parser::{Parser, ReportColors}, rule::Rule, source::{Cursor, Source}, state::StateHolder}};
#[derive(Debug, Clone)]
pub struct LineCursor
@ -94,3 +94,55 @@ impl From<&LineCursor> for Cursor
}
}
}
#[derive(Debug)]
pub struct LsParser
{
rules: Vec<Box<dyn Rule>>,
colors: ReportColors,
// Parser state
pub state: RefCell<StateHolder>,
pub kernels: RefCell<HashMap<String, Kernel>>,
}
impl Parser for LsParser
{
fn colors(&self) -> &ReportColors { &self.colors }
fn rules(&self) -> &Vec<Box<dyn Rule>> { &self.rules }
fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>> { &mut self.rules }
fn state(&self) -> std::cell::Ref<'_, StateHolder> { self.state.borrow() }
fn state_mut(&self) -> std::cell::RefMut<'_, StateHolder> { self.state.borrow_mut() }
fn has_error(&self) -> bool { true }
fn push<'a>(&self, doc: &dyn Document, elem: Box<dyn Element>) {
todo!()
}
fn parse<'a>(&self, source: Rc<dyn Source>, parent: Option<&'a dyn Document<'a>>) -> Box<dyn Document<'a>+'a> {
todo!()
}
fn parse_into<'a>(&self, source: Rc<dyn Source>, document: &'a dyn Document<'a>) {
todo!()
}
}
impl KernelHolder for LsParser
{
fn get_kernel(&self, name: &str)
-> Option<RefMut<'_, Kernel>> {
RefMut::filter_map(self.kernels.borrow_mut(),
|map| map.get_mut(name)).ok()
}
fn insert_kernel(&self, name: String, kernel: Kernel)
-> RefMut<'_, Kernel> {
// TODO: avoid re-fetching the kernel after inserting it
self.kernels.borrow_mut()
.insert(name.clone(), kernel);
self.get_kernel(name.as_str()).unwrap()
}
}

View file

@ -1,57 +1,63 @@
use std::cell::RefCell;
use std::collections::HashMap;
use std::cell::{RefCell, RefMut};
use mlua::Lua;
use crate::document::document::Document;
use crate::parser::parser::Parser;
use crate::parser::parser::ParserState;
use crate::parser::source::Token;
use crate::{document::document::Document, parser::{parser::Parser, source::Token}};
pub struct KernelContext<'a, 'b, 'c> {
pub struct KernelContext<'a, 'b>
{
pub location: Token,
pub state: &'a ParserState<'a, 'b>,
pub document: &'c dyn Document<'c>,
pub parser: &'a dyn Parser,
pub document: &'b dyn Document<'b>,
//pub parser: &'a dyn Parser,
}
thread_local! {
pub static CTX: RefCell<Option<KernelContext<'static, 'static, 'static>>> = const { RefCell::new(None) };
pub static CTX: RefCell<Option<KernelContext<'static, 'static>>> = RefCell::new(None);
}
#[derive(Debug)]
pub struct Kernel {
pub struct Kernel
{
lua: Lua,
}
impl Kernel {
pub fn new(parser: &dyn Parser) -> Self {
// TODO: Take parser as arg and
// iterate over the rules
// to find and export the bindings (if any)
pub fn new(parser: &dyn Parser) -> Self {
let lua = Lua::new();
{
let nml_table = lua.create_table().unwrap();
for rule in parser.rules() {
for rule in parser.rules()
{
let table = lua.create_table().unwrap();
// TODO: Export this so we can check for duplicate rules based on this name
let name = rule.name().to_lowercase().replace(' ', "_");
for (fun_name, fun) in rule.register_bindings(&lua) {
let name = rule.name().to_lowercase();
for (fun_name, fun) in rule.lua_bindings(&lua)
{
table.set(fun_name, fun).unwrap();
}
nml_table.set(name, table).unwrap();
}
lua.globals().set("nml", nml_table).unwrap();
}
Self { lua }
}
}
/// Runs a procedure with a context
///
/// This is the only way Lua code should be run, because exported
/// functions may require the context in order to operate
pub fn run_with_context<T, F>(&self, context: KernelContext, f: F) -> T
pub fn run_with_context<T, F>(&self, context: KernelContext, f: F)
-> T
where
F: FnOnce(&Lua) -> T,
F: FnOnce(&Lua) -> T
{
CTX.set(Some(unsafe { std::mem::transmute(context) }));
let ret = f(&self.lua);
@ -61,15 +67,9 @@ impl Kernel {
}
}
#[derive(Default)]
pub struct KernelHolder {
kernels: HashMap<String, Kernel>,
}
pub trait KernelHolder
{
fn get_kernel(&self, name: &str) -> Option<RefMut<'_, Kernel>>;
impl KernelHolder {
pub fn get(&self, kernel_name: &str) -> Option<&Kernel> { self.kernels.get(kernel_name) }
pub fn insert(&mut self, kernel_name: String, kernel: Kernel) {
self.kernels.insert(kernel_name, kernel);
}
fn insert_kernel(&self, name: String, kernel: Kernel) -> RefMut<'_, Kernel>;
}
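
For reference, a self-contained sketch of the table layout `Kernel::new` builds: one global `nml` table containing a sub-table per rule, which in turn holds that rule's exported functions. The rule name (`section`) and binding (`push`) below are made up for illustration:

```rust
use mlua::Lua;

fn main() -> mlua::Result<()> {
    let lua = Lua::new();
    let nml = lua.create_table()?;

    // Hypothetical "section" rule exporting a single binding.
    let section = lua.create_table()?;
    section.set("push", lua.create_function(|_, title: String| Ok(title.len()))?)?;
    nml.set("section", section)?;
    lua.globals().set("nml", nml)?;

    // Lua code run through the kernel can now call the exported binding.
    let len: usize = lua.load(r#"return nml.section.push("Introduction")"#).eval()?;
    assert_eq!(len, 12);
    Ok(())
}
```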


@ -1,3 +1,4 @@
#![feature(char_indices_offset)]
mod cache;
mod compiler;
mod document;
@ -8,13 +9,23 @@ mod parser;
use std::env;
use std::io::BufWriter;
use std::io::Write;
use std::path::PathBuf;
use std::process::ExitCode;
use std::rc::Rc;
use std::time::UNIX_EPOCH;
use compiler::compiler::CompiledDocument;
use compiler::compiler::Compiler;
use compiler::compiler::Target;
use compiler::navigation::create_navigation;
use document::document::Document;
use getopts::Options;
use parser::langparser::LangParser;
use parser::parser::Parser;
use rusqlite::Connection;
use walkdir::WalkDir;
use crate::parser::source::SourceFile;
extern crate getopts;
fn print_usage(program: &str, opts: Options) {
@ -36,6 +47,122 @@ NML version: 0.4\n"
);
}
fn parse(input: &str, debug_opts: &Vec<String>) -> Result<Box<dyn Document<'static>>, String> {
println!("Parsing {input}...");
let parser = LangParser::default();
// Parse
let source = SourceFile::new(input.to_string(), None).unwrap();
let doc = parser.parse(Rc::new(source), None);
if debug_opts.contains(&"ast".to_string()) {
println!("-- BEGIN AST DEBUGGING --");
doc.content()
.borrow()
.iter()
.for_each(|elem| println!("{}", (elem).to_string()));
println!("-- END AST DEBUGGING --");
}
if debug_opts.contains(&"ref".to_string()) {
println!("-- BEGIN REFERENCES DEBUGGING --");
let sc = doc.scope().borrow();
sc.referenceable.iter().for_each(|(name, reference)| {
println!(" - {name}: `{:#?}`", doc.get_from_reference(reference));
});
println!("-- END REFERENCES DEBUGGING --");
}
if debug_opts.contains(&"var".to_string()) {
println!("-- BEGIN VARIABLES DEBUGGING --");
let sc = doc.scope().borrow();
sc.variables.iter().for_each(|(_name, var)| {
println!(" - `{:#?}`", var);
});
println!("-- END VARIABLES DEBUGGING --");
}
if parser.has_error() {
return Err("Parsing aborted due to errors".to_string());
}
Ok(doc)
}
fn process(
target: Target,
files: Vec<PathBuf>,
db_path: &Option<String>,
force_rebuild: bool,
debug_opts: &Vec<String>,
) -> Result<Vec<CompiledDocument>, String> {
let mut compiled = vec![];
let current_dir = std::env::current_dir()
.map_err(|err| format!("Unable to get the current working directory: {err}"))?;
let con = db_path
.as_ref()
.map_or(Connection::open_in_memory(), |path| Connection::open(path))
.map_err(|err| format!("Unable to open connection to the database: {err}"))?;
CompiledDocument::init_cache(&con)
.map_err(|err| format!("Failed to initialize cached document table: {err}"))?;
for file in files {
let meta = std::fs::metadata(&file)
.map_err(|err| format!("Failed to get metadata for `{file:#?}`: {err}"))?;
let modified = meta
.modified()
.map_err(|err| format!("Unable to query modification time for `{file:#?}`: {err}"))?;
// Move to file's directory
let file_parent_path = file
.parent()
.ok_or(format!("Failed to get parent path for `{file:#?}`"))?;
std::env::set_current_dir(file_parent_path)
.map_err(|err| format!("Failed to move to path `{file_parent_path:#?}`: {err}"))?;
let parse_and_compile = || -> Result<CompiledDocument, String> {
// Parse
let doc = parse(file.to_str().unwrap(), debug_opts)?;
// Compile
let compiler = Compiler::new(target, db_path.clone());
let mut compiled = compiler.compile(&*doc);
// Insert into cache
compiled.mtime = modified.duration_since(UNIX_EPOCH).unwrap().as_secs();
compiled.insert_cache(&con).map_err(|err| {
format!("Failed to insert compiled document from `{file:#?}` into cache: {err}")
})?;
Ok(compiled)
};
let cdoc = if force_rebuild {
parse_and_compile()?
} else {
match CompiledDocument::from_cache(&con, file.to_str().unwrap()) {
Some(compiled) => {
if compiled.mtime < modified.duration_since(UNIX_EPOCH).unwrap().as_secs() {
parse_and_compile()?
} else {
compiled
}
}
None => parse_and_compile()?,
}
};
compiled.push(cdoc);
}
std::env::set_current_dir(current_dir)
.map_err(|err| format!("Failed to set current directory: {err}"))?;
Ok(compiled)
}
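
The caching decision above reduces to a single staleness test: a cached document is reused only when its stored mtime is at least as recent as the file's current modification time. A small sketch of that predicate, assuming the same seconds-since-UNIX-epoch encoding used for `CompiledDocument::mtime`:

```rust
use std::time::{SystemTime, UNIX_EPOCH};

// Returns true when the on-disk file is newer than the cached entry,
// i.e. the document has to be parsed and compiled again.
fn needs_rebuild(cached_mtime: u64, file_modified: SystemTime) -> bool {
    let file_mtime = file_modified
        .duration_since(UNIX_EPOCH)
        .expect("file mtime before UNIX epoch")
        .as_secs();
    cached_mtime < file_mtime
}
```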
fn main() -> ExitCode {
let args: Vec<String> = env::args().collect();
let program = args[0].clone();
@ -72,7 +199,7 @@ fn main() -> ExitCode {
let input_meta = match std::fs::metadata(&input) {
Ok(meta) => meta,
Err(e) => {
eprintln!("Unable to get metadata for input `{input}`: {e}");
eprintln!("Unable to get metadata for input: `{input}`");
return ExitCode::FAILURE;
}
};
@ -89,14 +216,9 @@ fn main() -> ExitCode {
}
}
match std::fs::metadata(&output) {
Ok(output_meta) => {
if !output_meta.is_dir() {
eprintln!("Input is a directory, but output is not a directory, halting");
return ExitCode::FAILURE;
}
}
Ok(_) => {}
Err(e) => {
eprintln!("Unable to get metadata for output `{output}`: {e}");
eprintln!("Unable to get metadata for output: `{output}`");
return ExitCode::FAILURE;
}
}
@ -104,7 +226,7 @@ fn main() -> ExitCode {
let output_meta = match std::fs::metadata(&output) {
Ok(meta) => meta,
Err(e) => {
eprintln!("Unable to get metadata for output `{output}`: {e}");
eprintln!("Unable to get metadata for output: `{output}`");
return ExitCode::FAILURE;
}
};
@ -180,7 +302,7 @@ fn main() -> ExitCode {
}
}
Err(e) => {
eprintln!("Failed to get metadata for `{entry:#?}`: {e}");
eprintln!("Failed to get metadata for `{entry:#?}`");
return ExitCode::FAILURE;
}
}
@ -213,21 +335,19 @@ fn main() -> ExitCode {
}
// Parse, compile using the cache
let processed =
match compiler::process::process(Target::HTML, files, &db_path, force_rebuild, &debug_opts)
{
Ok(processed) => processed,
Err(e) => {
eprintln!("{e}");
return ExitCode::FAILURE;
}
};
let compiled = match process(Target::HTML, files, &db_path, force_rebuild, &debug_opts) {
Ok(compiled) => compiled,
Err(e) => {
eprintln!("{e}");
return ExitCode::FAILURE;
}
};
if input_meta.is_dir()
// Batch mode
{
// Build navigation
let navigation = match create_navigation(&processed) {
let navigation = match create_navigation(&compiled) {
Ok(nav) => nav,
Err(e) => {
eprintln!("{e}");
@ -236,54 +356,38 @@ fn main() -> ExitCode {
};
// Output
for (doc, _) in &processed {
for doc in compiled {
let out_path = match doc
.borrow()
.get_variable("compiler.output")
.or(input_meta.is_file().then_some(&output))
{
Some(path) => path.clone(),
None => {
eprintln!("Unable to get output file for `{}`", doc.borrow().input);
eprintln!("Unable to get output file for `{}`", doc.input);
continue;
}
};
let nav = navigation.compile(Target::HTML, doc);
let nav = navigation.compile(Target::HTML, &doc);
let file = std::fs::File::create(output.clone() + "/" + out_path.as_str()).unwrap();
let mut writer = BufWriter::new(file);
write!(
writer,
"{}{}{}{}",
doc.borrow().header,
nav,
doc.borrow().body,
doc.borrow().footer
)
.unwrap();
write!(writer, "{}{}{}{}", doc.header, nav, doc.body, doc.footer).unwrap();
writer.flush().unwrap();
}
} else
// Single file
{
for (doc, _) in &processed {
for doc in compiled {
let file = std::fs::File::create(output.clone()).unwrap();
let mut writer = BufWriter::new(file);
write!(
writer,
"{}{}{}",
doc.borrow().header,
doc.borrow().body,
doc.borrow().footer
)
.unwrap();
write!(writer, "{}{}{}", doc.header, doc.body, doc.footer).unwrap();
writer.flush().unwrap();
}
}
ExitCode::SUCCESS
return ExitCode::SUCCESS;
}


@ -1,62 +0,0 @@
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use std::ops::Deref;
use ariadne::Report;
use crate::document::document::Document;
use crate::parser::source::Source;
use crate::parser::source::Token;
use super::parser::ParserState;
#[derive(Debug, PartialEq, Eq)]
pub enum CustomStyleToken {
Toggle(String),
Pair(String, String),
}
pub trait CustomStyle: core::fmt::Debug {
/// Name for the custom style
fn name(&self) -> &str;
/// Gets the begin and end token for a custom style
fn tokens(&self) -> &CustomStyleToken;
fn on_start<'a>(
&self,
location: Token,
state: &ParserState,
document: &'a (dyn Document<'a> + 'a),
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>>;
fn on_end<'a>(
&self,
location: Token,
state: &ParserState,
document: &'a (dyn Document<'a> + 'a),
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>>;
}
#[derive(Default)]
pub struct CustomStyleHolder {
custom_styles: HashMap<String, Rc<dyn CustomStyle>>,
}
impl CustomStyleHolder {
pub fn get(&self, style_name: &str) -> Option<Rc<dyn CustomStyle>> {
self.custom_styles
.get(style_name).cloned()
}
pub fn insert(&mut self, style: Rc<dyn CustomStyle>) {
self.custom_styles.insert(style.name().into(), style);
}
}
impl Deref for CustomStyleHolder {
type Target = HashMap<String, Rc<dyn CustomStyle>>;
fn deref(&self) -> &Self::Target {
&self.custom_styles
}
}


@ -1,18 +1,35 @@
use std::cell::RefCell;
use std::cell::RefMut;
use std::collections::HashMap;
use std::collections::HashSet;
use std::ops::Range;
use std::rc::Rc;
use ariadne::Label;
use ariadne::Report;
use crate::document::document::Document;
use crate::document::document::DocumentAccessors;
use crate::document::element::ContainerElement;
use crate::document::element::DocumentEnd;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::document::langdocument::LangDocument;
use crate::elements::paragraph::Paragraph;
use crate::elements::registrar::register;
use crate::elements::text::Text;
use crate::lua::kernel::Kernel;
use crate::lua::kernel::KernelHolder;
use crate::parser::source::SourceFile;
use crate::parser::source::VirtualSource;
use super::parser::Parser;
use super::parser::ParserState;
use super::parser::ReportColors;
use super::rule::Rule;
use super::source::Cursor;
use super::source::Source;
use super::source::Token;
use super::state::StateHolder;
use super::util;
/// Parser for the language
@ -23,6 +40,8 @@ pub struct LangParser {
// Parser state
pub err_flag: RefCell<bool>,
pub state: RefCell<StateHolder>,
pub kernels: RefCell<HashMap<String, Kernel>>,
}
impl LangParser {
@ -31,16 +50,73 @@ impl LangParser {
rules: vec![],
colors: ReportColors::with_colors(),
err_flag: RefCell::new(false),
state: RefCell::new(StateHolder::new()),
kernels: RefCell::new(HashMap::new()),
};
register(&mut s);
// Register rules
for rule in super::rule::get_rule_registry()
{
s.add_rule(rule).unwrap();
}
s.kernels
.borrow_mut()
.insert("main".to_string(), Kernel::new(&s));
s
}
fn handle_reports<'a>(
&self,
_source: Rc<dyn Source>,
reports: Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>>,
) {
for mut report in reports {
let mut sources: HashSet<Rc<dyn Source>> = HashSet::new();
fn recurse_source(sources: &mut HashSet<Rc<dyn Source>>, source: Rc<dyn Source>) {
sources.insert(source.clone());
match source.location() {
Some(parent) => {
let parent_source = parent.source();
if sources.get(&parent_source).is_none() {
recurse_source(sources, parent_source);
}
}
None => {}
}
}
report.labels.iter().for_each(|label| {
recurse_source(&mut sources, label.span.0.clone());
});
let cache = sources
.iter()
.map(|source| (source.clone(), source.content().clone()))
.collect::<Vec<(Rc<dyn Source>, String)>>();
cache.iter().for_each(|(source, _)| {
if let Some(location) = source.location() {
if let Some(_s) = source.downcast_ref::<SourceFile>() {
report.labels.push(
Label::new((location.source(), location.start() + 1..location.end()))
.with_message("In file included from here")
.with_order(-1),
);
};
if let Some(_s) = source.downcast_ref::<VirtualSource>() {
let start = location.start()
+ (location.source().content().as_bytes()[location.start()]
== '\n' as u8)
.then_some(1)
.unwrap_or(0);
report.labels.push(
Label::new((location.source(), start..location.end()))
.with_message("In evaluation of")
.with_order(-1),
);
};
}
});
report.eprint(ariadne::sources(cache)).unwrap()
}
}
}
impl Parser for LangParser {
@ -49,15 +125,50 @@ impl Parser for LangParser {
fn rules(&self) -> &Vec<Box<dyn Rule>> { &self.rules }
fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>> { &mut self.rules }
fn state(&self) -> std::cell::Ref<'_, StateHolder> { self.state.borrow() }
fn state_mut(&self) -> std::cell::RefMut<'_, StateHolder> { self.state.borrow_mut() }
fn has_error(&self) -> bool { *self.err_flag.borrow() }
fn parse<'p, 'a, 'doc>(
&'p self,
state: ParserState<'p, 'a>,
/// Add an [`Element`] to the [`Document`]
fn push<'a>(&self, doc: &dyn Document, elem: Box<dyn Element>) {
if elem.kind() == ElemKind::Inline || elem.kind() == ElemKind::Invisible {
let mut paragraph = doc
.last_element_mut::<Paragraph>()
.or_else(|| {
doc.push(Box::new(Paragraph {
location: elem.location().clone(),
content: Vec::new(),
}));
doc.last_element_mut::<Paragraph>()
})
.unwrap();
paragraph.push(elem).unwrap();
} else {
// Process paragraph events
if doc.last_element::<Paragraph>().is_some_and(|_| true) {
self.handle_reports(
doc.source(),
self.state_mut()
.on_scope_end(self, doc, super::state::Scope::PARAGRAPH),
);
}
doc.push(elem);
}
}
fn parse<'a>(
&self,
source: Rc<dyn Source>,
parent: Option<&'doc dyn Document<'doc>>,
) -> (Box<dyn Document<'doc> + 'doc>, ParserState<'p, 'a>) {
parent: Option<&'a dyn Document<'a>>,
) -> Box<dyn Document<'a> + 'a> {
let doc = LangDocument::new(source.clone(), parent);
let mut matches = Vec::new();
for _ in 0..self.rules.len() {
matches.push((0usize, None));
}
let content = source.content();
let mut cursor = Cursor::new(0usize, doc.source()); // Cursor in file
@ -65,21 +176,21 @@ impl Parser for LangParser {
if let Some(parent) = parent
// Terminate parent's paragraph state
{
self.handle_reports(state.shared.rule_state.borrow_mut().on_scope_end(
&state,
parent,
super::state::Scope::PARAGRAPH,
));
self.handle_reports(
parent.source(),
self.state_mut()
.on_scope_end(self, parent, super::state::Scope::PARAGRAPH),
);
}
loop {
let (rule_pos, mut result) = state.update_matches(&cursor);
let (rule_pos, rule, match_data) = self.update_matches(&cursor, &mut matches);
// Unmatched content
let text_content =
util::process_text(&doc, &content.as_str()[cursor.pos..rule_pos.pos]);
if !text_content.is_empty() {
state.push(
self.push(
&doc,
Box::new(Text::new(
Token::new(cursor.pos..rule_pos.pos, source.clone()),
@ -88,13 +199,12 @@ impl Parser for LangParser {
);
}
if let Some((rule_index, match_data)) = result.take() {
if let Some(rule) = rule {
// Rule callback
let dd: &'a dyn Document = unsafe { std::mem::transmute(&doc as &dyn Document) };
let (new_cursor, reports) =
self.rules[rule_index].on_match(&state, dd, rule_pos, match_data);
let (new_cursor, reports) = rule.on_match(self, dd, rule_pos, match_data);
self.handle_reports(reports);
self.handle_reports(doc.source(), reports);
// Advance
cursor = new_cursor;
@ -105,14 +215,14 @@ impl Parser for LangParser {
}
}
// Rule States
self.handle_reports(state.shared.rule_state.borrow_mut().on_scope_end(
&state,
&doc,
super::state::Scope::DOCUMENT,
));
// State
self.handle_reports(
doc.source(),
self.state_mut()
.on_scope_end(self, &doc, super::state::Scope::DOCUMENT),
);
state.push(
self.push(
&doc,
Box::new(DocumentEnd(Token::new(
doc.source().content().len()..doc.source().content().len(),
@ -120,26 +230,26 @@ impl Parser for LangParser {
))),
);
(Box::new(doc), state)
return Box::new(doc);
}
fn parse_into<'p, 'a, 'doc>(
&'p self,
state: ParserState<'p, 'a>,
source: Rc<dyn Source>,
document: &'doc dyn Document<'doc>,
) -> ParserState<'p, 'a> {
fn parse_into<'a>(&self, source: Rc<dyn Source>, document: &'a dyn Document<'a>) {
let mut matches = Vec::new();
for _ in 0..self.rules.len() {
matches.push((0usize, None));
}
let content = source.content();
let mut cursor = Cursor::new(0usize, source.clone());
loop {
let (rule_pos, mut result) = state.update_matches(&cursor);
let (rule_pos, rule, match_data) = self.update_matches(&cursor, &mut matches);
// Unmatched content
let text_content =
util::process_text(document, &content.as_str()[cursor.pos..rule_pos.pos]);
if !text_content.is_empty() {
state.push(
self.push(
document,
Box::new(Text::new(
Token::new(cursor.pos..rule_pos.pos, source.clone()),
@ -148,12 +258,11 @@ impl Parser for LangParser {
);
}
if let Some((rule_index, match_data)) = result.take() {
if let Some(rule) = rule {
// Rule callback
let (new_cursor, reports) =
self.rules[rule_index].on_match(&state, document, rule_pos, match_data);
let (new_cursor, reports) = (*rule).on_match(self, document, rule_pos, match_data);
self.handle_reports(reports);
self.handle_reports(document.source(), reports);
// Advance
cursor = new_cursor;
@ -164,7 +273,6 @@ impl Parser for LangParser {
}
}
state
// State
//self.handle_reports(source.clone(),
// self.state_mut().on_scope_end(&self, &document, super::state::Scope::DOCUMENT));
@ -172,3 +280,15 @@ impl Parser for LangParser {
//return doc;
}
}
impl KernelHolder for LangParser {
fn get_kernel(&self, name: &str) -> Option<RefMut<'_, Kernel>> {
RefMut::filter_map(self.kernels.borrow_mut(), |map| map.get_mut(name)).ok()
}
fn insert_kernel(&self, name: String, kernel: Kernel) -> RefMut<'_, Kernel> {
//TODO do not get
self.kernels.borrow_mut().insert(name.clone(), kernel);
self.get_kernel(name.as_str()).unwrap()
}
}


@ -1,45 +0,0 @@
use std::any::Any;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use crate::compiler::compiler::Compiler;
use crate::document::document::Document;
use crate::elements::layout::LayoutToken;
/// Represents the type of a layout
pub trait LayoutType: core::fmt::Debug {
/// Name of the layout
fn name(&self) -> &'static str;
/// Parses layout properties
fn parse_properties(&self, properties: &str) -> Result<Option<Box<dyn Any>>, String>;
/// Expected number of blocks
fn expects(&self) -> Range<usize>;
/// Compile layout
fn compile(
&self,
token: LayoutToken,
id: usize,
properties: &Option<Box<dyn Any>>,
compiler: &Compiler,
document: &dyn Document,
) -> Result<String, String>;
}
#[derive(Default)]
pub struct LayoutHolder {
layouts: HashMap<String, Rc<dyn LayoutType>>,
}
impl LayoutHolder {
pub fn get(&self, layout_name: &str) -> Option<Rc<dyn LayoutType>> {
self.layouts.get(layout_name).cloned()
}
pub fn insert(&mut self, layout: Rc<dyn LayoutType>) {
self.layouts.insert(layout.name().into(), layout);
}
}


@ -4,6 +4,3 @@ pub mod rule;
pub mod source;
pub mod state;
pub mod util;
pub mod style;
pub mod layout;
pub mod customstyle;


@ -1,29 +1,16 @@
use ariadne::Label;
use ariadne::Report;
use std::any::Any;
use std::cell::RefCell;
use std::collections::HashSet;
use std::ops::Range;
use std::cell::Ref;
use std::cell::RefMut;
use std::rc::Rc;
use unicode_segmentation::UnicodeSegmentation;
use super::customstyle::CustomStyleHolder;
use super::layout::LayoutHolder;
use super::rule::Rule;
use super::source::Cursor;
use super::source::Source;
use super::state::RuleStateHolder;
use super::style::StyleHolder;
use super::state::StateHolder;
use crate::document::document::Document;
use crate::document::document::DocumentAccessors;
use crate::document::element::ContainerElement;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::elements::paragraph::Paragraph;
use crate::lua::kernel::Kernel;
use crate::lua::kernel::KernelHolder;
use crate::parser::source::SourceFile;
use crate::parser::source::VirtualSource;
use ariadne::Color;
#[derive(Debug)]
@ -54,141 +41,79 @@ impl ReportColors {
}
}
/// The state that is shared with the state's children
pub struct SharedState {
pub rule_state: RefCell<RuleStateHolder>,
/// The lua [`Kernel`]s
pub kernels: RefCell<KernelHolder>,
/// The styles
pub styles: RefCell<StyleHolder>,
/// The layouts
pub layouts: RefCell<LayoutHolder>,
/// The custom styles
pub custom_styles: RefCell<CustomStyleHolder>,
}
impl SharedState {
/// Construct a new empty shared state
pub(self) fn new(parser: &dyn Parser) -> Self {
let s = Self {
rule_state: RefCell::new(RuleStateHolder::default()),
kernels: RefCell::new(KernelHolder::default()),
styles: RefCell::new(StyleHolder::default()),
layouts: RefCell::new(LayoutHolder::default()),
custom_styles: RefCell::new(CustomStyleHolder::default()),
};
// Register default kernel
s.kernels
.borrow_mut()
.insert("main".to_string(), Kernel::new(parser));
// Default styles & layouts
parser.rules().iter().for_each(|rule| {
rule.register_styles(&mut s.styles.borrow_mut());
rule.register_layouts(&mut s.layouts.borrow_mut());
});
s
}
}
/// The state of the parser
pub struct ParserState<'a, 'b> {
/// The parser for which this state exists
pub parser: &'a dyn Parser,
/// The (optional) parent state
parent: Option<&'b ParserState<'a, 'b>>,
/// The position of the matches in the current state
matches: RefCell<Vec<(usize, Option<Box<dyn Any>>)>>,
/// State shared among all states
pub shared: Rc<SharedState>,
}
/// Represents the state of the parser
///
/// This state has some shared data from [`SharedState`] which gets shared
/// with the children of that state, see [`ParserState::with_state`]
impl<'a, 'b> ParserState<'a, 'b> {
/// Constructs a new state for a given parser with an optional parent
pub trait Parser: KernelHolder {
/// Gets the colors for formatting errors
///
/// Parent should be None when parsing a brand new document. If you have to
/// set the parent to Some(..) (e.g. for imports or sub-documents), be sure
/// to use the [`ParserState::with_state`] method instead; this creates a
/// RAII-scoped state that lives within a bounded lifetime.
pub fn new(parser: &'a dyn Parser, parent: Option<&'a ParserState<'a, 'b>>) -> Self {
let matches = parser.rules().iter().map(|_| (0, None)).collect::<Vec<_>>();
let shared = if let Some(parent) = &parent {
parent.shared.clone()
} else {
Rc::new(SharedState::new(parser))
};
/// When colors are disabled, all colors should resolve to empty string
fn colors(&self) -> &ReportColors;
Self {
parser,
parent,
matches: RefCell::new(matches),
shared,
fn rules(&self) -> &Vec<Box<dyn Rule>>;
fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>>;
fn add_rule(&mut self, rule: Box<dyn Rule>, after: Option<&'static str>) -> Result<(), String> {
// Error on duplicate rule
let rule_name = (*rule).name();
if let Err(e) = self.rules().iter().try_for_each(|rule| {
if (*rule).name() != rule_name {
return Ok(());
}
return Err(format!(
"Attempted to introduce duplicate rule: `{rule_name}`"
));
}) {
return Err(e);
}
match after {
Some(name) => {
let before = self
.rules()
.iter()
.enumerate()
.find(|(_pos, r)| (r).name() == name);
match before {
Some((pos, _)) => self.rules_mut().insert(pos + 1, rule),
_ => {
return Err(format!(
"Unable to find rule named `{name}`, to insert rule `{}` after it",
rule.name()
))
}
}
}
_ => self.rules_mut().push(rule),
}
Ok(())
}
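
A hedged usage sketch of the two-argument `add_rule` shown above: the optional `after` name inserts the new rule directly behind an existing one, which is how relative priority is controlled. The rule type and names below are hypothetical:

```rust
// Hypothetical registration at parser construction time; `MyRule` is a
// made-up type implementing `Rule`, "Section" is an already-registered rule.
parser.add_rule(Box::new(MyRule::default()), Some("Section"))
    .expect("duplicate or unknown rule name");
```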
/// Runs a procedure with a new state that inherits the [`SharedState`] state from [`self`]
///
/// Note: When parsing a new document, create a new state, then the parsing process
/// creates states using this method
pub fn with_state<F, R>(&self, f: F) -> R
where
F: FnOnce(ParserState) -> R,
{
let new_state = ParserState::new(self.parser, Some(self));
f(new_state)
}
fn state(&self) -> Ref<'_, StateHolder>;
fn state_mut(&self) -> RefMut<'_, StateHolder>;
/// Updates matches from a given start position e.g [`Cursor`]
///
/// # Return
///
/// 1. The cursor position after updating the matches
/// 2. (Optional) The winning match with its match data
/// If the winning match is None, it means that the document has no more
/// rule to match. I.e The rest of the content should be added as a
/// [`Text`] element.
/// The match data should be passed to the [`Rule::on_match`] method.
///
/// # Strategy
///
/// This function calls [`Rule::next_match`] on the rules defined for the
/// parser. It then takes the rule that has the closest `next_match` and
/// returns it. If next_match starts on an escaped character i.e `\\`,
/// then it starts over to find another match for that rule.
/// In case multiple rules have the same `next_match`, the rules that are
/// defined first in the parser are prioritized. See [Parser::add_rule] for
/// information on how to prioritize rules.
///
/// Note that the result of every call to [`Rule::next_match`] gets stored
/// in a table: [`ParserState::matches`]. Until the cursor steps over a
/// position in the table, `next_match` won't be called.
pub fn update_matches(&self, cursor: &Cursor) -> (Cursor, Option<(usize, Box<dyn Any>)>) {
let mut matches_borrow = self.matches.borrow_mut();
fn has_error(&self) -> bool;
self.parser
.rules()
// Updates [`matches`] and returns the position of the next matched rule.
// If the rule is None, it means that there are no rules left to parse (i.e.
// end of document).
fn update_matches(
&self,
cursor: &Cursor,
matches: &mut Vec<(usize, Option<Box<dyn Any>>)>,
) -> (Cursor, Option<&Box<dyn Rule>>, Option<Box<dyn Any>>) {
// Update matches
// TODO: Trivially parallelizable
self.rules()
.iter()
.zip(matches_borrow.iter_mut())
.zip(matches.iter_mut())
.for_each(|(rule, (matched_at, match_data))| {
// Don't update if not stepped over yet
if *matched_at > cursor.pos {
return;
}
(*matched_at, *match_data) = match rule.next_match(self, cursor) {
(*matched_at, *match_data) = match rule.next_match(cursor) {
None => (usize::MAX, None),
Some((mut pos, mut data)) => {
// Check if escaped
@ -198,7 +123,7 @@ impl<'a, 'b> ParserState<'a, 'b> {
let mut escaped = false;
'inner: loop {
let g = graphemes.next_back();
if g.is_none() || g.unwrap() != "\\" {
if !g.is_some() || g.unwrap() != "\\" {
break 'inner;
}
@ -209,7 +134,7 @@ impl<'a, 'b> ParserState<'a, 'b> {
}
// Find next potential match
(pos, data) = match rule.next_match(self, &cursor.at(pos + 1)) {
(pos, data) = match rule.next_match(&cursor.at(pos + 1)) {
Some((new_pos, new_data)) => (new_pos, new_data),
None => (usize::MAX, data), // Stop iterating
}
@ -221,216 +146,36 @@ impl<'a, 'b> ParserState<'a, 'b> {
});
// Get winning match
let (winner, next_pos) = matches_borrow
let (winner, (next_pos, _match_data)) = matches
.iter()
.enumerate()
.min_by_key(|(_, (pos, _))| pos)
.map(|(winner, (pos, _))| (winner, *pos))
.min_by_key(|(_, (pos, _match_data))| pos)
.unwrap();
if next_pos == usize::MAX
if *next_pos == usize::MAX
// No rule has matched
{
let content = cursor.source.content();
// No winners, i.e no matches left
return (cursor.at(content.len()), None);
return (cursor.at(content.len()), None, None);
}
(
cursor.at(next_pos),
Some((winner, matches_borrow[winner].1.take().unwrap())),
cursor.at(*next_pos),
Some(&self.rules()[winner]),
std::mem::replace(&mut matches[winner].1, None),
)
}
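
The tie-breaking behaviour described above (rules defined first in the parser win) follows from `Iterator::min_by_key`, which returns the first of several equal minima. A tiny self-contained illustration:

```rust
fn main() {
    // Two rules whose cached matches start at the same position: the one
    // registered first (index 0) wins, because min_by_key keeps the first
    // minimum it encounters.
    let matches: [(usize, &str); 2] = [(5, "Comment"), (5, "Text")];
    let (winner, _) = matches
        .iter()
        .enumerate()
        .min_by_key(|(_, (pos, _))| *pos)
        .unwrap();
    assert_eq!(winner, 0);
}
```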
/// Add an [`Element`] to the [`Document`]
pub fn push(&self, doc: &dyn Document, elem: Box<dyn Element>) {
if elem.kind() == ElemKind::Inline || elem.kind() == ElemKind::Invisible {
let mut paragraph = doc
.last_element_mut::<Paragraph>()
.or_else(|| {
doc.push(Box::new(Paragraph {
location: elem.location().clone(),
content: Vec::new(),
}));
doc.last_element_mut::<Paragraph>()
})
.unwrap();
paragraph.push(elem).unwrap();
} else {
// Process paragraph events
if doc.last_element::<Paragraph>().is_some_and(|_| true) {
self.parser
.handle_reports(self.shared.rule_state.borrow_mut().on_scope_end(
self,
doc,
super::state::Scope::PARAGRAPH,
));
}
doc.push(elem);
}
}
/// Resets the position and the match_data for a given rule. This is used
/// to support 'dynamic' rules that may not match at first, but whose
/// matching criteria change over the course of parsing.
///
/// This function also recursively calls itself on its `parent`, in order
/// to fully reset the match.
///
/// See [`CustomStyleRule`] for an example of how this is used.
///
/// # Error
///
/// Returns an error if `rule_name` was not found in the parser's ruleset.
pub fn reset_match(&self, rule_name: &str) -> Result<(), String>
{
if self.parser.rules().iter()
.zip(self.matches.borrow_mut().iter_mut())
.try_for_each(|(rule, (match_pos, match_data))| {
if rule.name() != rule_name { return Ok(()) }
*match_pos = 0;
match_data.take();
Err(())
}).is_ok()
{
return Err(format!("Could not find rule: {rule_name}"));
}
// Recursively reset
if let Some(parent) = self.parent
{
return parent.reset_match(rule_name);
}
Ok(())
}
}
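
The escape handling mentioned in the `update_matches` documentation boils down to counting the backslashes immediately before a match: an odd count means the token is escaped and the rule must look for another match. A self-contained sketch of that check:

```rust
use unicode_segmentation::UnicodeSegmentation;

fn main() {
    // Walk graphemes backwards from the match position and count
    // consecutive `\`; an odd count means the matched token is escaped.
    fn is_escaped(content: &str, match_start: usize) -> bool {
        content[..match_start]
            .graphemes(true)
            .rev()
            .take_while(|g| *g == "\\")
            .count() % 2 == 1
    }

    assert!(is_escaped(r"foo\*", 4));   // one backslash   -> escaped
    assert!(!is_escaped(r"foo\\*", 5)); // two backslashes -> not escaped
}
```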
pub trait Parser {
/// Gets the colors for formatting errors
///
/// When colors are disabled, all colors should resolve to empty string
fn colors(&self) -> &ReportColors;
/// Gets a reference to all the [`Rule`]s defined for the parser
fn rules(&self) -> &Vec<Box<dyn Rule>>;
/// Gets a mutable reference to all the [`Rule`]s defined for the parser
fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>>;
/// Whether the parser emitted an error during its parsing process
fn has_error(&self) -> bool;
fn push<'a>(&self, doc: &dyn Document, elem: Box<dyn Element>);
/// Parse [`Source`] into a new [`Document`]
///
/// # Errors
///
/// This method will not fail because we try to optimistically recover from
/// parsing errors. However, the resulting document should not get compiled
/// if an error has happened; see [`Parser::has_error()`] for reference
///
/// # Returns
///
/// This method returns the resulting [`Document`] after parsing `source`,
/// note that the [`ParserState`] is only meant to perform testing and not
/// meant to be reused.
fn parse<'p, 'a, 'doc>(
&'p self,
state: ParserState<'p, 'a>,
fn parse<'a>(
&self,
source: Rc<dyn Source>,
parent: Option<&'doc dyn Document<'doc>>,
) -> (Box<dyn Document<'doc> + 'doc>, ParserState<'p, 'a>);
parent: Option<&'a dyn Document<'a>>,
) -> Box<dyn Document<'a> + 'a>;
/// Parse [`Source`] into an already existing [`Document`]
///
/// # Errors
///
/// This method will not fail because we try to optimistically recover from
/// parsing errors. However, the resulting document should not get compiled
/// if an error has happened; see [`Parser::has_error()`] for reference
///
/// # Returns
///
/// The returned [`ParserState`] is not meant to be reused, it's meant for
/// testing.
fn parse_into<'p, 'a, 'doc>(
&'p self,
state: ParserState<'p, 'a>,
source: Rc<dyn Source>,
document: &'doc dyn Document<'doc>,
) -> ParserState<'p, 'a>;
/// Adds a rule to the parser.
///
/// # Warning
///
/// This method must not be called if a [`ParserState`] for this parser exists.
fn add_rule(&mut self, rule: Box<dyn Rule>) -> Result<(), String> {
if self
.rules()
.iter()
.any(|other_rule| other_rule.name() == rule.name())
{
return Err(format!(
"Attempted to introduce duplicate rule: `{}`",
rule.name()
));
}
self.rules_mut().push(rule);
Ok(())
}
/// Handles the reports produced by parsing. The default is to output them
/// to stderr, but you are free to modify it.
fn handle_reports(&self, reports: Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
for mut report in reports {
let mut sources: HashSet<Rc<dyn Source>> = HashSet::new();
fn recurse_source(sources: &mut HashSet<Rc<dyn Source>>, source: Rc<dyn Source>) {
sources.insert(source.clone());
if let Some(parent) = source.location() {
let parent_source = parent.source();
if sources.get(&parent_source).is_none() {
recurse_source(sources, parent_source);
}
}
}
report.labels.iter().for_each(|label| {
recurse_source(&mut sources, label.span.0.clone());
});
let cache = sources
.iter()
.map(|source| (source.clone(), source.content().clone()))
.collect::<Vec<(Rc<dyn Source>, String)>>();
cache.iter().for_each(|(source, _)| {
if let Some(location) = source.location() {
if let Some(_s) = source.downcast_ref::<SourceFile>() {
report.labels.push(
Label::new((location.source(), location.start() + 1..location.end()))
.with_message("In file included from here")
.with_order(-1),
);
};
if let Some(_s) = source.downcast_ref::<VirtualSource>() {
let start = location.start()
+ if location.source().content().as_bytes()[location.start()]
== b'\n' { 1 } else { 0 };
report.labels.push(
Label::new((location.source(), start..location.end()))
.with_message("In evaluation of")
.with_order(-1),
);
};
}
});
report.eprint(ariadne::sources(cache)).unwrap()
}
}
fn parse_into<'a>(&self, source: Rc<dyn Source>, document: &'a dyn Document<'a>);
}


@ -1,98 +1,32 @@
use super::layout::LayoutHolder;
use super::parser::ParserState;
use super::parser::Parser;
use super::source::Cursor;
use super::source::Source;
use super::source::Token;
use super::style::StyleHolder;
use crate::document::document::Document;
use ariadne::Report;
use downcast_rs::impl_downcast;
use downcast_rs::Downcast;
use mlua::Function;
use mlua::Lua;
use std::any::Any;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
macro_rules! create_registry {
( $($construct:expr),+ $(,)? ) => {{
let mut map = HashMap::new();
$(
let boxed = Box::new($construct) as Box<dyn Rule>;
map.insert(boxed.name(), boxed);
)+
map
}};
}
/// Gets the list of all rules exported with the [`auto_registry`] proc macro.
/// Rules are sorted according to topological order using the [`Rule::previous`] method.
#[auto_registry::generate_registry(registry = "rules", target = make_rules, return_type = HashMap<&'static str, Box<dyn Rule>>, maker = create_registry)]
pub fn get_rule_registry() -> Vec<Box<dyn Rule>> {
fn cmp(
map: &HashMap<&'static str, Box<dyn Rule>>,
lname: &'static str,
rname: &'static str,
) -> std::cmp::Ordering {
let l = map.get(lname).unwrap();
let r = map.get(rname).unwrap();
if l.previous() == Some(r.name()) {
std::cmp::Ordering::Greater
} else if r.previous() == Some(l.name()) {
std::cmp::Ordering::Less
} else if l.previous().is_some() && r.previous().is_none() {
std::cmp::Ordering::Greater
} else if r.previous().is_some() && l.previous().is_none() {
std::cmp::Ordering::Less
} else if let (Some(pl), Some(pr)) = (l.previous(), r.previous()) {
cmp(map, pl, pr)
} else {
std::cmp::Ordering::Equal
}
}
let mut map = make_rules();
let mut sorted_keys = map.keys().copied().collect::<Vec<_>>();
sorted_keys.sort_by(|l, r| cmp(&map, l, r));
let mut owned = Vec::with_capacity(sorted_keys.len());
for key in sorted_keys {
let rule = map.remove(key).unwrap();
owned.push(rule);
}
owned
}
pub trait Rule: Downcast {
/// The rule name
pub trait Rule {
/// Returns rule's name
fn name(&self) -> &'static str;
/// The name of the rule that should come before this one
fn previous(&self) -> Option<&'static str>;
/// Finds the next match starting from [`cursor`]
fn next_match(&self, state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)>;
fn next_match(&self, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)>;
/// Callback when rule matches
fn on_match<'a>(
&self,
state: &ParserState,
parser: &dyn Parser,
document: &'a (dyn Document<'a> + 'a),
cursor: Cursor,
match_data: Box<dyn Any>,
match_data: Option<Box<dyn Any>>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>);
/// Registers lua bindings
fn register_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
/// Registers default styles
fn register_styles(&self, _holder: &mut StyleHolder) {}
/// Registers default layouts
fn register_layouts(&self, _holder: &mut LayoutHolder) {}
/// Export bindings to lua
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)>;
}
impl_downcast!(Rule);
impl core::fmt::Debug for dyn Rule {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@ -100,12 +34,47 @@ impl core::fmt::Debug for dyn Rule {
}
}
pub trait RegexRule {
/// The rule name
/*
pub trait RegexRule: Rule
{
fn name(&self) -> &'static str;
/// The name of the rule that should come before this one
fn previous(&self) -> Option<&'static str>;
/// Returns the rule's regex
fn regex(&self) -> &regex::Regex;
/// Callback on regex rule match
fn on_regex_match<'a>(&self, parser: &Parser, document: &Document, token: Token<'a>, matches: regex::Captures) -> Vec<Report<'a, (String, Range<usize>)>>;
}
impl<T: RegexRule> Rule for T {
fn name(&self) -> &'static str { RegexRule::name(self) }
/// Finds the next match starting from [`cursor`]
fn next_match<'a>(&self, cursor: &'a Cursor) -> Option<usize>
{
let re = self.regex();
let content = cursor.file.content.as_ref().unwrap();
match re.find_at(content.as_str(), cursor.pos)
{
Some(m) => Some(m.start()),
None => None,
}
}
fn on_match<'a>(&self, parser: &Parser, document: &Document, cursor: Cursor<'a>) -> (Cursor<'a>, Vec<Report<'a, (String, Range<usize>)>>)
{
let content = cursor.file.content.as_ref().unwrap();
let matches = self.regex().captures_at(content.as_str(), cursor.pos).unwrap();
let token = Token::new(cursor.pos, matches.get(0).unwrap().len(), cursor.file);
let token_end = token.end();
(cursor.at(token_end), self.on_regex_match(parser, document, token, matches))
}
}
*/
pub trait RegexRule {
fn name(&self) -> &'static str;
/// Returns the rule's regexes
fn regexes(&self) -> &[regex::Regex];
@ -114,51 +83,55 @@ pub trait RegexRule {
fn on_regex_match<'a>(
&self,
index: usize,
state: &ParserState,
parser: &dyn Parser,
document: &'a (dyn Document<'a> + 'a),
token: Token,
matches: regex::Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>;
fn register_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
fn register_styles(&self, _holder: &mut StyleHolder) {}
fn register_layouts(&self, _holder: &mut LayoutHolder) {}
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)>;
}
impl<T: RegexRule + 'static> Rule for T {
fn name(&self) -> &'static str { RegexRule::name(self) }
fn previous(&self) -> Option<&'static str> { RegexRule::previous(self) }
impl<T: RegexRule> Rule for T {
fn name(&self) -> &'static str {
RegexRule::name(self)
}
/// Finds the next match starting from [`cursor`]
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
fn next_match(&self, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
let content = cursor.source.content();
let mut found: Option<(usize, usize)> = None;
self.regexes().iter().enumerate().for_each(|(id, re)| {
if let Some(m) = re.find_at(content.as_str(), cursor.pos) {
found = found
.map(|(f_pos, f_id)| {
.and_then(|(f_pos, f_id)| {
if f_pos > m.start() {
(m.start(), id)
Some((m.start(), id))
} else {
(f_pos, f_id)
Some((f_pos, f_id))
}
})
.or(Some((m.start(), id)));
}
});
found.map(|(pos, id)| (pos, Box::new(id) as Box<dyn Any>))
return found.map(|(pos, id)| (pos, Box::new(id) as Box<dyn Any>));
}
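
`next_match` above scans every regex defined by the rule and keeps the hit that starts earliest after the cursor, remembering the winning regex's index as match data. A self-contained sketch of that selection with made-up patterns:

```rust
use regex::Regex;

fn main() {
    // Two made-up patterns for one rule; the match that starts earliest in
    // the remaining content wins, and its index is remembered as match data.
    let regexes = [Regex::new(r"\*\*").unwrap(), Regex::new(r"__").unwrap()];
    let content = "plain __text__ and **bold**";

    let found = regexes
        .iter()
        .enumerate()
        .filter_map(|(id, re)| re.find_at(content, 0).map(|m| (m.start(), id)))
        .min_by_key(|(pos, _)| *pos);

    assert_eq!(found, Some((6, 1))); // "__" matches first, at byte offset 6
}
```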
fn on_match<'a>(
&self,
state: &ParserState,
parser: &dyn Parser,
document: &'a (dyn Document<'a> + 'a),
cursor: Cursor,
match_data: Box<dyn Any>,
match_data: Option<Box<dyn Any>>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
let content = cursor.source.content();
let index = match_data.downcast::<usize>().unwrap();
let index = unsafe {
match_data
.unwrap_unchecked()
.downcast::<usize>()
.unwrap_unchecked()
};
let re = &self.regexes()[*index];
let captures = re.captures_at(content.as_str(), cursor.pos).unwrap();
@ -167,54 +140,11 @@ impl<T: RegexRule + 'static> Rule for T {
let token_end = token.end();
return (
cursor.at(token_end),
self.on_regex_match(*index, state, document, token, captures),
self.on_regex_match(*index, parser, document, token, captures),
);
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
self.register_bindings(lua)
}
fn register_styles(&self, holder: &mut StyleHolder) { self.register_styles(holder); }
fn register_layouts(&self, holder: &mut LayoutHolder) { self.register_layouts(holder); }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn registry() {
let rules = get_rule_registry();
let names: Vec<&'static str> = rules.iter().map(|rule| rule.name()).collect();
assert_eq!(
names,
vec![
"Comment",
"Paragraph",
"Import",
"Script",
"Element Style",
"Variable",
"Variable Substitution",
"Raw",
"List",
"Blockquote",
"Code",
"Tex",
"Graphviz",
"Media",
"Layout",
"Style",
"Custom Style",
"Section",
"Link",
"Text",
"Reference",
]
);
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
self.lua_bindings(lua)
}
}


@ -24,6 +24,7 @@ impl core::fmt::Display for dyn Source {
}
impl core::fmt::Debug for dyn Source {
// TODO
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Source{{{}}}", self.name())
}
@ -54,10 +55,10 @@ impl SourceFile {
pub fn new(path: String, location: Option<Token>) -> Result<Self, String> {
match fs::read_to_string(&path) {
Err(_) => {
Err(format!(
return Err(String::from(format!(
"Unable to read file content: `{}`",
path
))
)))
}
Ok(content) => Ok(Self {
location,
@ -69,9 +70,9 @@ impl SourceFile {
pub fn with_content(path: String, content: String, location: Option<Token>) -> Self {
Self {
location,
path,
content,
location: location,
path: path,
content: content,
}
}
}
@ -161,7 +162,7 @@ impl Token {
}
pub fn source(&self) -> Rc<dyn Source> {
self.source.clone()
return self.source.clone();
}
/// Construct Token from a range
@ -175,10 +176,10 @@ impl Token {
}
pub fn start(&self) -> usize {
self.range.start
return self.range.start;
}
pub fn end(&self) -> usize {
self.range.end
return self.range.end;
}
}


@ -9,7 +9,7 @@ use downcast_rs::Downcast;
use crate::document::document::Document;
use super::parser::ParserState;
use super::parser::Parser;
use super::source::Source;
/// Scope for state objects
@ -25,69 +25,75 @@ pub enum Scope {
PARAGRAPH = 2,
}
pub trait RuleState: Downcast {
pub trait State: Downcast {
/// Returns the state's [`Scope`]
fn scope(&self) -> Scope;
/// Callback called when state goes out of scope
fn on_remove<'a>(
&self,
state: &ParserState,
parser: &dyn Parser,
document: &dyn Document,
) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>>;
}
impl_downcast!(RuleState);
impl_downcast!(State);
impl core::fmt::Debug for dyn RuleState {
impl core::fmt::Debug for dyn State {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "State{{Scope: {:#?}}}", self.scope())
}
}
/// Object owning all the states
#[derive(Default)]
pub struct RuleStateHolder {
states: HashMap<String, Rc<RefCell<dyn RuleState>>>,
#[derive(Debug)]
pub struct StateHolder {
data: HashMap<String, Rc<RefCell<dyn State>>>,
}
impl RuleStateHolder {
impl StateHolder {
pub fn new() -> Self {
Self {
data: HashMap::new(),
}
}
// Attempts to push [`state`]. On collision, returns an error with the already present state
pub fn insert(
&mut self,
name: String,
state: Rc<RefCell<dyn RuleState>>,
) -> Result<Rc<RefCell<dyn RuleState>>, String> {
if self.states.contains_key(name.as_str()) {
return Err(format!("Attempted to insert duplicate RuleState: {name}"));
state: Rc<RefCell<dyn State>>,
) -> Result<Rc<RefCell<dyn State>>, Rc<RefCell<dyn State>>> {
match self.data.insert(name, state.clone()) {
Some(state) => Err(state),
_ => Ok(state),
}
self.states.insert(name, state.clone());
Ok(state)
}
pub fn get(&self, state_name: &str) -> Option<Rc<RefCell<dyn RuleState>>> {
self.states.get(state_name).cloned()
pub fn query(&self, name: &String) -> Option<Rc<RefCell<dyn State>>> {
self.data.get(name).map_or(None, |st| Some(st.clone()))
}
pub fn on_scope_end(
&mut self,
state: &ParserState,
parser: &dyn Parser,
document: &dyn Document,
scope: Scope,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![];
let mut result = vec![];
self.states.retain(|_name, rule_state| {
if rule_state.borrow().scope() >= scope {
rule_state
.borrow_mut()
.on_remove(state, document)
self.data.retain(|_name, state| {
if state.borrow().scope() >= scope {
state
.borrow()
.on_remove(parser, document)
.drain(..)
.for_each(|report| reports.push(report));
.for_each(|report| result.push(report));
false
} else {
true
}
});
reports
return result;
}
}


@ -1,78 +0,0 @@
use std::collections::HashMap;
use std::rc::Rc;
use downcast_rs::impl_downcast;
use downcast_rs::Downcast;
/// Styling for an element
pub trait ElementStyle: Downcast + core::fmt::Debug {
/// The style key
fn key(&self) -> &'static str;
/// Attempts to create a new style from a [`json`] string
///
/// # Errors
///
/// Will fail if deserialization fails
fn from_json(&self, json: &str) -> Result<Rc<dyn ElementStyle>, String>;
/// Attempts to deserialize lua table into a new style
fn from_lua(
&self,
lua: &mlua::Lua,
value: mlua::Value,
) -> Result<Rc<dyn ElementStyle>, mlua::Error>;
}
impl_downcast!(ElementStyle);
#[derive(Default)]
pub struct StyleHolder {
styles: HashMap<String, Rc<dyn ElementStyle>>,
}
impl StyleHolder {
/// Checks if a given style key is registered
pub fn is_registered(&self, style_key: &str) -> bool { self.styles.contains_key(style_key) }
/// Gets the current active style for an element
/// NOTE: Will panic if a style is not defined for a given element
/// If you need to process user input, use [`is_registered`]
pub fn current(&self, style_key: &str) -> Rc<dyn ElementStyle> {
self.styles.get(style_key).cloned().unwrap()
}
/// Sets the [`style`]
pub fn set_current(&mut self, style: Rc<dyn ElementStyle>) {
self.styles.insert(style.key().to_string(), style);
}
}
#[macro_export]
macro_rules! impl_elementstyle {
($t:ty, $key:expr) => {
impl $crate::parser::style::ElementStyle for $t {
fn key(&self) -> &'static str { $key }
fn from_json(
&self,
json: &str,
) -> Result<std::rc::Rc<dyn $crate::parser::style::ElementStyle>, String> {
serde_json::from_str::<$t>(json)
.map_err(|e| e.to_string())
.map(|obj| {
std::rc::Rc::new(obj) as std::rc::Rc<dyn $crate::parser::style::ElementStyle>
})
}
fn from_lua(
&self,
lua: &mlua::Lua,
value: mlua::Value,
) -> Result<std::rc::Rc<dyn $crate::parser::style::ElementStyle>, mlua::Error> {
mlua::LuaSerdeExt::from_value::<$t>(lua, value).map(|obj| {
std::rc::Rc::new(obj) as std::rc::Rc<dyn $crate::parser::style::ElementStyle>
})
}
}
};
}


@ -8,7 +8,7 @@ use crate::document::document::DocumentAccessors;
use crate::document::element::ElemKind;
use crate::elements::paragraph::Paragraph;
use super::parser::ParserState;
use super::parser::Parser;
use super::source::Source;
/// Processes text for escape characters and paragraphing
@ -36,7 +36,7 @@ pub fn process_text(document: &dyn Document, content: &str) -> String {
.last_element::<Paragraph>()
.and_then(|par| {
par.find_back(|e| e.kind() != ElemKind::Invisible)
.map(|e| e.kind() == ElemKind::Inline)
.and_then(|e| Some(e.kind() == ElemKind::Inline))
})
.unwrap_or(false)
{
@ -79,12 +79,12 @@ pub fn process_text(document: &dyn Document, content: &str) -> String {
}
}
(out + g, Some(g))
return (out + g, Some(g));
})
.0
.to_string();
processed
return processed;
}
/// Processes a string and escapes a single token out of it
@ -111,7 +111,7 @@ pub fn process_escaped<S: AsRef<str>>(escape: char, token: &'static str, content
escaped += 1;
} else if escaped % 2 == 1 && token_it.peek().map_or(false, |p| *p == c) {
let _ = token_it.next();
if token_it.peek().is_none() {
if token_it.peek() == None {
(0..(escaped / 2)).for_each(|_| processed.push(escape));
escaped = 0;
token_it = token.chars().peekable();
@ -136,25 +136,17 @@ pub fn process_escaped<S: AsRef<str>>(escape: char, token: &'static str, content
/// Parses source into a single paragraph
/// If source contains anything but a single paragraph, an error is returned
pub fn parse_paragraph<'a>(
state: &ParserState,
parser: &dyn Parser,
source: Rc<dyn Source>,
document: &'a dyn Document<'a>,
) -> Result<Box<Paragraph>, &'static str> {
let parsed = state.with_state(|new_state| -> Box<dyn Document> {
new_state
.parser
.parse(new_state, source.clone(), Some(document))
.0
});
let parsed = parser.parse(source.clone(), Some(document));
if parsed.content().borrow().len() > 1 {
return Err("Parsed document contains more than a single paragraph");
} else if parsed.content().borrow().len() == 0 {
return Err("Parsed document is empty");
} else if parsed.last_element::<Paragraph>().is_none() {
return Err("Parsed element is not a paragraph");
} else if state.parser.has_error() {
// FIXME: If the parser had an error before, this would trigger
return Err("Parser error");
}
let paragraph = parsed.content().borrow_mut().pop().unwrap();
@ -237,12 +229,13 @@ impl<'a> PropertyMap<'a> {
}
}
#[derive(Debug)]
pub struct PropertyParser {
pub properties: HashMap<String, Property>,
properties: HashMap<String, Property>,
}
impl PropertyParser {
pub fn new(properties: HashMap<String, Property>) -> Self { Self { properties } }
/// Attempts to build a default propertymap
///
/// Returns an error if at least one [`Property`] is required and doesn't provide a default
@ -278,7 +271,7 @@ impl PropertyParser {
/// properties.insert("width".to_string(),
/// Property::new(true, "Width of the element in em".to_string(), None));
///
/// let parser = PropertyParser { properties };
/// let parser = PropertyParser::new(properties);
/// let pm = parser.parse("width=15").unwrap();
///
/// assert_eq!(pm.get("width", |_, s| s.parse::<i32>()).unwrap().1, 15);
@ -333,8 +326,9 @@ impl PropertyParser {
escaped = 0;
in_name = true;
try_insert(&name, &value)?;
if let Err(e) = try_insert(&name, &value) {
return Err(e);
}
name.clear();
value.clear();
} else {
@ -360,7 +354,9 @@ impl PropertyParser {
return Err("Expected non empty property list.".to_string());
}
try_insert(&name, &value)?;
if let Err(e) = try_insert(&name, &value) {
return Err(e);
}
if let Err(e) = self.properties.iter().try_for_each(|(key, prop)| {
if !properties.properties.contains_key(key) {
@ -420,26 +416,20 @@ mod tests {
(&doc as &dyn Document)
.last_element_mut::<Paragraph>()
.unwrap()
.push(Box::new(Comment {
location: tok.clone(),
content: "COMMENT".into(),
}))
.unwrap();
.push(Box::new(Comment::new(tok.clone(), "COMMENT".to_string())));
assert_eq!(process_text(&doc, "\na"), "a");
// A space is appended as previous element is inline
(&doc as &dyn Document)
.last_element_mut::<Paragraph>()
.unwrap()
.push(Box::new(Text::new(tok.clone(), "TEXT".to_string())))
.unwrap();
.push(Box::new(Text::new(tok.clone(), "TEXT".to_string())));
assert_eq!(process_text(&doc, "\na"), " a");
(&doc as &dyn Document)
.last_element_mut::<Paragraph>()
.unwrap()
.push(Box::new(Style::new(tok.clone(), 0, false)))
.unwrap();
.push(Box::new(Style::new(tok.clone(), 0, false)));
assert_eq!(process_text(&doc, "\na"), " a");
}
@ -501,7 +491,7 @@ mod tests {
Property::new(false, "Weight in %".to_string(), Some("0.42".to_string())),
);
let parser = PropertyParser { properties };
let parser = PropertyParser::new(properties);
let pm = parser.parse("width=15,length=-10").unwrap();
// Ok

157
style.css

@ -2,37 +2,9 @@ body {
background-color: #1b1b1d;
color: #c5c5c5;
font-family: sans-serif;
margin: 0;
padding: 0;
}
.layout {
display: flex;
}
.content {
max-width: 99ch;
max-width: 90ch;
margin: 0 auto;
padding: 0;
width: 100%;
}
/* Layouts */
div.centered {
text-align: center;
}
div.split-container {
display: flex;
width: 100%;
}
div.split-container > div.split {
flex: 1;
flex-shrink: 0;
overflow-x: auto;
margin: 0.5em;
}
/* Styles */
@ -57,89 +29,70 @@ a.inline-code {
}
/* Navbar */
.navbar {
display: none;
left: 0;
top: 0;
bottom: 0;
width: max(calc((100vw - 99ch) / 2 - 15vw), 24ch);
height: 100vh;
position: fixed;
margin-right: 1em;
#navbar {
left: 0;
top: 0;
bottom: 0;
width: max(16vw, 20ch);
overflow-y: auto;
box-sizing: border-box;
overscroll-behavior-y: contain;
position: absolute;
box-sizing: border-box;
overscroll-behavior-y: contain;
background-color: #161a26;
background-color: #161a26;
color: #aaa;
font-size: 0.9em;
font-size: 0.9em;
font-weight: bold;
}
@media (min-width: 130ch) {
.navbar {
display: block;
}
.container {
flex-direction: row;
}
}
.navbar a {
#navbar a {
color: #ffb454;
text-decoration: none;
font-weight: normal;
}
.navbar li {
#navbar li {
display: block;
position: relative;
padding-left: 1em;
margin-left: 0em;
}
.navbar ul {
#navbar ul {
margin-left: 0em;
padding-left: 0;
}
.navbar summary{
#navbar summary{
display: block;
cursor: pointer;
}
.navbar summary::marker,
.navbar summary::-webkit-details-marker{
#navbar summary::marker,
#navbar summary::-webkit-details-marker{
display: none;
}
.navbar summary:focus{
#navbar summary:focus{
outline: none;
}
.navbar summary:focus-visible{
#navbar summary:focus-visible{
outline: 1px dotted #000;
}
.navbar summary:before {
content: "+";
color: #ffb454;
float: left;
text-align: center;
width: 1em;
#navbar summary:before {
content: "+";
color: #ffb454;
float: left;
text-align: center;
width: 1em;
}
.navbar details[open] > summary:before {
content: "";
}
/* Sections */
a.section-link {
text-decoration: none;
#navbar details[open] > summary:before {
content: "";
}
/* Code blocks */
@ -149,10 +102,11 @@ div.code-block-title {
}
div.code-block-content {
max-height: 38em;
max-height: 20em;
margin-bottom: 0.2em;
width: auto;
overflow: scroll;
overflow: auto;
background-color: #0f141a;
}
@ -165,7 +119,6 @@ div.code-block-content td {
div.code-block-content pre {
border: 0;
margin: 0;
tab-size: 4;
}
div.code-block-content .code-block-gutter {
@ -201,7 +154,7 @@ div.code-block-content .code-block-line {
margin-right: .5em;
}
.medium img, video, audio {
.medium img {
max-width: 100%;
}
@ -227,7 +180,6 @@ a.medium-ref {
font-weight: bold;
color: #d367c1;
text-decoration: none;
}
a.medium-ref:hover {
@ -239,50 +191,11 @@ a.medium-ref img {
margin: 1.3em 0 0 0;
}
a.medium-ref video {
display: none;
margin: 1.3em 0 0 0;
}
a:hover.medium-ref img {
max-width: 25%;
left: 37.5%;
max-width: 50%;
margin: auto;
display: inline-block;
position: absolute;
box-shadow: 0px 0px 6px 2px rgba(0, 0, 0, 0.75);
box-shadow: 0px 0px 6px 2px rgba(0,0,0,0.75);
}
a:hover.medium-ref video {
max-width: 25%;
left: 37.5%;
display: inline-block;
position: absolute;
box-shadow: 0px 0px 6px 2px rgba(0, 0, 0, 0.75);
}
/* Blockquote */
blockquote {
margin-left: 0.2em;
padding-left: 0.6em;
border-left: 4px solid #0ff08b;
}
blockquote p::before {
content: '\201C';
}
blockquote p::after {
content: '\201D';
}
.blockquote-author:before {
content: '—';
}
.blockquote-author {
margin-left: 0.2em;
}


@ -1,7 +1,5 @@
#!/usr/bin/env python3
"""latex2svg
-- This version of latex2svg comes with NML and has been modified to work with it only --
-- The original version can be found here : https://github.com/Moonbase59/latex2svg --
Read LaTeX code from stdin and render a SVG using LaTeX, dvisvgm and svgo.
@ -25,6 +23,38 @@ import re
from tempfile import TemporaryDirectory
from ctypes.util import find_library
default_template = r"""
\documentclass[{{ fontsize }}pt,preview]{standalone}
{{ preamble }}
\begin{document}
\begin{preview}
{{ code }}
\end{preview}
\end{document}
"""
default_preamble = r"""
\usepackage[utf8x]{inputenc}
\usepackage{amsmath}
\usepackage{amsfonts}
\usepackage{amssymb}
\usepackage{amstext}
\usepackage{newtxtext}
\usepackage[libertine]{newtxmath}
% prevent errors from old font commands
\DeclareOldFontCommand{\rm}{\normalfont\rmfamily}{\mathrm}
\DeclareOldFontCommand{\sf}{\normalfont\sffamily}{\mathsf}
\DeclareOldFontCommand{\tt}{\normalfont\ttfamily}{\mathtt}
\DeclareOldFontCommand{\bf}{\normalfont\bfseries}{\mathbf}
\DeclareOldFontCommand{\it}{\normalfont\itshape}{\mathit}
\DeclareOldFontCommand{\sl}{\normalfont\slshape}{\@nomath\sl}
\DeclareOldFontCommand{\sc}{\normalfont\scshape}{\@nomath\sc}
% prevent errors from undefined shortcuts
\newcommand{\N}{\mathbb{N}}
\newcommand{\R}{\mathbb{R}}
\newcommand{\Z}{\mathbb{Z}}
"""
default_svgo_config = r"""
module.exports = {
plugins: [
@ -54,6 +84,8 @@ svgo_cmd = 'svgo'
default_params = {
'fontsize': 12, # TeX pt
'template': default_template,
'preamble': default_preamble,
'latex_cmd': latex_cmd,
'dvisvgm_cmd': dvisvgm_cmd,
'svgo_cmd': svgo_cmd,
@ -205,15 +237,22 @@ def main():
""")
parser.add_argument('--version', action='version',
version='%(prog)s {version}'.format(version=__version__))
parser.add_argument('--preamble',
help="LaTeX preamble code to read from file")
parser.add_argument('--fontsize',
help="LaTeX fontsize in pt")
args = parser.parse_args()
preamble = default_preamble
if args.preamble is not None:
with open(args.preamble) as f:
preamble = f.read()
fontsize = 12
if args.fontsize is not None:
fontsize = int(args.fontsize)
latex = sys.stdin.read()
try:
params = default_params.copy()
params['preamble'] = preamble
params['fontsize'] = fontsize
out = latex2svg(latex, params)
sys.stdout.write(out['svg'])