Compare commits


No commits in common. "master" and "batch" have entirely different histories.

66 changed files with 2183 additions and 7986 deletions

Cargo.lock

@@ -56,17 +56,7 @@ checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.72",
-]
-
-[[package]]
-name = "auto-registry"
-version = "0.0.4"
-dependencies = [
- "lazy_static",
- "proc-macro2",
- "quote",
- "syn 1.0.109",
+ "syn 2.0.53",
 ]

 [[package]]
@@ -77,7 +67,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.72",
+ "syn 2.0.53",
 ]

 [[package]]
@@ -287,16 +277,6 @@ version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
-
-[[package]]
-name = "erased-serde"
-version = "0.4.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24e2389d65ab4fab27dc2a5de7b191e1f6617d1f1c8855c0dc569c94a4cbb18d"
-dependencies = [
- "serde",
- "typeid",
-]

 [[package]]
 name = "errno"
 version = "0.3.9"
@@ -409,7 +389,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.72",
+ "syn 2.0.53",
 ]

 [[package]]
@@ -716,13 +696,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d111deb18a9c9bd33e1541309f4742523bfab01d276bfa9a27519f6de9c11dc7"
 dependencies = [
  "bstr",
- "erased-serde",
  "mlua-sys",
  "num-traits",
  "once_cell",
  "rustc-hash",
- "serde",
- "serde-value",
 ]

 [[package]]
@@ -743,7 +720,6 @@ name = "nml"
 version = "0.1.0"
 dependencies = [
  "ariadne",
- "auto-registry",
  "dashmap 6.0.1",
  "downcast-rs",
  "getopts",
@@ -752,9 +728,7 @@ dependencies = [
  "lsp-server",
  "lsp-types 0.97.0",
  "mlua",
- "rand 0.8.5",
  "regex",
- "runtime-format",
  "rusqlite",
  "rust-crypto",
  "serde",
@@ -828,15 +802,6 @@ dependencies = [
  "pkg-config",
 ]
-
-[[package]]
-name = "ordered-float"
-version = "2.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c"
-dependencies = [
- "num-traits",
-]

 [[package]]
 name = "parking_lot_core"
 version = "0.9.10"
@@ -887,7 +852,7 @@ dependencies = [
  "pest_meta",
  "proc-macro2",
  "quote",
- "syn 2.0.72",
+ "syn 2.0.53",
 ]

 [[package]]
@@ -918,7 +883,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.72",
+ "syn 2.0.53",
 ]

 [[package]]
@@ -966,9 +931,9 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
 [[package]]
 name = "proc-macro2"
-version = "1.0.86"
+version = "1.0.79"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
+checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e"
 dependencies = [
  "unicode-ident",
 ]
@@ -984,9 +949,9 @@ dependencies = [
 [[package]]
 name = "quote"
-version = "1.0.36"
+version = "1.0.35"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
+checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
 dependencies = [
  "proc-macro2",
 ]
@@ -1106,15 +1071,6 @@ version = "0.8.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
-
-[[package]]
-name = "runtime-format"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09958d5b38bca768ede7928c767c89a08ba568144a7b61992aecae79b03c8c94"
-dependencies = [
- "tinyvec",
-]

 [[package]]
 name = "rusqlite"
 version = "0.31.0"
@@ -1203,16 +1159,6 @@ dependencies = [
  "serde_derive",
 ]
-
-[[package]]
-name = "serde-value"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c"
-dependencies = [
- "ordered-float",
- "serde",
-]

 [[package]]
 name = "serde_derive"
 version = "1.0.204"
@@ -1221,7 +1167,7 @@ checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.72",
+ "syn 2.0.53",
 ]

 [[package]]
@@ -1243,7 +1189,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.72",
+ "syn 2.0.53",
 ]

 [[package]]
@@ -1285,9 +1231,9 @@ dependencies = [
 [[package]]
 name = "syn"
-version = "2.0.72"
+version = "2.0.53"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af"
+checksum = "7383cd0e49fff4b6b90ca5670bfd3e9d6a733b3f90c686605aa7eec8c4996032"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1345,7 +1291,7 @@ checksum = "d20468752b09f49e909e55a5d338caa8bedf615594e9d80bc4c565d30faf798c"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.72",
+ "syn 2.0.53",
 ]

 [[package]]
@@ -1425,7 +1371,7 @@ checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.72",
+ "syn 2.0.53",
 ]

 [[package]]
@@ -1492,7 +1438,7 @@ checksum = "84fd902d4e0b9a4b27f2f440108dc034e1758628a9b702f8ec61ad66355422fa"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.72",
+ "syn 2.0.53",
 ]

 [[package]]
@@ -1520,7 +1466,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.72",
+ "syn 2.0.53",
 ]

 [[package]]
@@ -1532,12 +1478,6 @@ dependencies = [
  "once_cell",
 ]
-
-[[package]]
-name = "typeid"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "059d83cc991e7a42fc37bd50941885db0888e34209f8cfd9aab07ddec03bc9cf"

 [[package]]
 name = "typenum"
 version = "1.17.0"
@@ -1783,5 +1723,5 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.72",
+ "syn 2.0.53",
 ]


@@ -17,7 +17,6 @@ inherits = "release"
 debug = true

 [dependencies]
-auto-registry = { path = "crates/auto-registry" }
 ariadne = "0.4.1"
 dashmap = "6.0.1"
 downcast-rs = "1.2.1"
@@ -26,23 +25,15 @@ graphviz-rust = "0.9.0"
 lazy_static = "1.5.0"
 lsp-server = "0.7.6"
 lsp-types = "0.97.0"
-mlua = { version = "0.9.9", features = ["lua54", "vendored", "serialize"] }
+mlua = { version = "0.9.9", features = ["lua54", "vendored"] }
 regex = "1.10.3"
 rusqlite = "0.31.0"
 rust-crypto = "0.2.36"
 serde = "1.0.204"
 serde_json = "1.0.120"
 syntect = "5.2.0"
-tokio = { version = "1.38.1", features = [
-    "macros",
-    "rt-multi-thread",
-    "io-std",
-] }
+tokio = { version = "1.38.1", features = ["macros", "rt-multi-thread", "io-std"]}
 tower-lsp = "0.20.0"
 unicode-segmentation = "1.11.0"
 walkdir = "2.5.0"
-runtime-format = "0.1.3"
-
-[dev-dependencies]
-rand = "0.8.5"

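Side note on the `mlua` line above: the `serialize` feature kept on master (and dropped on batch) is what provides mlua's serde integration. A minimal sketch of what that feature enables, with an illustrative `BlockquoteStyle` type that is not part of this repository:

```rust
use mlua::{Lua, LuaSerdeExt};
use serde::Serialize;

// Illustrative type only; any `Serialize` type works the same way.
#[derive(Serialize)]
struct BlockquoteStyle {
    author_pos: String,
}

fn expose_style(lua: &Lua) -> mlua::Result<()> {
    // `to_value` is provided by `LuaSerdeExt` and only exists when mlua
    // is built with the `serialize` feature.
    let style = lua.to_value(&BlockquoteStyle { author_pos: "After".into() })?;
    lua.globals().set("blockquote_style", style)
}
```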

@@ -35,9 +35,9 @@ cargo build --release --bin nml
 - [x] LaTeX rendering
 - [x] Graphviz rendering
 - [x] Media
-- [x] References
-- [x] Navigation
-- [x] Cross-Document references
+- [ ] References
+- [ ] Navigation
+- [ ] Cross-Document references
 - [ ] Complete Lua api
 - [ ] Documentation
 - [ ] Table


@ -1,54 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "auto-registry"
version = "0.0.4"
dependencies = [
"lazy_static",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "proc-macro2"
version = "1.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
dependencies = [
"proc-macro2",
]
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"


@ -1,13 +0,0 @@
[package]
name = "auto-registry"
version = "0.0.4"
edition = "2021"
[lib]
proc-macro = true
[dependencies]
proc-macro2 = { version = "1.0"}
quote = "1.0"
syn = { version = "1.0", features = [ "full" ] }
lazy_static = "1.5.0"


@ -1,296 +0,0 @@
#![feature(proc_macro_span)]
use std::cell::RefCell;
use std::collections::HashMap;
use lazy_static::lazy_static;
use proc_macro::TokenStream;
use quote::quote;
use std::sync::Mutex;
use syn::parse::Parse;
use syn::parse::ParseStream;
use syn::parse_macro_input;
use syn::ItemStruct;
lazy_static! {
/// The registry, each key corresponds to an identifier that needs to be
/// valid in the context of the [`generate_registry`] macro.
static ref REGISTRY: Mutex<RefCell<HashMap<String, Vec<String>>>> =
Mutex::new(RefCell::new(HashMap::new()));
}
/// Arguments for the [`auto_registry`] proc macro
struct AutoRegistryArgs {
/// The registry name
registry: syn::LitStr,
/// The absolute path to the struct, if not specified the macro will try
/// to automatically infer the full path.
path: Option<syn::LitStr>,
}
/// Parser for [`AutoRegistryArgs`]
impl Parse for AutoRegistryArgs {
fn parse(input: ParseStream) -> syn::Result<Self> {
let mut registry = None;
let mut path = None;
loop {
let key: syn::Ident = input.parse()?;
input.parse::<syn::Token![=]>()?;
let value: syn::LitStr = input.parse()?;
match key.to_string().as_str() {
"registry" => registry = Some(value),
"path" => path = Some(value),
_ => {
return Err(syn::Error::new(
key.span(),
format!(
"Unknown attribute `{}`, expected `registry` or `path`",
key.to_string()
),
))
}
}
if input.is_empty() {
break;
}
input.parse::<syn::Token![,]>()?;
}
if registry.is_none() {
return Err(syn::Error::new(
input.span(),
"Missing required attribute `registry`".to_string(),
));
}
Ok(AutoRegistryArgs {
registry: registry.unwrap(),
path,
})
}
}
/// The proc macro used on a struct to add it to the registry
///
/// # Attributes
/// - registry: (String) Name of the registry to collect the struct into
/// - path: (Optional String) The crate path in which the struct is located
///   If left empty, the macro will try to deduce the path automatically
///
/// # Note
///
/// Due to a lacking implementation of `proc_macro_span` in rust-analyzer,
/// it is highly advised to set the `path` attribute when using this macro.
/// See https://github.com/rust-lang/rust-analyzer/issues/15950
#[proc_macro_attribute]
pub fn auto_registry(attr: TokenStream, input: TokenStream) -> TokenStream {
let args = parse_macro_input!(attr as AutoRegistryArgs);
let input = parse_macro_input!(input as ItemStruct);
let ident = &input.ident;
let path = if let Some(path) = args.path {
let value = path.value();
if value.is_empty() {
value
} else {
format!("{}::{}", value, ident.to_string().as_str())
}
} else {
// Attempt to get the path in a hacky way in case the path wasn't
// specified as an attribute to the macro
let path = match input
.ident
.span()
.unwrap()
.source_file()
.path()
.canonicalize()
{
Ok(path) => path,
Err(e) => {
return syn::Error::new(
input.ident.span(),
format!("Failed to canonicalize path: {}", e),
)
.to_compile_error()
.into();
}
};
let crate_path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
let relative_path = path.strip_prefix(&crate_path).unwrap();
let relative_path_str = relative_path.to_string_lossy();
// Remove the first path component e.g "src/"
let pos = if let Some(pos) = relative_path_str.find("/") {
pos + 1
} else {
0
};
let module_path = relative_path_str
.split_at(pos)
.1
.strip_suffix(".rs")
.unwrap()
.replace("/", "::");
if module_path.is_empty() {
format!("crate::{}", ident.to_string())
} else {
format!("crate::{module_path}::{}", ident.to_string())
}
};
let reg_mtx = REGISTRY.lock().unwrap();
let mut reg_borrow = reg_mtx.borrow_mut();
if let Some(ref mut vec) = reg_borrow.get_mut(args.registry.value().as_str()) {
vec.push(path);
} else {
reg_borrow.insert(args.registry.value(), vec![path]);
}
quote! {
#input
}
.into()
}
/// Arguments for the [`generate_registry`] proc macro
struct GenerateRegistryArgs {
/// The registry name
registry: syn::LitStr,
/// The target, i.e the generated function name
target: syn::Ident,
/// The maker macro, takes all constructed items and processes them
maker: syn::Expr,
/// The return type for the function
return_type: syn::Type,
}
/// Parser for [`GenerateRegistryArgs`]
impl Parse for GenerateRegistryArgs {
fn parse(input: ParseStream) -> syn::Result<Self> {
let mut registry = None;
let mut target = None;
let mut maker = None;
let mut return_type = None;
loop {
let key: syn::Ident = input.parse()?;
input.parse::<syn::Token![=]>()?;
match key.to_string().as_str() {
"registry" => registry = Some(input.parse()?),
"target" => target = Some(input.parse()?),
"maker" => maker = Some(input.parse()?),
"return_type" => return_type = Some(input.parse()?),
_ => {
return Err(syn::Error::new(
key.span(),
format!(
"Unknown attribute `{}`, expected `registry`, `target`, `maker` or `return_type`",
key.to_string()
),
))
}
}
if input.is_empty() {
break;
}
input.parse::<syn::Token![,]>()?;
}
if registry.is_none() {
return Err(syn::Error::new(
input.span(),
"Missing required attribute `registry`".to_string(),
));
} else if target.is_none() {
return Err(syn::Error::new(
input.span(),
"Missing required attribute `target`".to_string(),
));
} else if maker.is_none() {
return Err(syn::Error::new(
input.span(),
"Missing required attribute `maker`".to_string(),
));
} else if return_type.is_none() {
return Err(syn::Error::new(
input.span(),
"Missing required attribute `return_type`".to_string(),
));
}
Ok(GenerateRegistryArgs {
registry: registry.unwrap(),
target: target.unwrap(),
maker: maker.unwrap(),
return_type: return_type.unwrap(),
})
}
}
/// The proc macro that generates the function to build the registry
///
/// # Attributes
/// - registry: (String) Name of the registry to generate
/// - target: (Identifier) Name of the resulting function
/// - maker: (Macro) A macro that will take all the newly constructed objects
/// comma-separated and create the resulting expression
/// - return_type: (Type) The return type of the generated function.
/// Must match the type of the macro invocation
///
/// # Example
/// ```
/// macro_rules! create_listeners {
/// ( $($construct:expr),+ $(,)? ) => {{
/// vec![$(Box::new($construct) as Box<dyn Listener>,)+]
/// }};
/// }
/// #[generate_registry(
/// registry = "listeners",
/// target = build_listeners,
/// return_type = Vec<Box<dyn Listener>>,
/// maker = create_listeners)]
///
/// fn main()
/// {
/// let all_listeners : Vec<Box<dyn Listener>> = build_listeners();
/// }
/// ```
#[proc_macro_attribute]
pub fn generate_registry(attr: TokenStream, input: TokenStream) -> TokenStream {
let args = parse_macro_input!(attr as GenerateRegistryArgs);
let reg_mtx = REGISTRY.lock().unwrap();
let mut stream = proc_macro2::TokenStream::new();
if let Some(names) = reg_mtx.borrow().get(args.registry.value().as_str()) {
for name in names {
let struct_name: proc_macro2::TokenStream = name.parse().unwrap();
stream.extend(quote::quote_spanned!(proc_macro2::Span::call_site() =>
#struct_name::new(),
));
}
} else {
panic!(
"Unable to find registry item with key=`{}`",
args.registry.value()
);
}
let function = args.target;
let return_type = args.return_type;
let maker = args.maker;
let rest: proc_macro2::TokenStream = input.into();
quote! {
fn #function() -> #return_type {
#maker!(
#stream
)
}
#rest
}
.into()
}

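For context on the crate deleted above, here is a hedged usage sketch of its two macros, following the doc comments in the removed source; the `Listener` trait and `MyListener` struct are illustrative placeholders, not code from the repository:

```rust
use auto_registry::{auto_registry, generate_registry};

trait Listener {}

// Collected into the "listeners" registry; `path` is given explicitly so
// rust-analyzer does not need `proc_macro_span` to infer it.
#[auto_registry(registry = "listeners", path = "crate")]
struct MyListener;

impl MyListener {
    fn new() -> Self { Self }
}
impl Listener for MyListener {}

// The maker macro receives every registered constructor, comma-separated.
macro_rules! create_listeners {
    ( $($construct:expr),+ $(,)? ) => {{
        vec![$(Box::new($construct) as Box<dyn Listener>,)+]
    }};
}

// Expands to `fn build_listeners() -> Vec<Box<dyn Listener>>`, whose body is
// `create_listeners!(crate::MyListener::new(), ...)`.
#[generate_registry(
    registry = "listeners",
    target = build_listeners,
    return_type = Vec<Box<dyn Listener>>,
    maker = create_listeners
)]
fn main() {
    let listeners: Vec<Box<dyn Listener>> = build_listeners();
    assert_eq!(listeners.len(), 1);
}
```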

@ -1,59 +0,0 @@
@import ../template.nml
@nav.previous = Blockquote
%<make_doc({"Blocks"}, "Blockquotes", "Blockquotes")>%
# Blockquotes
>[author=Lennart Poettering, cite=SystemD github issue 5998, url=https://github.com/systemd/systemd/pull/5998]
>>IMO, you shouldn't see the assignment of a CVE as a negative thing. The bug exists whether or not a CVE is assigned. The assignment of a CVE allows for people to consider what this issue means for them.
>
>Well, that makes no sense. You don't assign CVEs to every single random bugfix we do, do you? So why this one? I understand your currency is CVEs, but this just makes CVEs useless. And hardly anymore useful than a git history...
>
>I mean, I am fine with security bureaucracy if it actually helps anyone, but you just create noise where there shouldn't be any. And that way you just piss off the upstreams whose cooperation you actually should be interested in. Your at least made sure that my own interest in helping your efforts goes to zero...
# Nesting blockquotes
> Quotes can be nested
>> Here's a subquote
>>>[author=With author, cite=With cite]
>>> Here's another subquote
>> Back to the subquote
>
>> Another subquote
> This issue is getting a bit too heated, locking right now
```Markdown, Given by the following
> Quotes can be nested
>> Here's a subquote
>>>[author=With author, cite=With cite]
>>> Here's another subquote
>> Back to the subquote
>
>> Another subquote
> This issue is getting a bit too heated, locking right now
```
# Properties
Properties must be specified on the first `>` of the quote, inside brackets.
* ``author`` The quote author
* ``cite`` The quote source name
* ``url`` The quote source url (used for accessibility)
# Blockquotes styling
The blockquotes styling controls how the author, cite and url are rendered. This is controlled by style key ``style.blockquote``.
* ``author_pos`` Position of the author statement, available options:
*- `None` Hides the author
*- `Before` Displays the author before the quote
*- `After` Displays the author after the quote (default)
* ``format`` An array with 3 format strings to control how the author is displayed:
*-[offset=0] Format for author+cite
*- Format for author only
*- Format for cite only
```JSON, Default Style
{
"author_pos": "After",
"format": ["{author}, {cite}", "{author}", "{cite}"],
}
```


@ -1,344 +0,0 @@
@import ../template.nml
%<make_doc({"External Tools"}, "Graphviz", "Graphviz")>%
# Graphs from graphviz
#+LAYOUT_BEGIN Centered
[graph][width=50%]
digraph {
bgcolor=transparent;
graph[fontcolor=darkgray];
node[shape=box,fontcolor=darkgray];
edge[fontcolor=darkgray, color=gray];
filelist [color=orange, label="File List"];
doclist [color=orange, label="Document List"];
iscached [shape=diamond, color=red, label="Cached?"];
parse [color=white, label=Parse];
compile [color=white, label=Compile];
cache [color=orange, label=Cache];
filelist -> iscached;
iscached -> cache[dir=both,color=lightblue,style=dashed];
iscached -> doclist[label="Yes",color=lightblue,style=dashed];
iscached -> parse[label="No",color=lightblue,style=dashed];
subgraph cluster_0 {
style=dotted;
color=white;
label = "Processing";
labeljust="l";
parse -> compile;
}
compile -> doclist[label=""];
buildnav [color=white, label="Build Navigation"];
xref [color=white, label="Resolve Cross-References"];
doclist -> xref;
doclist -> buildnav[label="Cached",color=lightblue,style=dashed];
subgraph cluster_1 {
style=dotted;
color=white;
label = "Post-Processing";
labeljust="l";
xref -> buildnav;
}
xref -> cache[color=lightblue,style=dashed];
output [color=orange, label="Output"];
buildnav -> output;
}
[/graph]
#+LAYOUT_END
The Graphviz functionality requires the `dot` executable. More information can be found on [Graphviz's website](https://graphviz.org/).
# Synopsis
Graph blocks are delimited by ``[graph]...[/graph]``
# Properties
* ``layout`` The layout engine, defaults to `dot`
see [Graphviz's documentation](https://graphviz.org/docs/layouts/). Allowed values:
*- [`dot`](https://graphviz.org/docs/layouts/dot/)
*- [`neato`](https://graphviz.org/docs/layouts/neato/)
*- [`fdp`](https://graphviz.org/docs/layouts/fdp/)
*- [`sfdp`](https://graphviz.org/docs/layouts/sfdp/)
*- [`circo`](https://graphviz.org/docs/layouts/circo/)
*- [`twopi`](https://graphviz.org/docs/layouts/twopi/)
*- [`osage`](https://graphviz.org/docs/layouts/osage/)
*- [`patchwork`](https://graphviz.org/docs/layouts/patchwork/)
* ``width`` The resulting svg's width property, defaults to `100%`
# Examples
#+LAYOUT_BEGIN[style=flex:0.33] Split
[graph]
digraph UML_Class_diagram {
bgcolor=transparent;
graph[fontcolor=darkgray];
node[fontcolor=darkgray];
edge[fontcolor=darkgray, color=gray90];
graph [
label="UML Class diagram demo"
labelloc="t"
fontname="Helvetica,Arial,sans-serif"
]
node [
fontname="Helvetica,Arial,sans-serif"
shape=record
style=filled
fillcolor=gray95
]
edge [fontname="Helvetica,Arial,sans-serif"]
edge [arrowhead=vee style=dashed]
Client -> Interface1 [label=dependency]
Client -> Interface2
edge [dir=back arrowtail=empty style=""]
Interface1 -> Class1 [xlabel=inheritance]
Interface2 -> Class1 [dir=none]
Interface2 [label="" xlabel="Simple\ninterface" shape=circle]
Interface1[label = <{<b>«interface» I/O</b> | + property<br align="left"/>...<br align="left"/>|+ method<br align="left"/>...<br align="left"/>}>]
Class1[label = <{<b>I/O class</b> | + property<br align="left"/>...<br align="left"/>|+ method<br align="left"/>...<br align="left"/>}>]
edge [dir=back arrowtail=empty style=dashed]
Class1 -> System_1 [label=implementation]
System_1 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>System</b> </td> </tr>
<tr> <td>
<table border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left" >+ property</td> </tr>
<tr> <td port="ss1" align="left" >- Subsystem 1</td> </tr>
<tr> <td port="ss2" align="left" >- Subsystem 2</td> </tr>
<tr> <td port="ss3" align="left" >- Subsystem 3</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">+ method<br/>...<br align="left"/></td> </tr>
</table>>
]
edge [dir=back arrowtail=diamond]
System_1:ss1 -> Subsystem_1 [xlabel="composition"]
Subsystem_1 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>Subsystem 1</b> </td> </tr>
<tr> <td>
<table border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left">+ property</td> </tr>
<tr> <td align="left" port="r1">- resource</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">
+ method<br/>
...<br align="left"/>
</td> </tr>
</table>>
]
Subsystem_2 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>Subsystem 2</b> </td> </tr>
<tr> <td>
<table align="left" border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left">+ property</td> </tr>
<tr> <td align="left" port="r1">- resource</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">
+ method<br/>
...<br align="left"/>
</td> </tr>
</table>>
]
Subsystem_3 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>Subsystem 3</b> </td> </tr>
<tr> <td>
<table border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left">+ property</td> </tr>
<tr> <td align="left" port="r1">- resource</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">
+ method<br/>
...<br align="left"/>
</td> </tr>
</table>>
]
System_1:ss2 -> Subsystem_2;
System_1:ss3 -> Subsystem_3;
edge [xdir=back arrowtail=odiamond]
Subsystem_1:r1 -> "Shared resource" [label=aggregation]
Subsystem_2:r1 -> "Shared resource"
Subsystem_3:r1 -> "Shared resource"
"Shared resource" [
label = <{
<b>Shared resource</b>
|
+ property<br align="left"/>
...<br align="left"/>
|
+ method<br align="left"/>
...<br align="left"/>
}>
]
}
[/graph]
#+LAYOUT_NEXT[style=flex:0.66]
Generated by the following code:
``
[graph]
digraph UML_Class_diagram {
bgcolor=transparent;
graph[fontcolor=darkgray];
node[fontcolor=darkgray];
edge[fontcolor=darkgray, color=gray90];
graph [
label="UML Class diagram demo"
labelloc="t"
fontname="Helvetica,Arial,sans-serif"
]
node [
fontname="Helvetica,Arial,sans-serif"
shape=record
style=filled
fillcolor=gray95
]
edge [fontname="Helvetica,Arial,sans-serif"]
edge [arrowhead=vee style=dashed]
Client -> Interface1 [label=dependency]
Client -> Interface2
edge [dir=back arrowtail=empty style=""]
Interface1 -> Class1 [xlabel=inheritance]
Interface2 -> Class1 [dir=none]
Interface2 [label="" xlabel="Simple\ninterface" shape=circle]
Interface1[label = <{<b>«interface» I/O</b> | + property<br align="left"/>...<br align="left"/>|+ method<br align="left"/>...<br align="left"/>}>]
Class1[label = <{<b>I/O class</b> | + property<br align="left"/>...<br align="left"/>|+ method<br align="left"/>...<br align="left"/>}>]
edge [dir=back arrowtail=empty style=dashed]
Class1 -> System_1 [label=implementation]
System_1 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>System</b> </td> </tr>
<tr> <td>
<table border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left" >+ property</td> </tr>
<tr> <td port="ss1" align="left" >- Subsystem 1</td> </tr>
<tr> <td port="ss2" align="left" >- Subsystem 2</td> </tr>
<tr> <td port="ss3" align="left" >- Subsystem 3</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">+ method<br/>...<br align="left"/></td> </tr>
</table>>
]
edge [dir=back arrowtail=diamond]
System_1:ss1 -> Subsystem_1 [xlabel="composition"]
Subsystem_1 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>Subsystem 1</b> </td> </tr>
<tr> <td>
<table border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left">+ property</td> </tr>
<tr> <td align="left" port="r1">- resource</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">
+ method<br/>
...<br align="left"/>
</td> </tr>
</table>>
]
Subsystem_2 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>Subsystem 2</b> </td> </tr>
<tr> <td>
<table align="left" border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left">+ property</td> </tr>
<tr> <td align="left" port="r1">- resource</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">
+ method<br/>
...<br align="left"/>
</td> </tr>
</table>>
]
Subsystem_3 [
shape=plain
label=<<table border="0" cellborder="1" cellspacing="0" cellpadding="4">
<tr> <td> <b>Subsystem 3</b> </td> </tr>
<tr> <td>
<table border="0" cellborder="0" cellspacing="0" >
<tr> <td align="left">+ property</td> </tr>
<tr> <td align="left" port="r1">- resource</td> </tr>
<tr> <td align="left">...</td> </tr>
</table>
</td> </tr>
<tr> <td align="left">
+ method<br/>
...<br align="left"/>
</td> </tr>
</table>>
]
System_1:ss2 -> Subsystem_2;
System_1:ss3 -> Subsystem_3;
edge [xdir=back arrowtail=odiamond]
Subsystem_1:r1 -> "Shared resource" [label=aggregation]
Subsystem_2:r1 -> "Shared resource"
Subsystem_3:r1 -> "Shared resource"
"Shared resource" [
label = <{
<b>Shared resource</b>
|
+ property<br align="left"/>
...<br align="left"/>
|
+ method<br align="left"/>
...<br align="left"/>
}>
]
}
[/graph]
``
#+LAYOUT_END
# Graphviz cache
Graphviz graphs that have been rendered to **svg** are stored in the cache database, under table ``cached_dot``.
Unless you modify the graph or its properties, it won't be rendered again; instead it will be sourced from the database.
# Bindings
* ``Lua, nml.graphviz.push(layout, width, dot)``
** ``layout`` *(string)* the layout engine
** ``width`` *(string)* the width property (empty string for default)
** ``dot`` *(string)* the graphviz code

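The Rust side of the `nml.graphviz.push` binding is not part of this diff; purely as a hedged sketch, this is roughly how such a function could be exposed through mlua (everything except the mlua API itself is an assumption):

```rust
use mlua::{Lua, Table};

fn register_graphviz_binding(lua: &Lua) -> mlua::Result<()> {
    // Assumes a global `nml` table already exists.
    let nml: Table = lua.globals().get("nml")?;
    let graphviz = lua.create_table()?;
    graphviz.set(
        "push",
        lua.create_function(|_, (layout, width, dot): (String, String, String)| {
            // The real binding would push a Graphviz element into the
            // document being parsed; here we only validate the arguments.
            if dot.is_empty() {
                return Err(mlua::Error::RuntimeError("empty dot source".into()));
            }
            let _ = (layout, width);
            Ok(())
        })?,
    )?;
    nml.set("graphviz", graphviz)
}
```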

@@ -1,16 +1,12 @@
 @import ../template.nml
-%<make_doc({"External Tools"}, "LaTeX", "LaTeX")>%
+@compiler.output = latex.html
+@nav.title = LaTeX
+@nav.category = External Tools
+@html.page_title = Documentation | LaTeX
 @LaTeX = $|[kind=inline, caption=LaTeX]\LaTeX|$
-#+LAYOUT_BEGIN Centered
 *Bring some %LaTeX% unto your document!*
-#+LAYOUT_END
-# Requirements
-In order to use LaTeX processing, you need to have a %LaTeX% distribution installed. We recommend the [TeX Live](https://en.wikipedia.org/wiki/TeX_Live) distribution.
-You'll also need to install the [latex2svg](https://github.com/ef3d0c3e/nml/blob/master/third/latex2svg) python script provided with NML. You'll have to follow the installation instructions from the [original latex2svg repository](https://github.com/Moonbase59/latex2svg). If you don't want to add the script to your `\$PATH`, you can set the executable path in the §{tex_env}[caption=LaTeX environment].
 # Inline Math
@@ -48,7 +44,6 @@ $|\begin{tikzpicture}
 ``
 Gives the following:
-#+LAYOUT_BEGIN Centered
 $|\begin{tikzpicture}
 \begin{axis}
 \addplot3[patch,patch refines=3,
@@ -69,9 +64,8 @@ $|\begin{tikzpicture}
 };
 \end{axis}
 \end{tikzpicture}|$
-#+LAYOUT_END
-#{tex_env} LaTeX environment
+# LaTeX environment
 You can define multiple %LaTeX% environments, the default being `main`
 * ``@tex.env.fontsize`` The fontsize (in pt) specified to `latex2svg` (default: `12`).
@@ -105,20 +99,4 @@ To set the environment you wish to use for a particular %LaTeX% element, set the
 %LaTeX% elements that have been successfully rendered to **svg** are stored in the cache database, to avoid processing them a second time.
 Note that this cache is shared between documents, so you don't need to reprocess them if they share the same environment.
-They are stored under the table named ``Plain Text,cached_tex``, if you modify the `env` all elements will be reprocessed which may take a while...
+They are stored under the table named ``cached_tex``, if you modify the `env` all elements will be reprocessed which may take a while...
-# Bindings
-* ``Lua, nml.tex.push_math(kind, tex [, env [, caption]])``
-inserts a math mode %LaTeX% element.
-** ``kind`` *(string)* the element kind (inline or block)
-** ``tex`` *(string)* the %LaTeX% code
-** ``env`` *(string)* the %LaTeX% environment (defaults to `main`)
-** ``caption`` *(string)* the accessibility caption
-* ``Lua, nml.tex.push(kind, tex [, env [, caption]])``
-inserts a non-math %LaTeX% element.
-** ``kind`` *(string)* the element kind (inline or block)
-** ``tex`` *(string)* the %LaTeX% code
-** ``env`` *(string)* the %LaTeX% environment (defaults to `main`)
-** ``caption`` *(string)* the accessibility caption


@@ -1,4 +1,6 @@
 @import template.nml
-%<make_doc({}, "Index", "Index")>%
+@compiler.output = index.html
+@nav.title = Documentation
+@html.page_title = Documentation | Index

 # Welcome to the NML documentation!


@@ -1,5 +1,8 @@
 @import ../template.nml
-%<make_doc({"Lua"}, "Lua", "Lua Basics")>%
+@compiler.output = lua.html
+@nav.title = Lua
+@nav.category = Lua
+@html.page_title = Documentation | Lua

 # Running lua code


@ -1,31 +0,0 @@
@import template.nml
@nav.previous = Sections
%<make_doc({}, "References", "References")>%
#{internal_references} Internal references
Internal references allow you to create references to elements defined within the current document.
Reference to the current section: ``§{internal_references}`` → §{internal_references}
## Media references
![flower](assets/flower.webm)[caption = Flower]
When you reference a medium from the current document, the reference can be hovered to show the referenced medium: §{flower}.
# External references
You can reference elements from other documents by adding the document's name before the reference name (separated by a ``#``).
The document name refers to the output file (as defined by the variable `compiler.output`) excluding the extension.
* ``§{doc#ref}``: Finds reference named `ref` in document named `doc`.
* ``§{#ref}``: Finds reference named `ref` in all documents.
Note that this will fail if there are multiple documents defining reference `ref`.
For instance:
* ``§{LaTeX#tex_env}[caption=LaTeX environment]`` → §{LaTeX#tex_env}[caption=LaTeX environment]
* ``§{#tex_env}[caption=LaTeX environment]`` → §{#tex_env}[caption=LaTeX environment]
# Properties
* ``caption`` The display caption for the reference


@ -1,67 +0,0 @@
@import template.nml
@nav.previous = Getting Started
%<make_doc({}, "Sections", "Sections")>%
#{first} Sections
To add a section to your document, put one or more ``Plain Text, #`` at the start of the line, followed by a space and the name of your section.
Which will render as:
#+LAYOUT_BEGIN Split
:: Make sure they don't pollute the ToC
#+ Section name
##+ Subsection
##*+ Unnumbered section
##+ Unnumbered section
#+ This section is not in the ToC
#+LAYOUT_NEXT
Given by the following:
``
# Section name
## Subsection
#* Unnumbered section
#+ This section is not in the ToC
``
#+LAYOUT_END
# Sections references
You can create a referenceable section by using ``Plain Text, #{refname}``, where `refname` is an internal reference name for use only within this document.
You can then create a clickable reference to this section: ``§{refname}`` or ``§{refname}[caption=Click me!]``. Below is an example of this in action:
###{refname}+* Section
§{refname}[caption=Click me!] or §{first}[caption=First section]
``
###{refname}+* Section
§{refname}[caption=Click me!] or §{first}[caption=First section]
``
# Section styling
The styling for the section link is controlled by the style key ``style.section``
* ``link_pos``: `Before|After|None` Position of the section link.
* ``link``: `[Before, Link, After]` An array of 3 strings
```JSON, Default Style
{
"link_pos": "Before",
"link": ["", "🔗", " "]
}
```
# Bindings
* ``Lua, nml.section.push(title, depth, [, kind [, reference]])``
** ``title`` *(string)* the section display title
** ``depth`` *(number)* the section depth
** ``kind`` *(string)* the section kind
**- `\*` for unnumbered
**- `+` for outside of the table of content
**- `\*+` or `+\*` for both
** ``reference`` *(string)* the section reference name


@ -1,32 +0,0 @@
@import template.nml
@nav.previous = Index
%<make_doc({}, "Getting Started", "Getting Started")>%
# Building NML
You need a nightly version of rustc to compile NML.
Instructions for your operating system can be found on [Rust's website](https://forge.rust-lang.org/infra/other-installation-methods.html).
You'll also need liblua 5.4 installed. Once built, you can move the `nml` executable from `target/release/nml` into your `\$PATH`.
``cargo build --bin nml`` or for release mode: ``cargo build --release --bin nml``
# Building your first document
* ``nml -i input.nml -o output.html``
# Using the cache
NML relies on sqlite to keep a cache of precompiled elements that take a long time to process (e.g $|[kind=inline] \LaTeX|$).
To enable caching, use option `-d` with a path: ``-d cache.db``. You can reuse the same cache for multiple documents and benefit from cached elements.
Note that in directory-processing mode, a cache is required so that only modified ``.nml`` files get reprocessed.
# Directory-Processing mode
To use directory-processing mode, you need to pass an input directory and an output directory. Directory-processing mode requires that you use a database, so that it knows which documents have already been compiled. If the output directory doesn't exist, it will be automatically created.
Compiling the docs:
``Plain Text,
nml -i docs -o docs_out -d cache.db
``
If you modify an ``Plain Text,@import``ed file, you will need to use the ``--force-rebuild`` option, as NML currently doesn't track which files are imported by other files.


@@ -1,12 +1,15 @@
 @import ../template.nml
-%<make_doc({"Styles"}, "Basic", "Basic Styles")>%
+@compiler.output = basic.html
+@nav.title = Basic
+@nav.category = Styles
+@html.page_title = Documentation | Basic Styles

 # Basic styles

 ## Bold
 Enclose text between two ``**`` to render it **bold**!
 * ``**Bold text**`` → **Bold text**
-* ``Bold [**link**](#)`` → Bold [**link**](#)
+* ``**Bold [link](#)**`` → **Bold [link](#)**

 ## Italic


@ -1,62 +0,0 @@
@import ../template.nml
%<make_doc({"Styles"}, "Layouts", "Basic Layouts")>%
# Layouts
You can create layout blocks by using the following tokens:
* ``#+LAYOUT_BEGIN <layout_name>`` Starts layout `<layout_name>`
* ``#+LAYOUT_NEXT`` Advances layout to the next block
* ``#+LAYOUT_END`` Ends last created layout
Here's an example of what you can do using layouts (with flashy colors for show):
#+LAYOUT_BEGIN[style=background-color:#F00;flex:0.5] Split
First
#+LAYOUT_BEGIN[style=background-color:#FF0] Centered
Second
#+LAYOUT_END
#+LAYOUT_NEXT[style=background-color:#00F]
Third
#+LAYOUT_BEGIN[style=background-color:#0FF] Split
Fourth
#+LAYOUT_NEXT[style=background-color:#0F0]
Fifth
#+LAYOUT_END
#+LAYOUT_END
Given by the following code:
```Plain Text
#+LAYOUT_BEGIN[style=background-color:#F00;flex:0.5] Split
First
#+LAYOUT_BEGIN[style=background-color:#FF0] Centered
Second
#+LAYOUT_END
#+LAYOUT_NEXT[style=background-color:#00F]
Third
#+LAYOUT_BEGIN[style=background-color:#0FF] Split
Fourth
#+LAYOUT_NEXT[style=background-color:#0F0]
Fifth
#+LAYOUT_END
#+LAYOUT_END
```
*(indentation is for readability)*
# Available layouts
## Centered
The Centered layout aligns text to the center of the current block.
####+* Style
The ``Centered`` layout uses the `.centered` css class to center the text.
####+* Properties
* ``style`` Added css style to the div (defaults to none)
## Split
####+* Style
The ``Split`` layout uses the `.split-container` and `.split` css class to create the desired layout.
If you wish to modify the relative width of the splits, add `style=flex: 0.5` to the properties; this makes the following split half the width of the other splits.
####+* Properties
* ``style`` Added css style to the div (defaults to none)


@@ -1,39 +1,7 @@
 @import ../template.nml
-%<make_doc({"Styles"}, "User-Defined", "User-Defined Styles")>%
+@compiler.output = user-defined.html
+@nav.title = User-Defined
+@nav.category = Styles
+@html.page_title = Documentation | User-Defined Styles
-# Defining a custom style
+# TODO
```Lua
%<[main]
function undercustom_start(color)
nml.raw.push("inline", "<span style=\"border-bottom: 1px dashed " .. color .. "\">")
end
function undercustom_end()
nml.raw.push("inline", "</span>")
end
nml.custom_style.define_toggled("Undercustom Red", "~", "undercustom_start(\"red\")", "undercustom_end()")
nml.custom_style.define_paired("Undercustom Green", "[|", "|]", "undercustom_start(\"Green\")", "undercustom_end()")
>%
```
%<[main]
function undercustom_start(color)
nml.raw.push("inline", "<span style=\"border-bottom: 1px dashed " .. color .. "\">")
end
function undercustom_end()
nml.raw.push("inline", "</span>")
end
nml.custom_style.define_toggled("Undercustom Red", "~", "undercustom_start(\"red\")", "undercustom_end()")
nml.custom_style.define_paired("Undercustom Green", "[|", "|]", "undercustom_start(\"Green\")", "undercustom_end()")
>%
Results in the following:
* ``Plain Text,~Dashed underline~`` → ~Dashed underline~
* ``Plain Text,[|Dashed underline|]`` → [|Dashed underline|]
# Limitations
* Custom styles cannot be removed and will be defined through the entire document
* Custom styles defined from lua must have their `start` and `end` functions in the `main` lua kernel.


@@ -6,27 +6,3 @@
 \definecolor{__color1}{HTML}{d5d5d5} \\
 \everymath{\color{__color1}\displaystyle}
 @tex.main.block_prepend = \color{__color1}
@<
function make_doc(categories, title, page_title)
-- Navigation
nml.variable.insert("nav.title", title)
if categories[1] ~= nil
then
nml.variable.insert("nav.category", categories[1])
if categories[2] ~= nil
then
nml.variable.insert("nav.subcategory", categories[2])
end
end
-- HTML
nml.variable.insert("html.page_title", "NML | " .. page_title)
nml.variable.insert("compiler.output", page_title .. ".html")
end
>@
@@style.section = {
"link_pos": "Before",
"link": ["", "🔗 ", " "]
}

src/cache/cache.rs

@@ -23,7 +23,7 @@ pub trait Cached {
    fn key(&self) -> <Self as Cached>::Key;
-   fn init(con: &Connection) -> Result<(), rusqlite::Error> {
+   fn init(con: &mut Connection) -> Result<(), rusqlite::Error> {
        con.execute(<Self as Cached>::sql_table(), ()).map(|_| ())
    }
@@ -38,7 +38,7 @@ pub trait Cached {
    /// Note that on error, [`f`] may still have been called
    fn cached<E, F>(
        &self,
-       con: &Connection,
+       con: &mut Connection,
        f: F,
    ) -> Result<<Self as Cached>::Value, CachedError<E>>
    where
@@ -62,10 +62,10 @@ pub trait Cached {
        if let Some(value) = value {
            // Found in cache
-           Ok(value)
+           return Ok(value);
        } else {
            // Compute a value
-           let value = match f(self) {
+           let value = match f(&self) {
                Ok(val) => val,
                Err(e) => return Err(CachedError::GenErr(e)),
            };

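A hedged sketch of what the signature change above means at a call site: on the batch side, `init` and `cached` both borrow the connection mutably. `DotGraph`, `render_svg`, and the error mapping are placeholders, not code from this diff:

```rust
use rusqlite::Connection;

// `DotGraph` stands in for some type implementing the `Cached` trait above.
fn cached_render(graph: &DotGraph, db_path: &str) -> Result<String, String> {
    let mut con = Connection::open(db_path).map_err(|e| e.to_string())?;

    // Both calls now require `&mut Connection`.
    DotGraph::init(&mut con).map_err(|e| e.to_string())?;
    graph
        .cached(&mut con, |g| render_svg(g))
        .map_err(|_| "rendering failed".to_string())
}
```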

@ -1,137 +1,48 @@
use std::cell::Ref;
use std::cell::RefCell; use std::cell::RefCell;
use std::cell::RefMut;
use std::collections::HashMap; use std::collections::HashMap;
use std::rc::Rc; use std::rc::Rc;
use rusqlite::Connection; use rusqlite::Connection;
use crate::document::document::CrossReference;
use crate::document::document::Document; use crate::document::document::Document;
use crate::document::document::ElemReference; use crate::document::document::ElemReference;
use crate::document::variable::Variable; use crate::document::variable::Variable;
use super::postprocess::PostProcess;
#[derive(Clone, Copy)] #[derive(Clone, Copy)]
pub enum Target { pub enum Target {
HTML, HTML,
#[allow(unused)]
LATEX, LATEX,
} }
pub struct Compiler<'a> { pub struct Compiler {
target: Target, target: Target,
cache: Option<&'a Connection>, cache: Option<RefCell<Connection>>,
reference_count: RefCell<HashMap<String, HashMap<String, usize>>>, reference_count: RefCell<HashMap<String, HashMap<String, usize>>>,
sections_counter: RefCell<Vec<usize>>, // TODO: External references, i.e resolved later
unresolved_references: RefCell<Vec<(usize, CrossReference)>>,
} }
impl<'a> Compiler<'a> { impl Compiler {
pub fn new(target: Target, con: Option<&'a Connection>) -> Self { pub fn new(target: Target, db_path: Option<String>) -> Self {
let cache = match db_path {
None => None,
Some(path) => match Connection::open(path) {
Err(e) => panic!("Cannot connect to database: {e}"),
Ok(con) => Some(con),
},
};
Self { Self {
target, target,
cache: con, cache: cache.map(|con| RefCell::new(con)),
reference_count: RefCell::new(HashMap::new()), reference_count: RefCell::new(HashMap::new()),
sections_counter: RefCell::new(vec![]),
unresolved_references: RefCell::new(vec![]),
} }
} }
/// Gets the section counter for a given depth
/// This function modifies the section counter
pub fn section_counter(&self, depth: usize) -> Ref<'_, Vec<usize>> {
// Increment current counter
if self.sections_counter.borrow().len() == depth {
self.sections_counter
.borrow_mut()
.last_mut()
.map(|id| *id += 1);
return Ref::map(self.sections_counter.borrow(), |b| b);
}
// Close
while self.sections_counter.borrow().len() > depth {
self.sections_counter.borrow_mut().pop();
}
// Open
while self.sections_counter.borrow().len() < depth {
self.sections_counter.borrow_mut().push(1);
}
Ref::map(self.sections_counter.borrow(), |b| b)
}
/// Sanitizes text for a [`Target`]
pub fn sanitize<S: AsRef<str>>(target: Target, str: S) -> String {
match target {
Target::HTML => str
.as_ref()
.replace("&", "&amp;")
.replace("<", "&lt;")
.replace(">", "&gt;")
.replace("\"", "&quot;"),
_ => todo!("Sanitize not implemented"),
}
}
/// Sanitizes a format string for a [`Target`]
///
/// # Notes
///
/// This function may process invalid format string, which will be caught later
/// by runtime_format.
pub fn sanitize_format<S: AsRef<str>>(target: Target, str: S) -> String {
match target {
Target::HTML => {
let mut out = String::new();
let mut braces = 0;
for c in str.as_ref().chars() {
if c == '{' {
out.push(c);
braces += 1;
continue;
} else if c == '}' {
out.push(c);
if braces != 0 {
braces -= 1;
}
continue;
}
// Inside format args
if braces % 2 == 1 {
out.push(c);
continue;
}
match c {
'&' => out += "&amp;",
'<' => out += "&lt;",
'>' => out += "&gt;",
'"' => out += "&quot;",
_ => out.push(c),
}
}
out
}
_ => todo!("Sanitize not implemented"),
}
}
/// Gets a reference name
pub fn refname<S: AsRef<str>>(target: Target, str: S) -> String {
Self::sanitize(target, str).replace(' ', "_")
}
/// Inserts or get a reference id for the compiled document /// Inserts or get a reference id for the compiled document
/// ///
/// # Parameters /// # Parameters
/// - [`reference`] The reference to get or insert /// - [`reference`] The reference to get or insert
pub fn reference_id(&self, document: &dyn Document, reference: ElemReference) -> usize { pub fn reference_id<'a>(&self, document: &'a dyn Document, reference: ElemReference) -> usize {
let mut borrow = self.reference_count.borrow_mut(); let mut borrow = self.reference_count.borrow_mut();
let reference = document.get_from_reference(&reference).unwrap(); let reference = document.get_from_reference(&reference).unwrap();
let refkey = reference.refcount_key(); let refkey = reference.refcount_key();
@ -157,18 +68,22 @@ impl<'a> Compiler<'a> {
} }
} }
/// Inserts a new crossreference
pub fn insert_crossreference(&self, pos: usize, reference: CrossReference) {
self.unresolved_references
.borrow_mut()
.push((pos, reference));
}
pub fn target(&self) -> Target { self.target } pub fn target(&self) -> Target { self.target }
pub fn cache(&self) -> Option<&'a Connection> { pub fn cache(&self) -> Option<RefMut<'_, Connection>> {
self.cache self.cache.as_ref().map(RefCell::borrow_mut)
//self.cache.as_ref().map(RefCell::borrow_mut) }
pub fn sanitize<S: AsRef<str>>(target: Target, str: S) -> String {
match target {
Target::HTML => str
.as_ref()
.replace("&", "&amp;")
.replace("<", "&lt;")
.replace(">", "&gt;")
.replace("\"", "&quot;"),
_ => todo!("Sanitize not implemented"),
}
} }
pub fn header(&self, document: &dyn Document) -> String { pub fn header(&self, document: &dyn Document) -> String {
@ -176,7 +91,10 @@ impl<'a> Compiler<'a> {
document: &dyn Document, document: &dyn Document,
var_name: &'static str, var_name: &'static str,
) -> Option<Rc<dyn Variable>> { ) -> Option<Rc<dyn Variable>> {
document.get_variable(var_name).or_else(|| { document
.get_variable(var_name)
.and_then(|var| Some(var))
.or_else(|| {
println!( println!(
"Missing variable `{var_name}` in {}", "Missing variable `{var_name}` in {}",
document.source().name() document.source().name()
@ -191,10 +109,7 @@ impl<'a> Compiler<'a> {
result += "<!DOCTYPE HTML><html><head>"; result += "<!DOCTYPE HTML><html><head>";
result += "<meta charset=\"UTF-8\">"; result += "<meta charset=\"UTF-8\">";
if let Some(page_title) = get_variable_or_error(document, "html.page_title") { if let Some(page_title) = get_variable_or_error(document, "html.page_title") {
result += format!( result += format!("<title>{}</title>", Compiler::sanitize(self.target(), page_title.to_string()))
"<title>{}</title>",
Compiler::sanitize(self.target(), page_title.to_string())
)
.as_str(); .as_str();
} }
@ -205,7 +120,7 @@ impl<'a> Compiler<'a> {
) )
.as_str(); .as_str();
} }
result += r#"</head><body><div class="layout">"#; result += r#"</head><body><div id="layout">"#;
// TODO: TOC // TODO: TOC
// TODO: Author, Date, Title, Div // TODO: Author, Date, Title, Div
@ -226,20 +141,20 @@ impl<'a> Compiler<'a> {
result result
} }
pub fn compile(&self, document: &dyn Document) -> (CompiledDocument, PostProcess) { pub fn compile(&self, document: &dyn Document) -> CompiledDocument {
let borrow = document.content().borrow(); let borrow = document.content().borrow();
// Header // Header
let header = self.header(document); let header = self.header(document);
// Body // Body
let mut body = r#"<div class="content">"#.to_string(); let mut body = r#"<div id="content">"#.to_string();
for i in 0..borrow.len() { for i in 0..borrow.len() {
let elem = &borrow[i]; let elem = &borrow[i];
match elem.compile(self, document, body.len()) { match elem.compile(self, document) {
Ok(result) => body.push_str(result.as_str()), Ok(result) => body.push_str(result.as_str()),
Err(err) => println!("Unable to compile element: {err}\n{elem:#?}"), Err(err) => println!("Unable to compile element: {err}\n{}", elem.to_string()),
} }
} }
body.push_str("</div>"); body.push_str("</div>");
@ -256,35 +171,14 @@ impl<'a> Compiler<'a> {
.map(|(key, var)| (key.clone(), var.to_string())) .map(|(key, var)| (key.clone(), var.to_string()))
.collect::<HashMap<String, String>>(); .collect::<HashMap<String, String>>();
// References CompiledDocument {
let references = document
.scope()
.borrow_mut()
.referenceable
.iter()
.map(|(key, reference)| {
let elem = document.get_from_reference(reference).unwrap();
let refid = self.reference_id(document, *reference);
(key.clone(), elem.refid(self, refid))
})
.collect::<HashMap<String, String>>();
let postprocess = PostProcess {
resolve_references: self.unresolved_references.replace(vec![]),
};
let cdoc = CompiledDocument {
input: document.source().name().clone(), input: document.source().name().clone(),
mtime: 0, mtime: 0,
variables, variables,
references,
header, header,
body, body,
footer, footer,
}; }
(cdoc, postprocess)
} }
} }
@ -295,14 +189,12 @@ pub struct CompiledDocument {
/// Modification time (i.e seconds since last epoch) /// Modification time (i.e seconds since last epoch)
pub mtime: u64, pub mtime: u64,
/// All the variables defined in the document // TODO: Also store exported references
/// with values mapped by [`Variable::to_string()`] // so they can be referenced from elsewhere
// This will also require rebuilding in case some exported references have changed...
/// Variables exported to string, so they can be queried later
pub variables: HashMap<String, String>, pub variables: HashMap<String, String>,
/// All the referenceable elements in the document
/// with values mapped by [`ReferenceableElement::refid()`]
pub references: HashMap<String, String>,
/// Compiled document's header /// Compiled document's header
pub header: String, pub header: String,
/// Compiled document's body /// Compiled document's body
@ -319,7 +211,6 @@ impl CompiledDocument {
input TEXT PRIMARY KEY, input TEXT PRIMARY KEY,
mtime INTEGER NOT NULL, mtime INTEGER NOT NULL,
variables TEXT NOT NULL, variables TEXT NOT NULL,
internal_references TEXT NOT NULL,
header TEXT NOT NULL, header TEXT NOT NULL,
body TEXT NOT NULL, body TEXT NOT NULL,
footer TEXT NOT NULL footer TEXT NOT NULL
@ -329,7 +220,7 @@ impl CompiledDocument {
fn sql_get_query() -> &'static str { "SELECT * FROM compiled_documents WHERE input = (?1)" } fn sql_get_query() -> &'static str { "SELECT * FROM compiled_documents WHERE input = (?1)" }
fn sql_insert_query() -> &'static str { fn sql_insert_query() -> &'static str {
"INSERT OR REPLACE INTO compiled_documents (input, mtime, variables, internal_references, header, body, footer) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)" "INSERT OR REPLACE INTO compiled_documents (input, mtime, variables, header, body, footer) VALUES (?1, ?2, ?3, ?4, ?5, ?6)"
} }
pub fn init_cache(con: &Connection) -> Result<usize, rusqlite::Error> { pub fn init_cache(con: &Connection) -> Result<usize, rusqlite::Error> {
@ -342,16 +233,15 @@ impl CompiledDocument {
input: input.to_string(), input: input.to_string(),
mtime: row.get_unwrap::<_, u64>(1), mtime: row.get_unwrap::<_, u64>(1),
variables: serde_json::from_str(row.get_unwrap::<_, String>(2).as_str()).unwrap(), variables: serde_json::from_str(row.get_unwrap::<_, String>(2).as_str()).unwrap(),
references: serde_json::from_str(row.get_unwrap::<_, String>(3).as_str()).unwrap(), header: row.get_unwrap::<_, String>(3),
header: row.get_unwrap::<_, String>(4), body: row.get_unwrap::<_, String>(4),
body: row.get_unwrap::<_, String>(5), footer: row.get_unwrap::<_, String>(5),
footer: row.get_unwrap::<_, String>(6),
}) })
}) })
.ok() .ok()
} }
/// Interts [`CompiledDocument`] into cache /// Inserts [`CompiledDocument`] into cache
pub fn insert_cache(&self, con: &Connection) -> Result<usize, rusqlite::Error> { pub fn insert_cache(&self, con: &Connection) -> Result<usize, rusqlite::Error> {
con.execute( con.execute(
Self::sql_insert_query(), Self::sql_insert_query(),
@ -359,7 +249,6 @@ impl CompiledDocument {
&self.input, &self.input,
&self.mtime, &self.mtime,
serde_json::to_string(&self.variables).unwrap(), serde_json::to_string(&self.variables).unwrap(),
serde_json::to_string(&self.references).unwrap(),
&self.header, &self.header,
&self.body, &self.body,
&self.footer, &self.footer,
@ -367,19 +256,3 @@ impl CompiledDocument {
) )
} }
} }
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn sanitize_test() {
assert_eq!(Compiler::sanitize(Target::HTML, "<a>"), "&lt;a&gt;");
assert_eq!(Compiler::sanitize(Target::HTML, "&lt;"), "&amp;lt;");
assert_eq!(Compiler::sanitize(Target::HTML, "\""), "&quot;");
assert_eq!(Compiler::sanitize_format(Target::HTML, "{<>&\"}"), "{<>&\"}");
assert_eq!(Compiler::sanitize_format(Target::HTML, "{{<>}}"), "{{&lt;&gt;}}");
assert_eq!(Compiler::sanitize_format(Target::HTML, "{{<"), "{{&lt;");
}
}

View file

@ -1,4 +1,2 @@
pub mod compiler; pub mod compiler;
pub mod navigation; pub mod navigation;
pub mod process;
pub mod postprocess;

View file

@ -1,58 +1,45 @@
use std::cell::RefCell;
use std::collections::HashMap; use std::collections::HashMap;
use crate::compiler::compiler::Compiler; use crate::compiler::compiler::Compiler;
use super::compiler::CompiledDocument; use super::compiler::CompiledDocument;
use super::compiler::Target; use super::compiler::Target;
use super::postprocess::PostProcess;
#[derive(Debug, Default, PartialEq, Eq, Clone)]
pub struct NavEntry {
title: String,
path: String,
previous: Option<String>,
}
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub struct NavEntries { pub struct NavEntry {
pub(self) entries: Vec<NavEntry>, pub(self) entries: Vec<(String, String)>,
pub(self) children: HashMap<String, NavEntries>, pub(self) children: HashMap<String, NavEntry>,
} }
impl NavEntries { impl NavEntry {
// FIXME: Sanitize // FIXME: Sanitize
pub fn compile(&self, target: Target, doc: &RefCell<CompiledDocument>) -> String { pub fn compile(&self, target: Target, doc: &CompiledDocument) -> String {
let doc_borrow = doc.borrow();
let categories = vec![ let categories = vec![
doc_borrow doc.get_variable("nav.category").map_or("", |s| s.as_str()),
.get_variable("nav.category") doc.get_variable("nav.subcategory")
.map_or("", |s| s.as_str()),
doc_borrow
.get_variable("nav.subcategory")
.map_or("", |s| s.as_str()), .map_or("", |s| s.as_str()),
]; ];
let mut result = String::new(); let mut result = String::new();
match target { match target {
Target::HTML => { Target::HTML => {
result += r#"<div class="navbar"><ul>"#; result += r#"<div id="navbar"><ul>"#;
fn process( fn process(
target: Target, target: Target,
categories: &Vec<&str>, categories: &Vec<&str>,
did_match: bool, did_match: bool,
result: &mut String, result: &mut String,
entry: &NavEntries, entry: &NavEntry,
depth: usize, depth: usize,
) { ) {
// Orphans = Links // Orphans = Links
for entry in &entry.entries { for (title, path) in &entry.entries {
result.push_str( result.push_str(
format!( format!(
r#"<li><a href="{}">{}</a></li>"#, r#"<li><a href="{}">{}</a></li>"#,
Compiler::sanitize(target, entry.path.as_str()), Compiler::sanitize(target, path),
Compiler::sanitize(target, entry.title.as_str()) Compiler::sanitize(target, title)
) )
.as_str(), .as_str(),
); );
@ -88,93 +75,47 @@ impl NavEntries {
} }
result result
} }
fn sort_entry(
entrymap: &HashMap<String, Option<String>>,
left_title: &str,
right_title: &str,
) -> std::cmp::Ordering {
let left_previous = entrymap.get(left_title).unwrap();
let right_previous = entrymap.get(right_title).unwrap();
match (left_previous, right_previous) {
(Some(lp), Some(rp)) => {
if lp.as_str() == right_title {
std::cmp::Ordering::Greater
} else if rp.as_str() == left_title {
std::cmp::Ordering::Less
} else if rp.as_str() == lp.as_str() {
left_title.cmp(right_title)
} else {
Self::sort_entry(entrymap, lp.as_str(), rp.as_str())
}
}
(Some(lp), None) => {
if right_title == lp.as_str() {
std::cmp::Ordering::Greater
} else {
left_title.cmp(right_title)
}
}
(None, Some(rp)) => {
if left_title == rp.as_str() {
std::cmp::Ordering::Less
} else {
left_title.cmp(right_title)
}
}
(None, None) => left_title.cmp(right_title),
}
}
} }
pub fn create_navigation( pub fn create_navigation(docs: &Vec<CompiledDocument>) -> Result<NavEntry, String> {
docs: &Vec<(RefCell<CompiledDocument>, Option<PostProcess>)>, let mut nav = NavEntry {
) -> Result<NavEntries, String> {
let mut nav = NavEntries {
entries: vec![], entries: vec![],
children: HashMap::new(), children: HashMap::new(),
}; };
// All paths (for duplicate checking) for doc in docs {
let mut all_paths = HashMap::new(); let cat = doc.get_variable("nav.category");
let subcat = doc.get_variable("nav.subcategory");
for (doc, _) in docs { let title = doc
let doc_borrow = doc.borrow();
let cat = doc_borrow.get_variable("nav.category");
let subcat = doc_borrow.get_variable("nav.subcategory");
let title = doc_borrow
.get_variable("nav.title") .get_variable("nav.title")
.or(doc_borrow.get_variable("doc.title")); .or(doc.get_variable("doc.title"));
let previous = doc_borrow.get_variable("nav.previous").cloned(); let path = doc.get_variable("compiler.output");
let path = doc_borrow.get_variable("compiler.output");
let (title, path) = match (title, path) { let (title, path) = match (title, path) {
(Some(title), Some(path)) => (title, path), (Some(title), Some(path)) => (title, path),
_ => { _ => {
eprintln!("Skipping navigation generation for `{}`, must have a defined `@nav.title` and `@compiler.output`", doc_borrow.input); eprintln!("Skipping navigation generation for `{}`, must have a defined `@nav.title` and `@compiler.output`", doc.input);
continue; continue;
} }
}; };
// Get entry to insert into
let pent = if let Some(subcat) = subcat { let pent = if let Some(subcat) = subcat {
let cat = match cat { let cat = match cat {
Some(cat) => cat, Some(cat) => cat,
None => { None => {
eprintln!( eprintln!(
"Skipping `{}`: No `@nav.category`, but `@nav.subcategory` is set", "Skipping `{}`: No `@nav.category`, but `@nav.subcategory` is set",
doc_borrow.input doc.input
); );
continue; continue;
} }
}; };
let cat_ent = match nav.children.get_mut(cat.as_str()) { let mut cat_ent = match nav.children.get_mut(cat.as_str()) {
Some(cat_ent) => cat_ent, Some(cat_ent) => cat_ent,
None => { None => {
// Insert // Insert
nav.children.insert(cat.clone(), NavEntries::default()); nav.children.insert(cat.clone(), NavEntry::default());
nav.children.get_mut(cat.as_str()).unwrap() nav.children.get_mut(cat.as_str()).unwrap()
} }
}; };
@ -183,9 +124,7 @@ pub fn create_navigation(
Some(subcat_ent) => subcat_ent, Some(subcat_ent) => subcat_ent,
None => { None => {
// Insert // Insert
cat_ent cat_ent.children.insert(subcat.clone(), NavEntry::default());
.children
.insert(subcat.clone(), NavEntries::default());
cat_ent.children.get_mut(subcat.as_str()).unwrap() cat_ent.children.get_mut(subcat.as_str()).unwrap()
} }
} }
@ -194,7 +133,7 @@ pub fn create_navigation(
Some(cat_ent) => cat_ent, Some(cat_ent) => cat_ent,
None => { None => {
// Insert // Insert
nav.children.insert(cat.clone(), NavEntries::default()); nav.children.insert(cat.clone(), NavEntry::default());
nav.children.get_mut(cat.as_str()).unwrap() nav.children.get_mut(cat.as_str()).unwrap()
} }
} }
@ -202,158 +141,8 @@ pub fn create_navigation(
&mut nav &mut nav
}; };
// Find duplicates titles in current parent pent.entries.push((title.clone(), path.clone()))
for entry in &pent.entries {
if &entry.title == title {
return Err(format!(
"Conflicting entry title `{title}` for entries with the same parent: ({})",
pent.entries
.iter()
.map(|entry| entry.title.clone())
.collect::<Vec<_>>()
.join(", ")
));
} }
}
// Find duplicate paths
if let Some(dup_title) = all_paths.get(path) {
return Err(format!("Conflicting paths: `{path}`. Previously used for entry: `{dup_title}`, conflicting use in `{title}`"));
}
all_paths.insert(path.clone(), title.clone());
pent.entries.push(NavEntry {
title: title.clone(),
path: path.clone(),
previous,
});
}
// Sort entries
fn sort_entries(nav: &mut NavEntries) {
let entrymap = nav
.entries
.iter()
.map(|ent| (ent.title.clone(), ent.previous.clone()))
.collect::<HashMap<String, Option<String>>>();
nav.entries
.sort_by(|l, r| NavEntries::sort_entry(&entrymap, l.title.as_str(), r.title.as_str()));
for (_, child) in &mut nav.children {
sort_entries(child);
}
}
sort_entries(&mut nav);
Ok(nav) Ok(nav)
} }
#[cfg(test)]
mod tests {
use rand::prelude::SliceRandom;
use rand::rngs::OsRng;
use crate::compiler::process::process_from_memory;
use super::*;
#[test]
fn sort() {
let entries: Vec<NavEntry> = vec![
NavEntry {
title: "Index".into(),
path: "".into(),
previous: None,
},
NavEntry {
title: "AB".into(),
path: "".into(),
previous: Some("Index".into()),
},
NavEntry {
title: "Getting Started".into(),
path: "".into(),
previous: Some("Index".into()),
},
NavEntry {
title: "Sections".into(),
path: "".into(),
previous: Some("Getting Started".into()),
},
NavEntry {
title: "Style".into(),
path: "".into(),
previous: Some("Getting Started".into()),
},
];
let mut shuffled = entries.clone();
for _ in 0..10 {
let mut rng = OsRng {};
shuffled.shuffle(&mut rng);
let entrymap = shuffled
.iter()
.map(|ent| (ent.title.clone(), ent.previous.clone()))
.collect::<HashMap<String, Option<String>>>();
shuffled.sort_by(|l, r| {
NavEntries::sort_entry(&entrymap, l.title.as_str(), r.title.as_str())
});
assert_eq!(shuffled, entries);
}
}
#[test]
pub fn batch() {
let result = process_from_memory(
Target::HTML,
vec![
r#"
@html.page_title = 0
@compiler.output = 0.html
@nav.title = C
@nav.category = First
"#
.into(),
r#"
@html.page_title = 1
@compiler.output = 1.html
@nav.title = A
@nav.category = First
"#
.into(),
r#"
@html.page_title = 2
@compiler.output = 2.html
@nav.title = B
@nav.category = First
"#
.into(),
],
)
.unwrap();
let nav = create_navigation(&result).unwrap();
assert_eq!(
nav.children.get("First").unwrap().entries,
vec![
NavEntry {
title: "A".to_string(),
path: "1.html".to_string(),
previous: None
},
NavEntry {
title: "B".to_string(),
path: "2.html".to_string(),
previous: None
},
NavEntry {
title: "C".to_string(),
path: "0.html".to_string(),
previous: None
},
]
);
}
}

View file

@ -1,81 +0,0 @@
use std::cell::RefCell;
use crate::document::document::CrossReference;
use super::compiler::CompiledDocument;
use super::compiler::Target;
/// Represents the list of tasks that have to run after the document has been compiled and the
/// compiled document list has been built. Every task is stored with a raw byte position in the
/// compiled document's body. The position refers to the original, unmodified body and must
/// therefore be offset to account for insertions made by earlier post-processing tasks.
pub struct PostProcess {
/// List of references to resolve, i.e. insert the resolved refname at a given byte position
/// in the document's body
pub resolve_references: Vec<(usize, CrossReference)>,
}
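The offset bookkeeping described above boils down to the following pattern (a minimal sketch, not the implementation below; positions are assumed to be sorted in ascending order):

// Insert replacement strings at recorded byte positions of the original body,
// shifting each later position by the number of bytes inserted so far.
fn insert_at_positions(mut body: String, tasks: &[(usize, String)]) -> String {
    let mut offset = 0;
    for (pos, text) in tasks {
        body.insert_str(pos + offset, text);
        offset += text.len();
    }
    body
}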
impl PostProcess {
/// Applies postprocessing to a [`CompiledDocument`]
pub fn apply(
&self,
_target: Target,
list: &Vec<(RefCell<CompiledDocument>, Option<PostProcess>)>,
doc: &RefCell<CompiledDocument>,
) -> Result<String, String> {
let mut content = doc.borrow().body.clone();
let mut offset = 0;
for (pos, cross_ref) in &self.resolve_references {
// Cross-references
let mut found_ref: Option<(String, &RefCell<CompiledDocument>)> = None;
match cross_ref {
CrossReference::Unspecific(name) => {
for (doc, _) in list {
if let Some(found) = doc.borrow().references.get(name) {
// Check for duplicates
if let Some((_, previous_doc)) = &found_ref {
return Err(format!("Cannot use an unspecific reference for reference named: `{name}`. Found in document `{}` but also in `{}`. Specify the source of the reference to resolve the conflict.", previous_doc.borrow().input, doc.borrow().input));
}
found_ref = Some((found.clone(), doc));
}
}
}
CrossReference::Specific(doc_name, name) => {
let ref_doc = list.iter().find(|(doc, _)| {
let doc_borrow = doc.borrow();
if let Some(outname) = doc_borrow.variables.get("compiler.output") {
// Strip extension
let split_at = outname.rfind('.').unwrap_or(outname.len());
return doc_name == outname.split_at(split_at).0;
}
false
});
if ref_doc.is_none() {
return Err(format!(
"Cannot find document `{doc_name}` for reference `{name}` in `{}`",
doc.borrow().input
));
}
if let Some(found) = ref_doc.unwrap().0.borrow().references.get(name) {
found_ref = Some((found.clone(), &ref_doc.unwrap().0));
}
}
}
if let Some((found_ref, found_doc)) = &found_ref {
let found_borrow = found_doc.borrow();
let found_path = found_borrow.get_variable("compiler.output").ok_or("Unable to get the output. Aborting postprocessing.".to_string())?;
let insert_content = format!("{found_path}#{found_ref}");
content.insert_str(pos + offset, insert_content.as_str());
offset += insert_content.len();
} else {
return Err(format!("Cannot find reference `{cross_ref}` from document `{}`. Aborting postprocessing.", doc.borrow().input));
}
}
Ok(content)
}
}

View file

@ -1,196 +0,0 @@
use std::cell::RefCell;
use std::path::PathBuf;
use std::rc::Rc;
use std::time::UNIX_EPOCH;
use rusqlite::Connection;
use crate::document::document::Document;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::parser::ParserState;
use crate::parser::source::Source;
use crate::parser::source::SourceFile;
use super::compiler::CompiledDocument;
use super::compiler::Compiler;
use super::compiler::Target;
use super::postprocess::PostProcess;
/// Parses a source file into a document
fn parse(
parser: &LangParser,
source: Rc<dyn Source>,
debug_opts: &Vec<String>,
) -> Result<Box<dyn Document<'static>>, String> {
// Parse
//let source = SourceFile::new(input.to_string(), None).unwrap();
let (doc, _) = parser.parse(ParserState::new(parser, None), source.clone(), None);
if debug_opts.contains(&"ast".to_string()) {
println!("-- BEGIN AST DEBUGGING --");
doc.content()
.borrow()
.iter()
.for_each(|elem| println!("{elem:#?}"));
println!("-- END AST DEBUGGING --");
}
if debug_opts.contains(&"ref".to_string()) {
println!("-- BEGIN REFERENCES DEBUGGING --");
let sc = doc.scope().borrow();
sc.referenceable.iter().for_each(|(name, reference)| {
println!(" - {name}: `{:#?}`", doc.get_from_reference(reference));
});
println!("-- END REFERENCES DEBUGGING --");
}
if debug_opts.contains(&"var".to_string()) {
println!("-- BEGIN VARIABLES DEBUGGING --");
let sc = doc.scope().borrow();
sc.variables.iter().for_each(|(_name, var)| {
println!(" - `{:#?}`", var);
});
println!("-- END VARIABLES DEBUGGING --");
}
if parser.has_error() {
return Err("Parsing failed due to errors while parsing".to_string());
}
Ok(doc)
}
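A minimal call sketch for this helper (the file name and debug options are placeholders; to be run inside a function returning Result):

let parser = LangParser::default();
let source = Rc::new(SourceFile::new("index.nml".to_string(), None).unwrap());
// Print the AST and the collected references while parsing
let doc = parse(&parser, source, &vec!["ast".to_string(), "ref".to_string()])?;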
/// Takes a list of paths and processes it into a list of compiled documents
pub fn process(
target: Target,
files: Vec<PathBuf>,
db_path: &Option<String>,
force_rebuild: bool,
debug_opts: &Vec<String>,
) -> Result<Vec<(RefCell<CompiledDocument>, Option<PostProcess>)>, String> {
let mut compiled = vec![];
let current_dir = std::env::current_dir()
.map_err(|err| format!("Unable to get the current working directory: {err}"))?;
let con = db_path
.as_ref()
.map_or(Connection::open_in_memory(), Connection::open)
.map_err(|err| format!("Unable to open connection to the database: {err}"))?;
CompiledDocument::init_cache(&con)
.map_err(|err| format!("Failed to initialize cached document table: {err}"))?;
let parser = LangParser::default();
for file in files {
let meta = std::fs::metadata(&file)
.map_err(|err| format!("Failed to get metadata for `{file:#?}`: {err}"))?;
let modified = meta
.modified()
.map_err(|err| format!("Unable to query modification time for `{file:#?}`: {err}"))?;
// Move to file's directory
let file_parent_path = file
.parent()
.ok_or(format!("Failed to get parent path for `{file:#?}`"))?;
std::env::set_current_dir(file_parent_path)
.map_err(|err| format!("Failed to move to path `{file_parent_path:#?}`: {err}"))?;
let parse_and_compile = || -> Result<(CompiledDocument, Option<PostProcess>), String> {
// Parse
let source = SourceFile::new(file.to_str().unwrap().to_string(), None).unwrap();
println!("Parsing {}...", source.name());
let doc = parse(&parser, Rc::new(source), debug_opts)?;
// Compile
let compiler = Compiler::new(target, Some(&con));
let (mut compiled, postprocess) = compiler.compile(&*doc);
compiled.mtime = modified.duration_since(UNIX_EPOCH).unwrap().as_secs();
Ok((compiled, Some(postprocess)))
};
let (cdoc, post) = if force_rebuild {
parse_and_compile()?
} else {
match CompiledDocument::from_cache(&con, file.to_str().unwrap()) {
Some(compiled) => {
if compiled.mtime < modified.duration_since(UNIX_EPOCH).unwrap().as_secs() {
parse_and_compile()?
} else {
(compiled, None)
}
}
None => parse_and_compile()?,
}
};
compiled.push((RefCell::new(cdoc), post));
}
for (doc, postprocess) in &compiled {
if postprocess.is_none() {
continue;
}
// Post processing
let body = postprocess
.as_ref()
.unwrap()
.apply(target, &compiled, doc)?;
doc.borrow_mut().body = body;
// Insert into cache
doc.borrow().insert_cache(&con).map_err(|err| {
format!(
"Failed to insert compiled document from `{}` into cache: {err}",
doc.borrow().input
)
})?;
}
std::env::set_current_dir(current_dir)
.map_err(|err| format!("Failed to set current directory: {err}"))?;
Ok(compiled)
}
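Driving a batch build then looks roughly like this (paths and cache location are placeholders; to be run inside a function returning Result):

let files = vec![PathBuf::from("docs/index.nml"), PathBuf::from("docs/guide.nml")];
let compiled = process(
    Target::HTML,
    files,
    &Some("cache.db".to_string()),
    false,   // force_rebuild
    &vec![], // debug_opts
)?;
for (doc, _) in &compiled {
    println!("compiled {}", doc.borrow().input);
}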
/// Processes sources from in-memory strings
/// This function is intended for testing
#[cfg(test)]
pub fn process_from_memory(target: Target, sources: Vec<String>) -> Result<Vec<(RefCell<CompiledDocument>, Option<PostProcess>)>, String> {
let mut compiled = vec![];
let parser = LangParser::default();
for (idx, content) in sources.iter().enumerate() {
let parse_and_compile = || -> Result<(CompiledDocument, Option<PostProcess>), String> {
// Parse
let source = SourceFile::with_content(format!("{idx}"), content.clone(), None);
let doc = parse(&parser, Rc::new(source), &vec![])?;
// Compile
let compiler = Compiler::new(target, None);
let (compiled, postprocess) = compiler.compile(&*doc);
Ok((compiled, Some(postprocess)))
};
let (cdoc, post) = parse_and_compile()?;
compiled.push((RefCell::new(cdoc), post));
}
for (doc, postprocess) in &compiled {
if postprocess.is_none() {
continue;
}
// Post processing
let body = postprocess
.as_ref()
.unwrap()
.apply(target, &compiled, doc)?;
doc.borrow_mut().body = body;
}
Ok(compiled)
}

View file

@ -4,17 +4,13 @@ use std::cell::RefMut;
use std::collections::hash_map::HashMap; use std::collections::hash_map::HashMap;
use std::rc::Rc; use std::rc::Rc;
use serde::Deserialize;
use serde::Serialize;
use crate::parser::source::Source; use crate::parser::source::Source;
use super::element::Element; use super::element::Element;
use super::element::ReferenceableElement; use super::element::ReferenceableElement;
use super::variable::Variable; use super::variable::Variable;
/// For references inside the current document #[derive(Debug, Clone, Copy)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub enum ElemReference { pub enum ElemReference {
Direct(usize), Direct(usize),
@ -22,30 +18,10 @@ pub enum ElemReference {
Nested(usize, usize), Nested(usize, usize),
} }
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum CrossReference {
/// When the referenced document is unspecified
Unspecific(String),
/// When the referenced document is specified
Specific(String, String),
}
impl core::fmt::Display for CrossReference {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self
{
CrossReference::Unspecific(name) => write!(f, "#{name}"),
CrossReference::Specific(doc_name, name) => write!(f, "{doc_name}#{name}"),
}
}
}
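For illustration, the Display impl above renders the two variants as follows:

assert_eq!(
    CrossReference::Unspecific("intro".to_string()).to_string(),
    "#intro"
);
assert_eq!(
    CrossReference::Specific("guide".to_string(), "intro".to_string()).to_string(),
    "guide#intro"
);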
#[derive(Debug)] #[derive(Debug)]
pub struct Scope { pub struct Scope {
/// List of all referenceable elements in current scope. /// List of all referenceable elements in current scope.
/// All elements in this should return a non empty element /// All elements in this should return a non empty
/// when [`Element::as_referenceable`] is called
pub referenceable: HashMap<String, ElemReference>, pub referenceable: HashMap<String, ElemReference>,
pub variables: HashMap<String, Rc<dyn Variable>>, pub variables: HashMap<String, Rc<dyn Variable>>,
} }
@ -75,7 +51,7 @@ impl Scope {
// Variables // Variables
self.variables self.variables
.extend(other.variables.drain()); .extend(other.variables.drain().map(|(name, var)| (name, var)));
} }
false => { false => {
// References // References
@ -165,15 +141,15 @@ pub trait Document<'a>: core::fmt::Debug {
fn get_variable(&self, name: &str) -> Option<Rc<dyn Variable>> { fn get_variable(&self, name: &str) -> Option<Rc<dyn Variable>> {
match self.scope().borrow().variables.get(name) { match self.scope().borrow().variables.get(name) {
Some(variable) => { Some(variable) => {
Some(variable.clone()) return Some(variable.clone());
} }
// Continue search recursively // Continue search recursively
None => match self.parent() { None => match self.parent() {
Some(parent) => parent.get_variable(name), Some(parent) => return parent.get_variable(name),
// Not found // Not found
None => None, None => return None,
}, },
} }
} }
@ -189,23 +165,27 @@ pub trait Document<'a>: core::fmt::Debug {
scope: &RefCell<Scope>, scope: &RefCell<Scope>,
merge_as: Option<&String>, merge_as: Option<&String>,
) { ) {
if let Some(merge_as) = merge_as { self.scope().borrow_mut().merge( match merge_as {
&mut scope.borrow_mut(), Some(merge_as) => self.scope().borrow_mut().merge(
&mut *scope.borrow_mut(),
merge_as, merge_as,
self.content().borrow().len(), self.content().borrow().len() + 1,
) } ),
_ => {}
}
// Content // Content
self.content() self.content()
.borrow_mut() .borrow_mut()
.extend((content.borrow_mut()).drain(..)); .extend((content.borrow_mut()).drain(..).map(|value| value));
} }
fn get_reference(&self, refname: &str) -> Option<ElemReference> { fn get_reference(&self, refname: &str) -> Option<ElemReference> {
self.scope() self.scope()
.borrow() .borrow()
.referenceable .referenceable
.get(refname).copied() .get(refname)
.and_then(|reference| Some(*reference))
} }
fn get_from_reference( fn get_from_reference(
@ -251,61 +231,3 @@ impl<'a> DocumentAccessors<'a> for dyn Document<'a> + '_ {
.ok() .ok()
} }
} }
#[cfg(test)]
pub mod tests {
#[macro_export]
macro_rules! validate_document {
($container:expr, $idx:expr,) => {};
($container:expr, $idx:expr, $t:ty; $($tail:tt)*) => {{
let elem = &$container[$idx];
assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}, got: {elem:#?}", $idx, stringify!($t));
validate_document!($container, ($idx+1), $($tail)*);
}};
($container:expr, $idx:expr, $t:ty { $($field:ident == $value:expr),* }; $($tail:tt)*) => {{
let elem = &$container[$idx];
assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}, got: {elem:#?}", $idx, stringify!($t));
$(
let val = &elem.downcast_ref::<$t>().unwrap().$field;
assert!(*val == $value, "Invalid field {} for {} at index {}, expected {:#?}, found {:#?}",
stringify!($field),
stringify!($t),
$idx,
$value,
val);
)*
validate_document!($container, ($idx+1), $($tail)*);
}};
($container:expr, $idx:expr, $t:ty { $($ts:tt)* }; $($tail:tt)*) => {{
let elem = &$container[$idx];
assert!(elem.downcast_ref::<$t>().is_some(), "Invalid container element at index {}, expected {}", $idx, stringify!($t));
let contained = elem.as_container().unwrap().contained();
validate_document!(contained, 0, $($ts)*);
validate_document!($container, ($idx+1), $($tail)*);
}};
($container:expr, $idx:expr, $t:ty { $($field:ident == $value:expr),* } { $($ts:tt)* }; $($tail:tt)*) => {{
let elem = &$container[$idx];
assert!(elem.downcast_ref::<$t>().is_some(), "Invalid element at index {}, expected {}, got: {elem:#?}", $idx, stringify!($t));
$(
let val = &elem.downcast_ref::<$t>().unwrap().$field;
assert!(*val == $value, "Invalid field {} for {} at index {}, expected {:#?}, found {:#?}",
stringify!($field),
stringify!($t),
$idx,
$value,
val);
)*
let contained = elem.as_container().unwrap().contained();
validate_document!(contained, 0, $($ts)*);
validate_document!($container, ($idx+1), $($tail)*);
}};
}
}

View file

@ -1,7 +1,7 @@
use std::str::FromStr; use std::str::FromStr;
use crate::compiler::compiler::Compiler; use crate::compiler::compiler::Compiler;
use crate::elements::reference::InternalReference; use crate::elements::reference::Reference;
use crate::parser::source::Token; use crate::parser::source::Token;
use downcast_rs::impl_downcast; use downcast_rs::impl_downcast;
use downcast_rs::Downcast; use downcast_rs::Downcast;
@ -34,7 +34,7 @@ impl FromStr for ElemKind {
} }
} }
pub trait Element: Downcast + core::fmt::Debug { pub trait Element: Downcast {
/// Gets the element defined location i.e token without filename /// Gets the element defined location i.e token without filename
fn location(&self) -> &Token; fn location(&self) -> &Token;
@ -43,6 +43,9 @@ pub trait Element: Downcast + core::fmt::Debug {
/// Get the element's name /// Get the element's name
fn element_name(&self) -> &'static str; fn element_name(&self) -> &'static str;
/// Outputs element to string for debug purposes
fn to_string(&self) -> String;
/// Gets the element as a referenceable i.e an element that can be referenced /// Gets the element as a referenceable i.e an element that can be referenced
fn as_referenceable(&self) -> Option<&dyn ReferenceableElement> { None } fn as_referenceable(&self) -> Option<&dyn ReferenceableElement> { None }
@ -50,10 +53,16 @@ pub trait Element: Downcast + core::fmt::Debug {
fn as_container(&self) -> Option<&dyn ContainerElement> { None } fn as_container(&self) -> Option<&dyn ContainerElement> { None }
/// Compiles element /// Compiles element
fn compile(&self, compiler: &Compiler, document: &dyn Document, cursor: usize) -> Result<String, String>; fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String>;
} }
impl_downcast!(Element); impl_downcast!(Element);
impl core::fmt::Debug for dyn Element {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.to_string())
}
}
pub trait ReferenceableElement: Element { pub trait ReferenceableElement: Element {
/// Reference name /// Reference name
fn reference_name(&self) -> Option<&String>; fn reference_name(&self) -> Option<&String>;
@ -62,17 +71,13 @@ pub trait ReferenceableElement: Element {
fn refcount_key(&self) -> &'static str; fn refcount_key(&self) -> &'static str;
/// Creates the reference element /// Creates the reference element
fn compile_reference( fn compile_reference(&self, compiler: &Compiler, document: &dyn Document, reference: &Reference, refid: usize) -> Result<String, String>;
&self, }
compiler: &Compiler,
document: &dyn Document,
reference: &InternalReference,
refid: usize,
) -> Result<String, String>;
/// Gets the refid for a compiler. The refid is some key that can be used from an external impl core::fmt::Debug for dyn ReferenceableElement {
/// document to reference this element. fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fn refid(&self, compiler: &Compiler, refid: usize) -> String; write!(f, "{}", self.to_string())
}
} }
pub trait ContainerElement: Element { pub trait ContainerElement: Element {
@ -83,6 +88,12 @@ pub trait ContainerElement: Element {
fn push(&mut self, elem: Box<dyn Element>) -> Result<(), String>; fn push(&mut self, elem: Box<dyn Element>) -> Result<(), String>;
} }
impl core::fmt::Debug for dyn ContainerElement {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.to_string())
}
}
#[derive(Debug)] #[derive(Debug)]
pub struct DocumentEnd(pub Token); pub struct DocumentEnd(pub Token);
@ -93,7 +104,9 @@ impl Element for DocumentEnd {
fn element_name(&self) -> &'static str { "Document End" } fn element_name(&self) -> &'static str { "Document End" }
fn compile(&self, _compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> { fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, _compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
Ok(String::new()) Ok(String::new())
} }
} }

View file

@ -1,27 +1,26 @@
use std::cell::RefCell; use std::{cell::RefCell, rc::Rc};
use std::rc::Rc;
use crate::parser::source::Source; use crate::parser::source::Source;
use super::document::Document; use super::{document::{Document, Scope}, element::Element};
use super::document::Scope;
use super::element::Element;
#[derive(Debug)] #[derive(Debug)]
pub struct LangDocument<'a> { pub struct LangDocument<'a> {
source: Rc<dyn Source>, source: Rc<dyn Source>,
parent: Option<&'a dyn Document<'a>>, parent: Option<&'a dyn Document<'a>>, /// Document's parent
/// Document's parent
// FIXME: Render these fields private // FIXME: Render these fields private
pub content: RefCell<Vec<Box<dyn Element>>>, pub content: RefCell<Vec<Box<dyn Element>>>,
pub scope: RefCell<Scope>, pub scope: RefCell<Scope>,
} }
impl<'a> LangDocument<'a> { impl<'a> LangDocument<'a>
pub fn new(source: Rc<dyn Source>, parent: Option<&'a dyn Document<'a>>) -> Self { {
pub fn new(source: Rc<dyn Source>, parent: Option<&'a dyn Document<'a>>) -> Self
{
Self { Self {
source, source: source,
parent, parent: parent,
content: RefCell::new(Vec::new()), content: RefCell::new(Vec::new()),
scope: RefCell::new(Scope::new()), scope: RefCell::new(Scope::new()),
} }
@ -31,9 +30,7 @@ impl<'a> LangDocument<'a> {
impl<'a> Document<'a> for LangDocument<'a> { impl<'a> Document<'a> for LangDocument<'a> {
fn source(&self) -> Rc<dyn Source> { self.source.clone() } fn source(&self) -> Rc<dyn Source> { self.source.clone() }
fn parent(&self) -> Option<&'a dyn Document<'a>> { fn parent(&self) -> Option<&'a dyn Document<'a>> { self.parent.and_then(|p| Some(p as &dyn Document<'a>)) }
self.parent.map(|p| p as &dyn Document<'a>)
}
fn content(&self) -> &RefCell<Vec<Box<dyn Element>>> { &self.content } fn content(&self) -> &RefCell<Vec<Box<dyn Element>>> { &self.content }

View file

@ -45,7 +45,6 @@ pub mod tests {
use crate::parser::langparser::LangParser; use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser; use crate::parser::parser::Parser;
use crate::parser::source::SourceFile; use crate::parser::source::SourceFile;
use crate::parser::parser::ParserState;
#[test] #[test]
fn validate_refname_tests() { fn validate_refname_tests() {
@ -55,7 +54,7 @@ pub mod tests {
None, None,
)); ));
let parser = LangParser::default(); let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None); let doc = parser.parse(source, None);
assert_eq!(validate_refname(&*doc, " abc ", true), Ok("abc")); assert_eq!(validate_refname(&*doc, " abc ", true), Ok("abc"));
assert_eq!( assert_eq!(

View file

@ -1,14 +1,11 @@
use std::{path::PathBuf, rc::Rc};
use crate::{elements::text::Text, parser::{parser::Parser, source::{Source, Token, VirtualSource}}};
use super::document::Document; use super::document::Document;
use crate::elements::text::Text;
use crate::parser::parser::ParserState;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::source::VirtualSource;
use std::path::PathBuf;
use std::rc::Rc;
// TODO enforce to_string(from_string(to_string())) == to_string() // TODO enforce to_string(from_string(to_string())) == to_string()
pub trait Variable { pub trait Variable
{
fn location(&self) -> &Token; fn location(&self) -> &Token;
fn name(&self) -> &str; fn name(&self) -> &str;
@ -18,17 +15,19 @@ pub trait Variable {
/// Converts variable to a string /// Converts variable to a string
fn to_string(&self) -> String; fn to_string(&self) -> String;
fn parse<'a>(&self, state: &ParserState, location: Token, document: &'a dyn Document<'a>); fn parse<'a>(&self, location: Token, parser: &dyn Parser, document: &'a dyn Document<'a>);
} }
impl core::fmt::Debug for dyn Variable { impl core::fmt::Debug for dyn Variable
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}{{{}}}", self.name(), self.to_string()) write!(f, "{}{{{}}}", self.name(), self.to_string())
} }
} }
#[derive(Debug)] #[derive(Debug)]
pub struct BaseVariable { pub struct BaseVariable
{
location: Token, location: Token,
name: String, name: String,
value: String, value: String,
@ -36,15 +35,12 @@ pub struct BaseVariable {
impl BaseVariable { impl BaseVariable {
pub fn new(location: Token, name: String, value: String) -> Self { pub fn new(location: Token, name: String, value: String) -> Self {
Self { Self { location, name, value }
location,
name,
value,
}
} }
} }
impl Variable for BaseVariable { impl Variable for BaseVariable
{
fn location(&self) -> &Token { &self.location } fn location(&self) -> &Token { &self.location }
fn name(&self) -> &str { self.name.as_str() } fn name(&self) -> &str { self.name.as_str() }
@ -56,61 +52,53 @@ impl Variable for BaseVariable {
fn to_string(&self) -> String { self.value.clone() } fn to_string(&self) -> String { self.value.clone() }
fn parse<'a>(&self, state: &ParserState, _location: Token, document: &'a dyn Document<'a>) { fn parse<'a>(&self, _location: Token, parser: &dyn Parser, document: &'a dyn Document<'a>) {
let source = Rc::new(VirtualSource::new( let source = Rc::new(VirtualSource::new(
self.location().clone(), self.location().clone(),
self.name().to_string(), self.name().to_string(),
self.to_string(), self.to_string()));
));
state.with_state(|new_state| { parser.parse_into(source, document);
let _ = new_state.parser.parse_into(new_state, source, document);
});
} }
} }
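The round-trip invariant from the TODO above, to_string(from_string(to_string())) == to_string(), could be checked for BaseVariable along these lines (illustrative sketch; `token` is assumed to be some Token for the definition site):

let mut var = BaseVariable::new(token.clone(), "doc.title".to_string(), "My Title".to_string());
let before = var.to_string();
var.from_string(before.as_str());
assert_eq!(var.to_string(), before);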
#[derive(Debug)] #[derive(Debug)]
pub struct PathVariable { pub struct PathVariable
{
location: Token, location: Token,
name: String, name: String,
path: PathBuf, path: PathBuf,
} }
impl PathVariable { impl PathVariable
{
pub fn new(location: Token, name: String, path: PathBuf) -> Self { pub fn new(location: Token, name: String, path: PathBuf) -> Self {
Self { Self { location, name, path }
location,
name,
path,
}
} }
} }
impl Variable for PathVariable { impl Variable for PathVariable
{
fn location(&self) -> &Token { &self.location } fn location(&self) -> &Token { &self.location }
fn name(&self) -> &str { self.name.as_str() } fn name(&self) -> &str { self.name.as_str() }
fn from_string(&mut self, str: &str) -> Option<String> { fn from_string(&mut self, str: &str) -> Option<String> {
self.path = std::fs::canonicalize(str).unwrap(); self.path = PathBuf::from(std::fs::canonicalize(str).unwrap());
None None
} }
fn to_string(&self) -> String { self.path.to_str().unwrap().to_string() } fn to_string(&self) -> String { self.path.to_str().unwrap().to_string() }
fn parse(&self, state: &ParserState, location: Token, document: &dyn Document) { fn parse<'a>(&self, location: Token, parser: &dyn Parser, document: &'a dyn Document) {
let source = Rc::new(VirtualSource::new( let source = Rc::new(VirtualSource::new(
location, location,
self.name().to_string(), self.name().to_string(),
self.to_string(), self.to_string()));
));
state.push( parser.push(document, Box::new(Text::new(
document,
Box::new(Text::new(
Token::new(0..source.content().len(), source), Token::new(0..source.content().len(), source),
self.to_string(), self.to_string()
)), )));
);
} }
} }

View file

@ -1,522 +0,0 @@
use core::fmt;
use std::any::Any;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use blockquote_style::AuthorPos::After;
use blockquote_style::AuthorPos::Before;
use blockquote_style::BlockquoteStyle;
use regex::Match;
use regex::Regex;
use runtime_format::FormatArgs;
use runtime_format::FormatError;
use runtime_format::FormatKey;
use runtime_format::FormatKeyError;
use crate::compiler::compiler::Compiler;
use crate::compiler::compiler::Target;
use crate::compiler::compiler::Target::HTML;
use crate::document::document::Document;
use crate::document::element::ContainerElement;
use crate::document::element::DocumentEnd;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::parser::ParserState;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::source::VirtualSource;
use crate::parser::style::StyleHolder;
use crate::parser::util::process_escaped;
use crate::parser::util::Property;
use crate::parser::util::PropertyParser;
#[derive(Debug)]
pub struct Blockquote {
pub(self) location: Token,
pub(self) content: Vec<Box<dyn Element>>,
pub(self) author: Option<String>,
pub(self) cite: Option<String>,
pub(self) url: Option<String>,
/// Style of the blockquote
pub(self) style: Rc<blockquote_style::BlockquoteStyle>,
}
struct FmtPair<'a>(Target, &'a Blockquote);
impl FormatKey for FmtPair<'_> {
fn fmt(&self, key: &str, f: &mut fmt::Formatter<'_>) -> Result<(), FormatKeyError> {
match key {
"author" => write!(
f,
"{}",
Compiler::sanitize(self.0, self.1.author.as_ref().unwrap_or(&"".into()))
)
.map_err(FormatKeyError::Fmt),
"cite" => write!(
f,
"{}",
Compiler::sanitize(self.0, self.1.cite.as_ref().unwrap_or(&"".into()))
)
.map_err(FormatKeyError::Fmt),
_ => Err(FormatKeyError::UnknownKey),
}
}
}
impl Element for Blockquote {
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Block }
fn element_name(&self) -> &'static str { "Blockquote" }
fn compile(
&self,
compiler: &Compiler,
document: &dyn Document,
cursor: usize,
) -> Result<String, String> {
match compiler.target() {
HTML => {
let mut result = r#"<div class="blockquote-content">"#.to_string();
let format_author = || -> Result<String, String> {
let mut result = String::new();
if self.cite.is_some() || self.author.is_some() {
result += r#"<p class="blockquote-author">"#;
let fmt_pair = FmtPair(compiler.target(), self);
let format_string = match (self.author.is_some(), self.cite.is_some()) {
(true, true) => {
Compiler::sanitize_format(fmt_pair.0, self.style.format[0].as_str())
}
(true, false) => {
Compiler::sanitize_format(fmt_pair.0, self.style.format[1].as_str())
}
(false, false) => {
Compiler::sanitize_format(fmt_pair.0, self.style.format[2].as_str())
}
_ => panic!(""),
};
let args = FormatArgs::new(format_string.as_str(), &fmt_pair);
args.status().map_err(|err| {
format!("Failed to format Blockquote style `{format_string}`: {err}")
})?;
result += args.to_string().as_str();
result += "</p>";
}
Ok(result)
};
if let Some(url) = &self.url {
result += format!(r#"<blockquote cite="{}">"#, Compiler::sanitize(HTML, url))
.as_str();
} else {
result += "<blockquote>";
}
if self.style.author_pos == Before {
result += format_author()?.as_str();
}
let mut in_paragraph = false;
for elem in &self.content {
if elem.downcast_ref::<DocumentEnd>().is_some() {
} else if elem.downcast_ref::<Blockquote>().is_some() {
if in_paragraph {
result += "</p>";
in_paragraph = false;
}
result += elem
.compile(compiler, document, cursor + result.len())?
.as_str();
} else {
if !in_paragraph {
result += "<p>";
in_paragraph = true;
}
result += elem
.compile(compiler, document, cursor + result.len())?
.as_str();
}
}
if in_paragraph {
result += "</p>";
}
result += "</blockquote>";
if self.style.author_pos == After {
result += format_author().map_err(|err| err.to_string())?.as_str();
}
result += "</div>";
Ok(result)
}
_ => todo!(""),
}
}
fn as_container(&self) -> Option<&dyn ContainerElement> { Some(self) }
}
impl ContainerElement for Blockquote {
fn contained(&self) -> &Vec<Box<dyn Element>> { &self.content }
fn push(&mut self, elem: Box<dyn Element>) -> Result<(), String> {
if elem.kind() == ElemKind::Block {
return Err("Cannot add block element inside a blockquote".to_string());
}
self.content.push(elem);
Ok(())
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::blockquote")]
pub struct BlockquoteRule {
start_re: Regex,
continue_re: Regex,
properties: PropertyParser,
}
impl BlockquoteRule {
pub fn new() -> Self {
let mut props = HashMap::new();
props.insert(
"author".to_string(),
Property::new(false, "Quote author".to_string(), None),
);
props.insert(
"cite".to_string(),
Property::new(false, "Quote source".to_string(), None),
);
props.insert(
"url".to_string(),
Property::new(false, "Quote source url".to_string(), None),
);
Self {
start_re: Regex::new(r"(?:^|\n)>(?:\[((?:\\.|[^\\\\])*?)\])?\s*?(.*)").unwrap(),
continue_re: Regex::new(r"(?:^|\n)>\s*?(.*)").unwrap(),
properties: PropertyParser { properties: props },
}
}
fn parse_properties(
&self,
m: Match,
) -> Result<(Option<String>, Option<String>, Option<String>), String> {
let processed = process_escaped('\\', "]", m.as_str());
let pm = self.properties.parse(processed.as_str())?;
let author = pm
.get("author", |_, s| -> Result<String, ()> { Ok(s.to_string()) })
.map(|(_, s)| s)
.ok();
let cite = pm
.get("cite", |_, s| -> Result<String, ()> { Ok(s.to_string()) })
.map(|(_, s)| s)
.ok();
let url = pm
.get("url", |_, s| -> Result<String, ()> { Ok(s.to_string()) })
.map(|(_, s)| s)
.ok();
Ok((author, cite, url))
}
}
impl Rule for BlockquoteRule {
fn name(&self) -> &'static str { "Blockquote" }
fn previous(&self) -> Option<&'static str> { Some("List") }
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
self.start_re
.find_at(cursor.source.content(), cursor.pos)
.map(|m| (m.start(), Box::new([false; 0]) as Box<dyn Any>))
}
fn on_match<'a>(
&self,
state: &ParserState,
document: &'a (dyn Document<'a> + 'a),
cursor: Cursor,
_match_data: Box<dyn Any>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
let mut reports = vec![];
let content = cursor.source.content();
let mut end_cursor = cursor.clone();
if let Some(captures) = self.start_re.captures_at(content, end_cursor.pos) {
if captures.get(0).unwrap().start() != end_cursor.pos {
return (end_cursor, reports);
}
// Advance cursor
end_cursor = end_cursor.at(captures.get(0).unwrap().end());
// Properties
let mut author = None;
let mut cite = None;
let mut url = None;
if let Some(properties) = captures.get(1) {
match self.parse_properties(properties) {
Err(err) => {
reports.push(
Report::build(
ReportKind::Warning,
cursor.source.clone(),
properties.start(),
)
.with_message("Invalid Blockquote Properties")
.with_label(
Label::new((cursor.source.clone(), properties.range()))
.with_message(err)
.with_color(state.parser.colors().warning),
)
.finish(),
);
return (end_cursor, reports);
}
Ok(props) => (author, cite, url) = props,
}
}
// Content
let entry_start = captures.get(0).unwrap().start();
let mut entry_content = captures.get(2).unwrap().as_str().to_string();
while let Some(captures) = self.continue_re.captures_at(content, end_cursor.pos) {
if captures.get(0).unwrap().start() != end_cursor.pos {
break;
}
// Advance cursor
end_cursor = end_cursor.at(captures.get(0).unwrap().end());
let trimmed = captures.get(1).unwrap().as_str().trim_start().trim_end();
entry_content += "\n";
entry_content += trimmed;
}
// Parse entry content
let token = Token::new(entry_start..end_cursor.pos, end_cursor.source.clone());
let entry_src = Rc::new(VirtualSource::new(
token.clone(),
"Blockquote Entry".to_string(),
entry_content,
));
// Parse content
let parsed_doc = state.with_state(|new_state| {
new_state
.parser
.parse(new_state, entry_src, Some(document))
.0
});
// Extract paragraph and nested blockquotes
let mut parsed_content: Vec<Box<dyn Element>> = vec![];
for mut elem in parsed_doc.content().borrow_mut().drain(..) {
if let Some(paragraph) = elem.downcast_mut::<Paragraph>() {
if let Some(last) = parsed_content.last() {
if last.kind() == ElemKind::Inline {
parsed_content.push(Box::new(Text {
location: Token::new(
last.location().end()..last.location().end(),
last.location().source(),
),
content: " ".to_string(),
}) as Box<dyn Element>);
}
}
parsed_content.extend(std::mem::take(&mut paragraph.content));
} else if elem.downcast_ref::<Blockquote>().is_some() {
parsed_content.push(elem);
} else {
reports.push(
Report::build(ReportKind::Error, token.source(), token.range.start)
.with_message("Unable to Parse Blockquote Entry")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message("Blockquotes may only contain paragraphs and other blockquotes")
.with_color(state.parser.colors().error),
)
.finish(),
);
return (end_cursor, reports);
}
}
// Get style
let style = state
.shared
.styles
.borrow()
.current(blockquote_style::STYLE_KEY)
.downcast_rc::<BlockquoteStyle>()
.unwrap();
state.push(
document,
Box::new(Blockquote {
location: Token::new(entry_start..end_cursor.pos, end_cursor.source.clone()),
content: parsed_content,
author,
cite,
url,
style,
}),
);
}
(end_cursor, reports)
}
fn register_styles(&self, holder: &mut StyleHolder) {
holder.set_current(Rc::new(BlockquoteStyle::default()));
}
}
mod blockquote_style {
use serde::Deserialize;
use serde::Serialize;
use crate::impl_elementstyle;
pub static STYLE_KEY: &str = "style.blockquote";
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
pub enum AuthorPos {
Before,
After,
None,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct BlockquoteStyle {
pub author_pos: AuthorPos,
pub format: [String; 3],
}
impl Default for BlockquoteStyle {
fn default() -> Self {
Self {
author_pos: AuthorPos::After,
format: [
"{author}, {cite}".into(),
"{author}".into(),
"{cite}".into(),
],
}
}
}
impl_elementstyle!(BlockquoteStyle, STYLE_KEY);
}
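The three entries of `format` map onto the author/cite combinations handled in `compile` above; schematically (with `author`, `cite`, and `style` standing in for the Blockquote fields):

// Which style.format entry is selected, given which fields are present
let format_string = match (author.is_some(), cite.is_some()) {
    (true, true)   => style.format[0].as_str(), // "{author}, {cite}"
    (true, false)  => style.format[1].as_str(), // "{author}"
    (false, false) => style.format[2].as_str(), // "{cite}"
    (false, true)  => unreachable!(),           // compile() above panics on this case
};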
#[cfg(test)]
mod tests {
use crate::elements::paragraph::Paragraph;
use crate::elements::style::Style;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
pub fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
BEFORE
>[author=A, cite=B, url=C] Some entry
> contin**ued here
> **
AFTER
> Another
>
> quote
>>[author=B] Nested
>>> More nested
END
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph { Text{ content == "BEFORE" }; };
Blockquote {
author == Some("A".to_string()),
cite == Some("B".to_string()),
url == Some("C".to_string())
} {
Text { content == "Some entry contin" };
Style;
Text { content == "ued here" };
Style;
};
Paragraph { Text{ content == "AFTER" }; };
Blockquote {
Text { content == "Another" };
Text { content == " " };
Text { content == "quote" };
Blockquote { author == Some("B".to_string()) } {
Text { content == "Nested" };
Blockquote {
Text { content == "More nested" };
};
};
};
Paragraph { Text{ content == "END" }; };
);
}
#[test]
pub fn style() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
@@style.blockquote = {
"author_pos": "Before",
"format": ["{cite} by {author}", "Author: {author}", "From: {cite}"]
}
PRE
>[author=A, cite=B, url=C] Some entry
> contin**ued here
> **
AFTER
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (_, state) = parser.parse(ParserState::new(&parser, None), source, None);
let style = state
.shared
.styles
.borrow()
.current(blockquote_style::STYLE_KEY)
.downcast_rc::<BlockquoteStyle>()
.unwrap();
assert_eq!(style.author_pos, Before);
assert_eq!(
style.format,
[
"{cite} by {author}".to_string(),
"Author: {author}".to_string(),
"From: {cite}".to_string()
]
);
}
}

View file

@ -25,7 +25,7 @@ use crate::document::document::Document;
use crate::document::element::ElemKind; use crate::document::element::ElemKind;
use crate::document::element::Element; use crate::document::element::Element;
use crate::lua::kernel::CTX; use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState; use crate::parser::parser::Parser;
use crate::parser::rule::RegexRule; use crate::parser::rule::RegexRule;
use crate::parser::source::Source; use crate::parser::source::Source;
use crate::parser::source::Token; use crate::parser::source::Token;
@ -123,8 +123,8 @@ impl Code {
} }
result += result +=
"<div class=\"code-block-content\"><table cellspacing=\"0\">".to_string().as_str(); format!("<div class=\"code-block-content\"><table cellspacing=\"0\">").as_str();
for (line_id, line) in self.code.split('\n').enumerate() { for (line_id, line) in self.code.split(|c| c == '\n').enumerate() {
result += "<tr><td class=\"code-block-gutter\">"; result += "<tr><td class=\"code-block-gutter\">";
// Line number // Line number
@ -137,7 +137,7 @@ impl Code {
Err(e) => { Err(e) => {
return Err(format!( return Err(format!(
"Error highlighting line `{line}`: {}", "Error highlighting line `{line}`: {}",
e e.to_string()
)) ))
} }
Ok(regions) => { Ok(regions) => {
@ -146,7 +146,7 @@ impl Code {
syntect::html::IncludeBackground::No, syntect::html::IncludeBackground::No,
) { ) {
Err(e) => { Err(e) => {
return Err(format!("Error highlighting code: {}", e)) return Err(format!("Error highlighting code: {}", e.to_string()))
} }
Ok(highlighted) => { Ok(highlighted) => {
result += if highlighted.is_empty() { result += if highlighted.is_empty() {
@ -165,14 +165,14 @@ impl Code {
} else if self.block == CodeKind::MiniBlock { } else if self.block == CodeKind::MiniBlock {
result += "<div class=\"code-block\"><div class=\"code-block-content\"><table cellspacing=\"0\">"; result += "<div class=\"code-block\"><div class=\"code-block-content\"><table cellspacing=\"0\">";
for line in self.code.split('\n') { for line in self.code.split(|c| c == '\n') {
result += "<tr><td class=\"code-block-line\"><pre>"; result += "<tr><td class=\"code-block-line\"><pre>";
// Code // Code
match h.highlight_line(line, Code::get_syntaxes()) { match h.highlight_line(line, Code::get_syntaxes()) {
Err(e) => { Err(e) => {
return Err(format!( return Err(format!(
"Error highlighting line `{line}`: {}", "Error highlighting line `{line}`: {}",
e e.to_string()
)) ))
} }
Ok(regions) => { Ok(regions) => {
@ -181,7 +181,7 @@ impl Code {
syntect::html::IncludeBackground::No, syntect::html::IncludeBackground::No,
) { ) {
Err(e) => { Err(e) => {
return Err(format!("Error highlighting code: {}", e)) return Err(format!("Error highlighting code: {}", e.to_string()))
} }
Ok(highlighted) => { Ok(highlighted) => {
result += if highlighted.is_empty() { result += if highlighted.is_empty() {
@ -203,7 +203,7 @@ impl Code {
return Err(format!( return Err(format!(
"Error highlighting line `{}`: {}", "Error highlighting line `{}`: {}",
self.code, self.code,
e e.to_string()
)) ))
} }
Ok(regions) => { Ok(regions) => {
@ -212,7 +212,7 @@ impl Code {
syntect::html::IncludeBackground::No, syntect::html::IncludeBackground::No,
) { ) {
Err(e) => { Err(e) => {
return Err(format!("Error highlighting code: {}", e)) return Err(format!("Error highlighting code: {}", e.to_string()))
} }
Ok(highlighted) => result += highlighted.as_str(), Ok(highlighted) => result += highlighted.as_str(),
} }
@ -244,10 +244,11 @@ impl Cached for Code {
fn key(&self) -> <Self as Cached>::Key { fn key(&self) -> <Self as Cached>::Key {
let mut hasher = Sha512::new(); let mut hasher = Sha512::new();
hasher.input((self.block as usize).to_be_bytes().as_slice()); hasher.input((self.block as usize).to_be_bytes().as_slice());
hasher.input(self.line_offset.to_be_bytes().as_slice()); hasher.input((self.line_offset as usize).to_be_bytes().as_slice());
if let Some(theme) = self.theme self.theme
.as_ref() { hasher.input(theme.as_bytes()) } .as_ref()
if let Some(name) = self.name.as_ref() { hasher.input(name.as_bytes()) } .map(|theme| hasher.input(theme.as_bytes()));
self.name.as_ref().map(|name| hasher.input(name.as_bytes()));
hasher.input(self.language.as_bytes()); hasher.input(self.language.as_bytes());
hasher.input(self.code.as_bytes()); hasher.input(self.code.as_bytes());
@ -262,20 +263,22 @@ impl Element for Code {
fn element_name(&self) -> &'static str { "Code Block" } fn element_name(&self) -> &'static str { "Code Block" }
fn compile(&self, compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> { fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
match compiler.target() { match compiler.target() {
Target::HTML => { Target::HTML => {
static CACHE_INIT: Once = Once::new(); static CACHE_INIT: Once = Once::new();
CACHE_INIT.call_once(|| { CACHE_INIT.call_once(|| {
if let Some(con) = compiler.cache() { if let Some(mut con) = compiler.cache() {
if let Err(e) = Code::init(con) { if let Err(e) = Code::init(&mut con) {
eprintln!("Unable to create cache table: {e}"); eprintln!("Unable to create cache table: {e}");
} }
} }
}); });
if let Some(con) = compiler.cache() { if let Some(mut con) = compiler.cache() {
match self.cached(con, |s| s.highlight_html(compiler)) { match self.cached(&mut con, |s| s.highlight_html(compiler)) {
Ok(s) => Ok(s), Ok(s) => Ok(s),
Err(e) => match e { Err(e) => match e {
CachedError::SqlErr(e) => { CachedError::SqlErr(e) => {
@ -295,7 +298,6 @@ impl Element for Code {
} }
} }
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::code")]
pub struct CodeRule { pub struct CodeRule {
re: [Regex; 2], re: [Regex; 2],
properties: PropertyParser, properties: PropertyParser,
@ -323,22 +325,21 @@ impl CodeRule {
) )
.unwrap(), .unwrap(),
], ],
properties: PropertyParser { properties: props }, properties: PropertyParser::new(props),
} }
} }
} }
impl RegexRule for CodeRule { impl RegexRule for CodeRule {
fn name(&self) -> &'static str { "Code" } fn name(&self) -> &'static str { "Code" }
fn previous(&self) -> Option<&'static str> { Some("Blockquote") }
fn regexes(&self) -> &[regex::Regex] { &self.re } fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match( fn on_regex_match<'a>(
&self, &self,
index: usize, index: usize,
state: &ParserState, parser: &dyn Parser,
document: &dyn Document, document: &'a dyn Document,
token: Token, token: Token,
matches: Captures, matches: Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> { ) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
@ -354,7 +355,7 @@ impl RegexRule for CodeRule {
.with_label( .with_label(
Label::new((token.source().clone(), token.range.clone())) Label::new((token.source().clone(), token.range.clone()))
.with_message(format!("Code is missing properties: {e}")) .with_message(format!("Code is missing properties: {e}"))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -372,7 +373,7 @@ impl RegexRule for CodeRule {
.with_label( .with_label(
Label::new((token.source().clone(), props.range())) Label::new((token.source().clone(), props.range()))
.with_message(e) .with_message(e)
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -394,7 +395,7 @@ impl RegexRule for CodeRule {
.with_label( .with_label(
Label::new((token.source().clone(), lang.range())) Label::new((token.source().clone(), lang.range()))
.with_message("No language specified") .with_message("No language specified")
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -412,9 +413,9 @@ impl RegexRule for CodeRule {
Label::new((token.source().clone(), lang.range())) Label::new((token.source().clone(), lang.range()))
.with_message(format!( .with_message(format!(
"Language `{}` cannot be found", "Language `{}` cannot be found",
code_lang.fg(state.parser.colors().info) code_lang.fg(parser.colors().info)
)) ))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -431,7 +432,7 @@ impl RegexRule for CodeRule {
} else { } else {
util::process_escaped('\\', "``", matches.get(3).unwrap().as_str()) util::process_escaped('\\', "``", matches.get(3).unwrap().as_str())
}; };
if code_content.bytes().last() == Some(b'\n') if code_content.bytes().last() == Some('\n' as u8)
// Remove newline // Remove newline
{ {
code_content.pop(); code_content.pop();
@ -444,7 +445,7 @@ impl RegexRule for CodeRule {
.with_label( .with_label(
Label::new((token.source().clone(), token.range.clone())) Label::new((token.source().clone(), token.range.clone()))
.with_message("Code content cannot be empty") .with_message("Code content cannot be empty")
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -452,7 +453,8 @@ impl RegexRule for CodeRule {
} }
let theme = document let theme = document
.get_variable("code.theme").map(|var| var.to_string()); .get_variable("code.theme")
.and_then(|var| Some(var.to_string()));
if index == 0 if index == 0
// Block // Block
@ -475,9 +477,9 @@ impl RegexRule for CodeRule {
.with_label( .with_label(
Label::new((token.source().clone(), token.start()+1..token.end())) Label::new((token.source().clone(), token.start()+1..token.end()))
.with_message(format!("Property `line_offset: {}` cannot be converted: {}", .with_message(format!("Property `line_offset: {}` cannot be converted: {}",
prop.fg(state.parser.colors().info), prop.fg(parser.colors().info),
err.fg(state.parser.colors().error))) err.fg(parser.colors().error)))
.with_color(state.parser.colors().warning)) .with_color(parser.colors().warning))
.finish()); .finish());
return reports; return reports;
} }
@ -492,9 +494,9 @@ impl RegexRule for CodeRule {
)) ))
.with_message(format!( .with_message(format!(
"Property `{}` doesn't exist", "Property `{}` doesn't exist",
err.fg(state.parser.colors().info) err.fg(parser.colors().info)
)) ))
.with_color(state.parser.colors().warning), .with_color(parser.colors().warning),
) )
.finish(), .finish(),
); );
@ -504,7 +506,7 @@ impl RegexRule for CodeRule {
} }
}; };
state.push( parser.push(
document, document,
Box::new(Code::new( Box::new(Code::new(
token.clone(), token.clone(),
@ -525,7 +527,7 @@ impl RegexRule for CodeRule {
CodeKind::Inline CodeKind::Inline
}; };
state.push( parser.push(
document, document,
Box::new(Code::new( Box::new(Code::new(
token.clone(), token.clone(),
@ -542,7 +544,8 @@ impl RegexRule for CodeRule {
reports reports
} }
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { // TODO
fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![]; let mut bindings = vec![];
bindings.push(( bindings.push((
"push_inline".to_string(), "push_inline".to_string(),
@ -551,9 +554,10 @@ impl RegexRule for CodeRule {
ctx.as_ref().map(|ctx| { ctx.as_ref().map(|ctx| {
let theme = ctx let theme = ctx
.document .document
.get_variable("code.theme").map(|var| var.to_string()); .get_variable("code.theme")
.and_then(|var| Some(var.to_string()));
ctx.state.push( ctx.parser.push(
ctx.document, ctx.document,
Box::new(Code { Box::new(Code {
location: ctx.location.clone(), location: ctx.location.clone(),
@ -581,9 +585,10 @@ impl RegexRule for CodeRule {
ctx.as_ref().map(|ctx| { ctx.as_ref().map(|ctx| {
let theme = ctx let theme = ctx
.document .document
.get_variable("code.theme").map(|var| var.to_string()); .get_variable("code.theme")
.and_then(|var| Some(var.to_string()));
ctx.state.push( ctx.parser.push(
ctx.document, ctx.document,
Box::new(Code { Box::new(Code {
location: ctx.location.clone(), location: ctx.location.clone(),
@ -618,9 +623,10 @@ impl RegexRule for CodeRule {
ctx.as_ref().map(|ctx| { ctx.as_ref().map(|ctx| {
let theme = ctx let theme = ctx
.document .document
.get_variable("code.theme").map(|var| var.to_string()); .get_variable("code.theme")
.and_then(|var| Some(var.to_string()));
ctx.state.push( ctx.parser.push(
ctx.document, ctx.document,
Box::new(Code { Box::new(Code {
location: ctx.location.clone(), location: ctx.location.clone(),
@ -649,7 +655,6 @@ impl RegexRule for CodeRule {
mod tests { mod tests {
use super::*; use super::*;
use crate::parser::langparser::LangParser; use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile; use crate::parser::source::SourceFile;
#[test] #[test]
@ -661,7 +666,7 @@ mod tests {
static int INT32_MIN = 0x80000000; static int INT32_MIN = 0x80000000;
``` ```
%<nml.code.push_block("Lua", "From Lua", "print(\"Hello, World!\")", nil)>% %<nml.code.push_block("Lua", "From Lua", "print(\"Hello, World!\")", nil)>%
``Rust, ``Rust
fn fact(n: usize) -> usize fn fact(n: usize) -> usize
{ {
match n match n
@ -677,7 +682,8 @@ fn fact(n: usize) -> usize
None, None,
)); ));
let parser = LangParser::default(); let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None); //let compiler = Compiler::new(Target::HTML, None);
let doc = parser.parse(source, None);
let borrow = doc.content().borrow(); let borrow = doc.content().borrow();
let found = borrow let found = borrow
@ -723,7 +729,8 @@ fn fact(n: usize) -> usize
None, None,
)); ));
let parser = LangParser::default(); let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None); //let compiler = Compiler::new(Target::HTML, None);
let doc = parser.parse(source, None);
let borrow = doc.content().borrow(); let borrow = doc.content().borrow();
let found = borrow let found = borrow

View file

@ -1,131 +1,84 @@
use crate::compiler::compiler::Compiler; use mlua::{Function, Lua};
use crate::document::document::Document; use regex::{Captures, Regex};
use crate::document::element::ElemKind; use crate::{document::document::Document, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}}};
use crate::document::element::Element; use ariadne::{Report, Label, ReportKind};
use crate::parser::parser::ParserState; use crate::{compiler::compiler::Compiler, document::element::{ElemKind, Element}};
use crate::parser::rule::RegexRule; use std::{ops::Range, rc::Rc};
use crate::parser::source::Source;
use crate::parser::source::Token;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use regex::Captures;
use regex::Regex;
use std::ops::Range;
use std::rc::Rc;
#[derive(Debug)] #[derive(Debug)]
pub struct Comment { pub struct Comment {
pub location: Token, location: Token,
#[allow(unused)] content: String,
pub content: String,
} }
impl Element for Comment { impl Comment
{
pub fn new(location: Token, content: String ) -> Self {
Self { location: location, content }
}
}
impl Element for Comment
{
fn location(&self) -> &Token { &self.location } fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Invisible } fn kind(&self) -> ElemKind { ElemKind::Invisible }
fn element_name(&self) -> &'static str { "Comment" } fn element_name(&self) -> &'static str { "Comment" }
fn compile(&self, _compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> { fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, _compiler: &Compiler, _document: &dyn Document)
-> Result<String, String> {
Ok("".to_string()) Ok("".to_string())
} }
} }
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::comment")]
pub struct CommentRule { pub struct CommentRule {
re: [Regex; 1], re: [Regex; 1],
} }
impl CommentRule { impl CommentRule {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self { re: [Regex::new(r"\s*::(.*)").unwrap()] }
re: [Regex::new(r"(?:(?:^|\n)|[^\S\n]+)::(.*)").unwrap()],
}
} }
} }
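// Intended matching behavior of the `(?:(?:^|\n)|[^\S\n]+)::(.*)` pattern (exercised
// by the `parser` test below): `::` only opens a comment when it sits at the start of
// a line or after whitespace, so an inline path like `std::cmp` is left alone while
// `:: Commented line` and `COMMENT ::Test` both produce `Comment` elements.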
impl RegexRule for CommentRule { impl RegexRule for CommentRule {
fn name(&self) -> &'static str { "Comment" } fn name(&self) -> &'static str { "Comment" }
fn previous(&self) -> Option<&'static str> { None }
fn regexes(&self) -> &[Regex] { &self.re } fn regexes(&self) -> &[Regex] { &self.re }
fn on_regex_match( fn on_regex_match<'a>(&self, _: usize, parser: &dyn Parser, document: &'a dyn Document, token: Token, matches: Captures)
&self, -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
_: usize,
state: &ParserState,
document: &dyn Document,
token: Token,
matches: Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![]; let mut reports = vec![];
let content = match matches.get(1) { let content = match matches.get(1)
{
None => panic!("Unknown error"), None => panic!("Unknown error"),
Some(comment) => { Some(comment) => {
let trimmed = comment.as_str().trim_start().trim_end().to_string(); let trimmed = comment.as_str().trim_start().trim_end().to_string();
if trimmed.is_empty() { if trimmed.is_empty()
{
reports.push( reports.push(
Report::build(ReportKind::Warning, token.source(), comment.start()) Report::build(ReportKind::Warning, token.source(), comment.start())
.with_message("Empty comment") .with_message("Empty comment")
.with_label( .with_label(
Label::new((token.source(), comment.range())) Label::new((token.source(), comment.range()))
.with_message("Comment is empty") .with_message("Comment is empty")
.with_color(state.parser.colors().warning), .with_color(parser.colors().warning))
) .finish());
.finish(),
);
} }
trimmed trimmed
} }
}; };
state.push( parser.push(document, Box::new(
document, Comment::new(
Box::new(Comment { token.clone(),
location: token.clone(), content
content, )
}),
);
reports
}
}
#[cfg(test)]
mod tests {
use crate::elements::paragraph::Paragraph;
use crate::elements::style::Style;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
NOT COMMENT: `std::cmp`
:: Commented line
COMMENT ::Test
"#
.to_string(),
None,
)); ));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0, return reports;
Paragraph {
Text; Style; Text; Style;
Comment { content == "Commented line" };
Text; Comment { content == "Test" };
};
);
} }
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
} }

View file

@ -1,575 +0,0 @@
use crate::lua::kernel::Kernel;
use std::any::Any;
use std::cell::Ref;
use std::cell::RefCell;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Error::BadArgument;
use mlua::Function;
use mlua::Lua;
use crate::document::document::Document;
use crate::document::document::DocumentAccessors;
use crate::lua::kernel::KernelContext;
use crate::lua::kernel::CTX;
use crate::parser::customstyle::CustomStyle;
use crate::parser::customstyle::CustomStyleToken;
use crate::parser::parser::ParserState;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::state::RuleState;
use crate::parser::state::Scope;
use super::paragraph::Paragraph;
#[derive(Debug)]
struct LuaCustomStyle {
pub(self) name: String,
pub(self) tokens: CustomStyleToken,
pub(self) start: String,
pub(self) end: String,
}
impl CustomStyle for LuaCustomStyle {
fn name(&self) -> &str { self.name.as_str() }
fn tokens(&self) -> &CustomStyleToken { &self.tokens }
fn on_start<'a>(
&self,
location: Token,
state: &ParserState,
document: &'a dyn Document<'a>,
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>> {
let kernel: Ref<'_, Kernel> =
Ref::map(state.shared.kernels.borrow(), |b| b.get("main").unwrap());
//let kernel = RefMut::map(parser_state.shared.kernels.borrow(), |ker| ker.get("main").unwrap());
let ctx = KernelContext {
location: location.clone(),
state,
document,
};
let mut reports = vec![];
kernel.run_with_context(ctx, |lua| {
let chunk = lua.load(self.start.as_str());
if let Err(err) = chunk.eval::<()>() {
reports.push(
Report::build(ReportKind::Error, location.source(), location.start())
.with_message("Lua execution failed")
.with_label(
Label::new((location.source(), location.range.clone()))
.with_message(err.to_string())
.with_color(state.parser.colors().error),
)
.with_note(format!(
"When trying to start custom style {}",
self.name().fg(state.parser.colors().info)
))
.finish(),
);
}
});
reports
}
fn on_end<'a>(
&self,
location: Token,
state: &ParserState,
document: &'a dyn Document<'a>,
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>> {
let kernel: Ref<'_, Kernel> =
Ref::map(state.shared.kernels.borrow(), |b| b.get("main").unwrap());
let ctx = KernelContext {
location: location.clone(),
state,
document,
};
let mut reports = vec![];
kernel.run_with_context(ctx, |lua| {
let chunk = lua.load(self.end.as_str());
if let Err(err) = chunk.eval::<()>() {
reports.push(
Report::build(ReportKind::Error, location.source(), location.start())
.with_message("Lua execution failed")
.with_label(
Label::new((location.source(), location.range.clone()))
.with_message(err.to_string())
.with_color(state.parser.colors().error),
)
.with_note(format!(
"When trying to end custom style {}",
self.name().fg(state.parser.colors().info)
))
.finish(),
);
}
});
reports
}
}
struct CustomStyleState {
toggled: HashMap<String, Token>,
}
impl RuleState for CustomStyleState {
fn scope(&self) -> Scope { Scope::PARAGRAPH }
fn on_remove<'a>(
&self,
state: &ParserState,
document: &dyn Document,
) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![];
self.toggled.iter().for_each(|(style, token)| {
let paragraph = document.last_element::<Paragraph>().unwrap();
let paragraph_end = paragraph
.content
.last().map(|last| (
last.location().source(),
last.location().end() - 1..last.location().end(),
))
.unwrap();
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Unterminated Custom Style")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_order(1)
.with_message(format!(
"Style {} starts here",
style.fg(state.parser.colors().info)
))
.with_color(state.parser.colors().error),
)
.with_label(
Label::new(paragraph_end)
.with_order(1)
.with_message("Paragraph ends here".to_string())
.with_color(state.parser.colors().error),
)
.with_note("Styles cannot span multiple documents (i.e @import)")
.finish(),
);
});
reports
}
}
static STATE_NAME: &str = "elements.custom_style";
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::customstyle")]
pub struct CustomStyleRule;
impl CustomStyleRule {
pub fn new() -> Self { Self{} }
}
impl Rule for CustomStyleRule {
fn name(&self) -> &'static str { "Custom Style" }
fn previous(&self) -> Option<&'static str> { Some("Style") }
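// `next_match` below scans every registered custom style for the nearest upcoming
// marker: toggle styles contribute their single token, paired styles contribute both
// their begin and end tokens, and the closest offset is returned together with the
// matched style and a flag telling whether the match closes the style.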
fn next_match(&self, state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
let content = cursor.source.content();
let mut closest_match = usize::MAX;
let mut matched_style = (None, false);
state
.shared
.custom_styles
.borrow()
.iter()
.for_each(|(_name, style)| match style.tokens() {
CustomStyleToken::Toggle(s) => {
if let Some(pos) = &content[cursor.pos..].find(s) {
if *pos < closest_match {
closest_match = *pos;
matched_style = (Some(style.clone()), false);
}
}
}
CustomStyleToken::Pair(begin, end) => {
if let Some(pos) = &content[cursor.pos..].find(begin) {
if *pos < closest_match {
closest_match = *pos;
matched_style = (Some(style.clone()), false);
}
}
if let Some(pos) = &content[cursor.pos..].find(end) {
if *pos < closest_match {
closest_match = *pos;
matched_style = (Some(style.clone()), true);
}
}
}
});
if closest_match == usize::MAX {
None
} else {
Some((
closest_match + cursor.pos,
Box::new((matched_style.0.unwrap().clone(), matched_style.1)) as Box<dyn Any>,
))
}
}
fn on_match<'a>(
&self,
state: &ParserState,
document: &'a dyn Document<'a>,
cursor: Cursor,
match_data: Box<dyn Any>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
let (style, end) = match_data
.downcast_ref::<(Rc<dyn CustomStyle>, bool)>()
.unwrap();
let mut rule_state_borrow = state.shared.rule_state.borrow_mut();
let style_state = match rule_state_borrow.get(STATE_NAME) {
Some(rule_state) => rule_state,
// Insert as a new state
None => {
match rule_state_borrow.insert(
STATE_NAME.into(),
Rc::new(RefCell::new(CustomStyleState {
toggled: HashMap::new(),
})),
) {
Err(err) => panic!("{err}"),
Ok(rule_state) => rule_state,
}
}
};
let (close, token) = match style.tokens() {
CustomStyleToken::Toggle(s) => {
let mut borrow = style_state.as_ref().borrow_mut();
let style_state = borrow.downcast_mut::<CustomStyleState>().unwrap();
if style_state.toggled.get(style.name()).is_some() {
// Terminate style
let token = Token::new(cursor.pos..cursor.pos + s.len(), cursor.source.clone());
style_state.toggled.remove(style.name());
(true, token)
} else {
// Start style
let token = Token::new(cursor.pos..cursor.pos + s.len(), cursor.source.clone());
style_state
.toggled
.insert(style.name().into(), token.clone());
(false, token)
}
}
CustomStyleToken::Pair(s_begin, s_end) => {
let mut borrow = style_state.borrow_mut();
let style_state = borrow.downcast_mut::<CustomStyleState>().unwrap();
if *end {
// Terminate style
let token =
Token::new(cursor.pos..cursor.pos + s_end.len(), cursor.source.clone());
if style_state.toggled.get(style.name()).is_none() {
return (
cursor.at(cursor.pos + s_end.len()),
vec![
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Invalid End of Style")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_order(1)
.with_message(format!(
"Cannot end style {} here, is it not started anywhere",
style.name().fg(state.parser.colors().info)
))
.with_color(state.parser.colors().error),
)
.finish(),
],
);
}
style_state.toggled.remove(style.name());
(true, token)
} else {
// Start style
let token = Token::new(
cursor.pos..cursor.pos + s_begin.len(),
cursor.source.clone(),
);
if let Some(start_token) = style_state.toggled.get(style.name()) {
return (
cursor.at(cursor.pos + s_begin.len()),
vec![Report::build(
ReportKind::Error,
start_token.source(),
start_token.start(),
)
.with_message("Invalid Start of Style")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_order(1)
.with_message(format!(
"Style cannot {} starts here",
style.name().fg(state.parser.colors().info)
))
.with_color(state.parser.colors().error),
)
.with_label(
Label::new((start_token.source(), start_token.range.clone()))
.with_order(2)
.with_message(format!(
"Style {} starts previously here",
style.name().fg(state.parser.colors().info)
))
.with_color(state.parser.colors().error),
)
.finish()],
);
}
style_state
.toggled
.insert(style.name().into(), token.clone());
(false, token)
}
}
};
let reports = if close {
style.on_end(token.clone(), state, document)
} else {
style.on_start(token.clone(), state, document)
};
(cursor.at(token.end()), unsafe {
std::mem::transmute(reports)
})
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
"define_toggled".into(),
lua.create_function(
|_, (name, token, on_start, on_end): (String, String, String, String)| {
let mut result = Ok(());
let style = LuaCustomStyle {
tokens: CustomStyleToken::Toggle(token),
name: name.clone(),
start: on_start,
end: on_end,
};
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
if let Some(_) =
ctx.state.shared.custom_styles.borrow().get(name.as_str())
{
result = Err(BadArgument {
to: Some("define_toggled".to_string()),
pos: 1,
name: Some("name".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Custom style with name `{name}` already exists"
))),
});
return;
}
ctx.state
.shared
.custom_styles
.borrow_mut()
.insert(Rc::new(style));
ctx.state.reset_match("Custom Style").unwrap();
});
});
result
},
)
.unwrap(),
));
bindings.push((
"define_paired".into(),
lua.create_function(
|_,
(name, token_start, token_end, on_start, on_end): (
String,
String,
String,
String,
String,
)| {
let mut result = Ok(());
if token_start == token_end
{
return Err(BadArgument {
to: Some("define_paired".to_string()),
pos: 3,
name: Some("token_end".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Custom style with name `{name}` cannot be defined: The start token must differ from the end token, use `define_toggled` insteda"
))),
});
}
let style = LuaCustomStyle {
tokens: CustomStyleToken::Pair(token_start, token_end),
name: name.clone(),
start: on_start,
end: on_end,
};
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
if let Some(_) = ctx.state.shared.custom_styles.borrow().get(name.as_str()) {
result = Err(BadArgument {
to: Some("define_paired".to_string()),
pos: 1,
name: Some("name".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Custom style with name `{name}` already exists"
))),
});
return;
}
ctx.state.shared.custom_styles.borrow_mut().insert(Rc::new(style));
ctx.state.reset_match("Custom Style").unwrap();
});
});
result
},
)
.unwrap(),
));
bindings
}
}
#[cfg(test)]
mod tests {
use crate::elements::raw::Raw;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
fn toggle() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
%<[main]
function my_style_start()
nml.raw.push("inline", "start")
end
function my_style_end()
nml.raw.push("inline", "end")
end
function red_style_start()
nml.raw.push("inline", "<a style=\"color:red\">")
end
function red_style_end()
nml.raw.push("inline", "</a>")
end
nml.custom_style.define_toggled("My Style", "|", "my_style_start()", "my_style_end()")
nml.custom_style.define_toggled("My Style2", "°", "red_style_start()", "red_style_end()")
>%
pre |styled| post °Hello°.
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Text { content == "pre " };
Raw { content == "start" };
Text { content == "styled" };
Raw { content == "end" };
Text { content == " post " };
Raw { content == "<a style=\"color:red\">" };
Text { content == "Hello" };
Raw { content == "</a>" };
Text { content == "." };
};
);
}
#[test]
fn paired() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
%<[main]
function my_style_start()
nml.raw.push("inline", "start")
end
function my_style_end()
nml.raw.push("inline", "end")
end
function red_style_start()
nml.raw.push("inline", "<a style=\"color:red\">")
end
function red_style_end()
nml.raw.push("inline", "</a>")
end
nml.custom_style.define_paired("My Style", "[", "]", "my_style_start()", "my_style_end()")
nml.custom_style.define_paired("My Style2", "(", ")", "red_style_start()", "red_style_end()")
>%
pre [styled] post (Hello).
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Text { content == "pre " };
Raw { content == "start" };
Text { content == "styled" };
Raw { content == "end" };
Text { content == " post " };
Raw { content == "<a style=\"color:red\">" };
Text { content == "Hello" };
Raw { content == "</a>" };
Text { content == "." };
};
);
}
}

View file

@ -1,224 +0,0 @@
use crate::parser::style::ElementStyle;
use std::any::Any;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Error::BadArgument;
use mlua::Function;
use mlua::Lua;
use mlua::Value;
use regex::Regex;
use crate::document::document::Document;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Source;
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::elemstyle")]
pub struct ElemStyleRule {
start_re: Regex,
}
impl ElemStyleRule {
pub fn new() -> Self {
Self {
start_re: Regex::new(r"(?:^|\n)@@(.*?)=\s*\{").unwrap(),
}
}
/// Finds the JSON substring inside another string
pub fn json_substring(str: &str) -> Option<&str> {
let mut in_string = false;
let mut brace_depth = 0;
let mut escaped = false;
for (pos, c) in str.char_indices() {
match c {
'{' if !in_string => brace_depth += 1,
'}' if !in_string => brace_depth -= 1,
'\\' if in_string => escaped = !escaped,
'"' if !escaped => in_string = !in_string,
_ => escaped = false,
}
if brace_depth == 0 {
return Some(&str[..=pos]);
}
}
None
}
}
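// Hypothetical usage sketch for `json_substring`: it returns the balanced-brace
// prefix of its input, ignoring braces that appear inside string literals, and leaves
// any trailing text for the cursor to continue from.
//
//     assert_eq!(
//         ElemStyleRule::json_substring(r#"{"width": "50%"} trailing text"#),
//         Some(r#"{"width": "50%"}"#)
//     );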
impl Rule for ElemStyleRule {
fn name(&self) -> &'static str { "Element Style" }
fn previous(&self) -> Option<&'static str> { Some("Script") }
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
self.start_re
.find_at(cursor.source.content(), cursor.pos).map(|m| (m.start(), Box::new([false; 0]) as Box<dyn Any>))
}
fn on_match<'a>(
&self,
state: &ParserState,
_document: &'a (dyn Document<'a> + 'a),
cursor: Cursor,
_match_data: Box<dyn Any>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
let mut reports = vec![];
let matches = self
.start_re
.captures_at(cursor.source.content(), cursor.pos)
.unwrap();
let mut cursor = cursor.at(matches.get(0).unwrap().end() - 1);
let style: Rc<dyn ElementStyle> = if let Some(key) = matches.get(1) {
let trimmed = key.as_str().trim_start().trim_end();
// Check if empty
if trimmed.is_empty() {
reports.push(
Report::build(ReportKind::Error, cursor.source.clone(), key.start())
.with_message("Empty Style Key")
.with_label(
Label::new((cursor.source.clone(), key.range()))
.with_message("Expected a non-empty style key".to_string())
.with_color(state.parser.colors().error),
)
.finish(),
);
return (cursor, reports);
}
// Check if key exists
if !state.shared.styles.borrow().is_registered(trimmed) {
reports.push(
Report::build(ReportKind::Error, cursor.source.clone(), key.start())
.with_message("Unknown Style Key")
.with_label(
Label::new((cursor.source.clone(), key.range()))
.with_message(format!(
"Could not find a style with key: {}",
trimmed.fg(state.parser.colors().info)
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return (cursor, reports);
}
state.shared.styles.borrow().current(trimmed)
} else {
panic!("Unknown error")
};
// Get value
let new_style = match ElemStyleRule::json_substring(
&cursor.source.clone().content().as_str()[cursor.pos..],
) {
None => {
reports.push(
Report::build(ReportKind::Error, cursor.source.clone(), cursor.pos)
.with_message("Invalid Style Value")
.with_label(
Label::new((cursor.source.clone(), matches.get(0).unwrap().range()))
.with_message("Unable to parse json string after style key".to_string())
.with_color(state.parser.colors().error),
)
.finish(),
);
return (cursor, reports);
}
Some(json) => {
cursor = cursor.at(cursor.pos + json.len());
// Attempt to deserialize
match style.from_json(json) {
Err(err) => {
reports.push(
Report::build(ReportKind::Error, cursor.source.clone(), cursor.pos)
.with_message("Invalid Style Value")
.with_label(
Label::new((
cursor.source.clone(),
cursor.pos..cursor.pos + json.len(),
))
.with_message(format!(
"Failed to serialize `{}` into style with key `{}`: {err}",
json.fg(state.parser.colors().highlight),
style.key().fg(state.parser.colors().info)
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return (cursor, reports);
}
Ok(style) => style,
}
}
};
state.shared.styles.borrow_mut().set_current(new_style);
(cursor, reports)
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
"set".to_string(),
lua.create_function(|lua, (style_key, new_style): (String, Value)| {
let mut result = Ok(());
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
if !ctx
.state
.shared
.styles
.borrow()
.is_registered(style_key.as_str())
{
result = Err(BadArgument {
to: Some("set".to_string()),
pos: 1,
name: Some("style_key".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Unable to find style with key: {style_key}"
))),
});
return;
}
let style = ctx.state.shared.styles.borrow().current(style_key.as_str());
let new_style = match style.from_lua(lua, new_style) {
Err(err) => {
result = Err(err);
return;
}
Ok(new_style) => new_style,
};
ctx.state.shared.styles.borrow_mut().set_current(new_style);
})
});
result
})
.unwrap(),
));
bindings
}
}

View file

@ -1,11 +1,8 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::ops::Range; use std::ops::Range;
use std::rc::Rc; use std::rc::Rc;
use std::sync::Arc;
use std::sync::Once; use std::sync::Once;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState;
use crate::parser::util::Property; use crate::parser::util::Property;
use crate::parser::util::PropertyMapError; use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser; use crate::parser::util::PropertyParser;
@ -18,7 +15,6 @@ use crypto::sha2::Sha512;
use graphviz_rust::cmd::Format; use graphviz_rust::cmd::Format;
use graphviz_rust::cmd::Layout; use graphviz_rust::cmd::Layout;
use graphviz_rust::exec_dot; use graphviz_rust::exec_dot;
use mlua::Error::BadArgument;
use mlua::Function; use mlua::Function;
use mlua::Lua; use mlua::Lua;
use regex::Captures; use regex::Captures;
@ -31,6 +27,7 @@ use crate::compiler::compiler::Target;
use crate::document::document::Document; use crate::document::document::Document;
use crate::document::element::ElemKind; use crate::document::element::ElemKind;
use crate::document::element::Element; use crate::document::element::Element;
use crate::parser::parser::Parser;
use crate::parser::rule::RegexRule; use crate::parser::rule::RegexRule;
use crate::parser::source::Source; use crate::parser::source::Source;
use crate::parser::source::Token; use crate::parser::source::Token;
@ -42,6 +39,7 @@ struct Graphviz {
pub dot: String, pub dot: String,
pub layout: Layout, pub layout: Layout,
pub width: String, pub width: String,
pub caption: Option<String>,
} }
fn layout_from_str(value: &str) -> Result<Layout, String> { fn layout_from_str(value: &str) -> Result<Layout, String> {
@ -102,7 +100,6 @@ impl Cached for Graphviz {
fn key(&self) -> <Self as Cached>::Key { fn key(&self) -> <Self as Cached>::Key {
let mut hasher = Sha512::new(); let mut hasher = Sha512::new();
hasher.input((self.layout as usize).to_be_bytes().as_slice()); hasher.input((self.layout as usize).to_be_bytes().as_slice());
hasher.input(self.width.as_bytes());
hasher.input(self.dot.as_bytes()); hasher.input(self.dot.as_bytes());
hasher.result_str() hasher.result_str()
@ -116,26 +113,23 @@ impl Element for Graphviz {
fn element_name(&self) -> &'static str { "Graphviz" } fn element_name(&self) -> &'static str { "Graphviz" }
fn compile( fn to_string(&self) -> String { format!("{self:#?}") }
&self,
compiler: &Compiler, fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
_document: &dyn Document,
_cursor: usize,
) -> Result<String, String> {
match compiler.target() { match compiler.target() {
Target::HTML => { Target::HTML => {
static CACHE_INIT: Once = Once::new(); static CACHE_INIT: Once = Once::new();
CACHE_INIT.call_once(|| { CACHE_INIT.call_once(|| {
if let Some(con) = compiler.cache() { if let Some(mut con) = compiler.cache() {
if let Err(e) = Graphviz::init(con) { if let Err(e) = Graphviz::init(&mut con) {
eprintln!("Unable to create cache table: {e}"); eprintln!("Unable to create cache table: {e}");
} }
} }
}); });
// TODO: Format svg in a div // TODO: Format svg in a div
if let Some(con) = compiler.cache() { if let Some(mut con) = compiler.cache() {
match self.cached(con, |s| s.dot_to_svg()) { match self.cached(&mut con, |s| s.dot_to_svg()) {
Ok(s) => Ok(s), Ok(s) => Ok(s),
Err(e) => match e { Err(e) => match e {
CachedError::SqlErr(e) => { CachedError::SqlErr(e) => {
@ -156,7 +150,6 @@ impl Element for Graphviz {
} }
} }
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::graphviz")]
pub struct GraphRule { pub struct GraphRule {
re: [Regex; 1], re: [Regex; 1],
properties: PropertyParser, properties: PropertyParser,
@ -175,28 +168,31 @@ impl GraphRule {
); );
props.insert( props.insert(
"width".to_string(), "width".to_string(),
Property::new(true, "SVG width".to_string(), Some("100%".to_string())), Property::new(
true,
"SVG width".to_string(),
Some("100%".to_string()),
),
); );
Self { Self {
re: [Regex::new( re: [Regex::new(
r"\[graph\](?:\[((?:\\.|[^\[\]\\])*?)\])?(?:((?:\\.|[^\\\\])*?)\[/graph\])?", r"\[graph\](?:\[((?:\\.|[^\[\]\\])*?)\])?(?:((?:\\.|[^\\\\])*?)\[/graph\])?",
) )
.unwrap()], .unwrap()],
properties: PropertyParser { properties: props }, properties: PropertyParser::new(props),
} }
} }
} }
impl RegexRule for GraphRule { impl RegexRule for GraphRule {
fn name(&self) -> &'static str { "Graphviz" } fn name(&self) -> &'static str { "Graph" }
fn previous(&self) -> Option<&'static str> { Some("Tex") }
fn regexes(&self) -> &[regex::Regex] { &self.re } fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match( fn on_regex_match(
&self, &self,
_: usize, _: usize,
state: &ParserState, parser: &dyn Parser,
document: &dyn Document, document: &dyn Document,
token: Token, token: Token,
matches: Captures, matches: Captures,
@ -213,10 +209,10 @@ impl RegexRule for GraphRule {
Label::new((token.source().clone(), token.range.clone())) Label::new((token.source().clone(), token.range.clone()))
.with_message(format!( .with_message(format!(
"Missing terminating `{}` after first `{}`", "Missing terminating `{}` after first `{}`",
"[/graph]".fg(state.parser.colors().info), "[/graph]".fg(parser.colors().info),
"[graph]".fg(state.parser.colors().info) "[graph]".fg(parser.colors().info)
)) ))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -236,7 +232,7 @@ impl RegexRule for GraphRule {
.with_label( .with_label(
Label::new((token.source().clone(), content.range())) Label::new((token.source().clone(), content.range()))
.with_message("Graph code is empty") .with_message("Graph code is empty")
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -257,7 +253,7 @@ impl RegexRule for GraphRule {
.with_label( .with_label(
Label::new((token.source().clone(), token.range.clone())) Label::new((token.source().clone(), token.range.clone()))
.with_message(format!("Graph is missing property: {e}")) .with_message(format!("Graph is missing property: {e}"))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -275,7 +271,7 @@ impl RegexRule for GraphRule {
.with_label( .with_label(
Label::new((token.source().clone(), props.range())) Label::new((token.source().clone(), props.range()))
.with_message(e) .with_message(e)
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -300,10 +296,10 @@ impl RegexRule for GraphRule {
Label::new((token.source().clone(), token.range.clone())) Label::new((token.source().clone(), token.range.clone()))
.with_message(format!( .with_message(format!(
"Property `layout: {}` cannot be converted: {}", "Property `layout: {}` cannot be converted: {}",
prop.fg(state.parser.colors().info), prop.fg(parser.colors().info),
err.fg(state.parser.colors().error) err.fg(parser.colors().error)
)) ))
.with_color(state.parser.colors().warning), .with_color(parser.colors().warning),
) )
.finish(), .finish(),
); );
@ -319,7 +315,7 @@ impl RegexRule for GraphRule {
token.start() + 1..token.end(), token.start() + 1..token.end(),
)) ))
.with_message(err) .with_message(err)
.with_color(state.parser.colors().warning), .with_color(parser.colors().warning),
) )
.finish(), .finish(),
); );
@ -346,126 +342,34 @@ impl RegexRule for GraphRule {
)) ))
.with_message(format!( .with_message(format!(
"Property `{}` is missing", "Property `{}` is missing",
err.fg(state.parser.colors().info) err.fg(parser.colors().info)
)) ))
.with_color(state.parser.colors().warning), .with_color(parser.colors().warning),
) )
.finish(), .finish(),
); );
return reports; return reports;
} }
_ => panic!("Unknown error"), _ => panic!("Unknown error")
}, },
}; };
state.push( // TODO: Caption
parser.push(
document, document,
Box::new(Graphviz { Box::new(Graphviz {
location: token, location: token,
dot: graph_content, dot: graph_content,
layout: graph_layout, layout: graph_layout,
width: graph_width, width: graph_width,
caption: None,
}), }),
); );
reports reports
} }
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { // TODO
let mut bindings = vec![]; fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
bindings.push((
"push".to_string(),
lua.create_function(|_, (layout, width, dot): (String, String, String)| {
let mut result = Ok(());
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
let layout = match layout_from_str(layout.as_str()) {
Err(err) => {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 1,
name: Some("layout".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Unable to get layout type: {err}"
))),
});
return;
}
Ok(layout) => layout,
};
ctx.state.push(
ctx.document,
Box::new(Graphviz {
location: ctx.location.clone(),
dot,
layout,
width,
}),
);
})
});
result
})
.unwrap(),
));
bindings
}
}
#[cfg(test)]
mod tests {
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
pub fn parse() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
[graph][width=200px, layout=neato]
Some graph...
[/graph]
[graph]
Another graph
[/graph]
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Graphviz { width == "200px", dot == "Some graph..." };
Graphviz { dot == "Another graph" };
);
}
#[test]
pub fn lua() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
%<nml.graphviz.push("neato", "200px", "Some graph...")>%
%<nml.graphviz.push("dot", "", "Another graph")>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Graphviz { width == "200px", dot == "Some graph..." };
Graphviz { dot == "Another graph" };
);
}
} }

View file

@ -1,6 +1,6 @@
use crate::document::document::Document; use crate::document::document::Document;
use crate::document::document::DocumentAccessors; use crate::document::document::DocumentAccessors;
use crate::parser::parser::ParserState; use crate::parser::parser::Parser;
use crate::parser::parser::ReportColors; use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule; use crate::parser::rule::RegexRule;
use crate::parser::source::Source; use crate::parser::source::Source;
@ -10,6 +10,8 @@ use ariadne::Fmt;
use ariadne::Label; use ariadne::Label;
use ariadne::Report; use ariadne::Report;
use ariadne::ReportKind; use ariadne::ReportKind;
use mlua::Function;
use mlua::Lua;
use regex::Captures; use regex::Captures;
use regex::Regex; use regex::Regex;
use std::ops::Range; use std::ops::Range;
@ -17,7 +19,6 @@ use std::rc::Rc;
use super::paragraph::Paragraph; use super::paragraph::Paragraph;
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::import")]
pub struct ImportRule { pub struct ImportRule {
re: [Regex; 1], re: [Regex; 1],
} }
@ -41,14 +42,13 @@ impl ImportRule {
impl RegexRule for ImportRule { impl RegexRule for ImportRule {
fn name(&self) -> &'static str { "Import" } fn name(&self) -> &'static str { "Import" }
fn previous(&self) -> Option<&'static str> { Some("Paragraph") }
fn regexes(&self) -> &[Regex] { &self.re } fn regexes(&self) -> &[Regex] { &self.re }
fn on_regex_match<'a>( fn on_regex_match<'a>(
&self, &self,
_: usize, _: usize,
state: &ParserState, parser: &dyn Parser,
document: &'a dyn Document<'a>, document: &'a dyn Document<'a>,
token: Token, token: Token,
matches: Captures, matches: Captures,
@ -57,7 +57,7 @@ impl RegexRule for ImportRule {
// Path // Path
let import_file = match matches.get(2) { let import_file = match matches.get(2) {
Some(name) => match ImportRule::validate_name(state.parser.colors(), name.as_str()) { Some(name) => match ImportRule::validate_name(parser.colors(), name.as_str()) {
Err(msg) => { Err(msg) => {
result.push( result.push(
Report::build(ReportKind::Error, token.source(), name.start()) Report::build(ReportKind::Error, token.source(), name.start())
@ -66,9 +66,9 @@ impl RegexRule for ImportRule {
Label::new((token.source(), name.range())) Label::new((token.source(), name.range()))
.with_message(format!( .with_message(format!(
"Import name `{}` is invalid. {msg}", "Import name `{}` is invalid. {msg}",
name.as_str().fg(state.parser.colors().highlight) name.as_str().fg(parser.colors().highlight)
)) ))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -85,9 +85,9 @@ impl RegexRule for ImportRule {
Label::new((token.source(), name.range())) Label::new((token.source(), name.range()))
.with_message(format!( .with_message(format!(
"Unable to access file `{}`", "Unable to access file `{}`",
filename.fg(state.parser.colors().highlight) filename.fg(parser.colors().highlight)
)) ))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -104,9 +104,9 @@ impl RegexRule for ImportRule {
Label::new((token.source(), name.range())) Label::new((token.source(), name.range()))
.with_message(format!( .with_message(format!(
"Path `{}` is not a file!", "Path `{}` is not a file!",
filename.fg(state.parser.colors().highlight) filename.fg(parser.colors().highlight)
)) ))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -121,8 +121,7 @@ impl RegexRule for ImportRule {
// [Optional] import as // [Optional] import as
let import_as = match matches.get(1) { let import_as = match matches.get(1) {
Some(as_name) => match ImportRule::validate_as(state.parser.colors(), as_name.as_str()) Some(as_name) => match ImportRule::validate_as(parser.colors(), as_name.as_str()) {
{
Ok(as_name) => as_name, Ok(as_name) => as_name,
Err(msg) => { Err(msg) => {
result.push( result.push(
@ -132,9 +131,9 @@ impl RegexRule for ImportRule {
Label::new((token.source(), as_name.range())) Label::new((token.source(), as_name.range()))
.with_message(format!( .with_message(format!(
"Canot import `{import_file}` as `{}`. {msg}", "Canot import `{import_file}` as `{}`. {msg}",
as_name.as_str().fg(state.parser.colors().highlight) as_name.as_str().fg(parser.colors().highlight)
)) ))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -154,7 +153,7 @@ impl RegexRule for ImportRule {
.with_label( .with_label(
Label::new((token.source(), token.range)) Label::new((token.source(), token.range))
.with_message(format!("Failed to read content from path `{path}`")) .with_message(format!("Failed to read content from path `{path}`"))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -162,15 +161,12 @@ impl RegexRule for ImportRule {
} }
}; };
state.with_state(|new_state| { let import_doc = parser.parse(import, Some(document));
let (import_doc, _) = new_state.parser.parse(new_state, import, Some(document));
document.merge(import_doc.content(), import_doc.scope(), Some(&import_as)); document.merge(import_doc.content(), import_doc.scope(), Some(&import_as));
});
// Close paragraph // Close paragraph
// TODO2: Check if this is safe to remove if document.last_element::<Paragraph>().is_some() {
if document.last_element::<Paragraph>().is_none() { parser.push(
state.push(
document, document,
Box::new(Paragraph { Box::new(Paragraph {
location: Token::new(token.end()..token.end(), token.source()), location: Token::new(token.end()..token.end(), token.source()),
@ -179,6 +175,8 @@ impl RegexRule for ImportRule {
); );
} }
result return result;
} }
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
} }

View file

@ -1,980 +0,0 @@
use crate::compiler::compiler::Compiler;
use crate::compiler::compiler::Target;
use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::lua::kernel::CTX;
use crate::parser::layout::LayoutHolder;
use crate::parser::layout::LayoutType;
use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::state::RuleState;
use crate::parser::state::Scope;
use crate::parser::util::process_escaped;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Error::BadArgument;
use mlua::Function;
use mlua::Lua;
use regex::Captures;
use regex::Match;
use regex::Regex;
use regex::RegexBuilder;
use std::any::Any;
use std::cell::RefCell;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use std::str::FromStr;
use std::sync::Arc;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum LayoutToken {
Begin,
Next,
End,
}
impl FromStr for LayoutToken {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"Begin" | "begin" => Ok(LayoutToken::Begin),
"Next" | "next" => Ok(LayoutToken::Next),
"End" | "end" => Ok(LayoutToken::End),
_ => Err(format!("Unable to find LayoutToken with name: {s}")),
}
}
}
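// Hypothetical usage sketch: both capitalized and lowercase names parse into a
// `LayoutToken`, and anything else surfaces the error string from `from_str`.
//
//     assert_eq!("begin".parse::<LayoutToken>(), Ok(LayoutToken::Begin));
//     assert!("Middle".parse::<LayoutToken>().is_err());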
mod default_layouts {
use crate::parser::layout::LayoutType;
use crate::parser::util::Property;
use crate::parser::util::PropertyParser;
use super::*;
#[derive(Debug)]
pub struct Centered(PropertyParser);
impl Default for Centered {
fn default() -> Self {
let mut properties = HashMap::new();
properties.insert(
"style".to_string(),
Property::new(
true,
"Additional style for the split".to_string(),
Some("".to_string()),
),
);
Self(PropertyParser { properties })
}
}
impl LayoutType for Centered {
fn name(&self) -> &'static str { "Centered" }
fn expects(&self) -> Range<usize> { 1..1 }
fn parse_properties(&self, properties: &str) -> Result<Option<Box<dyn Any>>, String> {
let props = if properties.is_empty() {
self.0.default()
} else {
self.0.parse(properties)
}
.map_err(|err| {
format!(
"Failed to parse properties for layout {}: {err}",
self.name()
)
})?;
let style = props
.get("style", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
.map_err(|err| format!("Failed to parse style: {err:#?}"))
.map(|(_, value)| value)?;
Ok(Some(Box::new(style)))
}
fn compile(
&self,
token: LayoutToken,
_id: usize,
properties: &Option<Box<dyn Any>>,
compiler: &Compiler,
_document: &dyn Document,
) -> Result<String, String> {
match compiler.target() {
Target::HTML => {
let style = match properties
.as_ref()
.unwrap()
.downcast_ref::<String>()
.unwrap()
.as_str()
{
"" => "".to_string(),
str => format!(r#" style={}"#, Compiler::sanitize(compiler.target(), str)),
};
match token {
LayoutToken::Begin => Ok(format!(r#"<div class="centered"{style}>"#)),
LayoutToken::Next => panic!(),
LayoutToken::End => Ok(r#"</div>"#.to_string()),
}
}
_ => todo!(""),
}
}
}
#[derive(Debug)]
pub struct Split(PropertyParser);
impl Default for Split {
fn default() -> Self {
let mut properties = HashMap::new();
properties.insert(
"style".to_string(),
Property::new(
true,
"Additional style for the split".to_string(),
Some("".to_string()),
),
);
Self(PropertyParser { properties })
}
}
impl LayoutType for Split {
fn name(&self) -> &'static str { "Split" }
fn expects(&self) -> Range<usize> { 2..usize::MAX }
fn parse_properties(&self, properties: &str) -> Result<Option<Box<dyn Any>>, String> {
let props = if properties.is_empty() {
self.0.default()
} else {
self.0.parse(properties)
}
.map_err(|err| {
format!(
"Failed to parse properties for layout {}: {err}",
self.name()
)
})?;
let style = props
.get("style", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
.map_err(|err| format!("Failed to parse style: {err:#?}"))
.map(|(_, value)| value)?;
Ok(Some(Box::new(style)))
}
fn compile(
&self,
token: LayoutToken,
_id: usize,
properties: &Option<Box<dyn Any>>,
compiler: &Compiler,
_document: &dyn Document,
) -> Result<String, String> {
match compiler.target() {
Target::HTML => {
let style = match properties
.as_ref()
.unwrap()
.downcast_ref::<String>()
.unwrap()
.as_str()
{
"" => "".to_string(),
str => format!(r#" style={}"#, Compiler::sanitize(compiler.target(), str)),
};
match token {
LayoutToken::Begin => Ok(format!(
r#"<div class="split-container"><div class="split"{style}>"#
)),
LayoutToken::Next => Ok(format!(r#"</div><div class="split"{style}>"#)),
LayoutToken::End => Ok(r#"</div></div>"#.to_string()),
}
}
_ => todo!(""),
}
}
}
}
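// Rough shape of the HTML emitted by the built-in layouts above (Target::HTML only):
// Centered wraps its single block in `<div class="centered">…</div>`, while Split
// produces `<div class="split-container"><div class="split">…</div><div class="split">…</div></div>`,
// with each `Next` token closing one `split` div and opening the next.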
#[derive(Debug)]
struct Layout {
pub(self) location: Token,
pub(self) layout: Rc<dyn LayoutType>,
pub(self) id: usize,
pub(self) token: LayoutToken,
pub(self) properties: Option<Box<dyn Any>>,
}
impl Element for Layout {
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Block }
fn element_name(&self) -> &'static str { "Layout" }
fn compile(&self, compiler: &Compiler, document: &dyn Document, _cursor: usize) -> Result<String, String> {
self.layout
.compile(self.token, self.id, &self.properties, compiler, document)
}
}
struct LayoutState {
/// The layout stack
pub(self) stack: Vec<(Vec<Token>, Rc<dyn LayoutType>)>,
}
impl RuleState for LayoutState {
fn scope(&self) -> Scope { Scope::DOCUMENT }
fn on_remove<'a>(
&self,
state: &ParserState,
document: &dyn Document,
) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![];
let doc_borrow = document.content().borrow();
let at = doc_borrow.last().unwrap().location();
for (tokens, layout_type) in &self.stack {
let start = tokens.first().unwrap();
reports.push(
Report::build(ReportKind::Error, start.source(), start.start())
.with_message("Unterminated Layout")
.with_label(
Label::new((start.source(), start.range.start + 1..start.range.end))
.with_order(1)
.with_message(format!(
"Layout {} stars here",
layout_type.name().fg(state.parser.colors().info)
))
.with_color(state.parser.colors().error),
)
.with_label(
Label::new((at.source(), at.range.clone()))
.with_order(2)
.with_message("Document ends here".to_string())
.with_color(state.parser.colors().error),
)
.finish(),
);
}
reports
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::layout")]
pub struct LayoutRule {
re: [Regex; 3],
}
impl LayoutRule {
pub fn new() -> Self {
Self {
re: [
RegexBuilder::new(
r"(?:^|\n)(?:[^\S\n]*)#\+LAYOUT_BEGIN(?:\[((?:\\.|[^\\\\])*?)\])?(.*)",
)
.multi_line(true)
.build()
.unwrap(),
RegexBuilder::new(
r"(?:^|\n)(?:[^\S\n]*)#\+LAYOUT_NEXT(?:\[((?:\\.|[^\\\\])*?)\])?$",
)
.multi_line(true)
.build()
.unwrap(),
RegexBuilder::new(
r"(?:^|\n)(?:[^\S\n]*)#\+LAYOUT_END(?:\[((?:\\.|[^\\\\])*?)\])?$",
)
.multi_line(true)
.build()
.unwrap(),
],
}
}
pub fn initialize_state(state: &ParserState) -> Rc<RefCell<dyn RuleState>> {
let mut rule_state_borrow = state.shared.rule_state.borrow_mut();
match rule_state_borrow.get(STATE_NAME) {
Some(state) => state,
None => {
// Insert as a new state
match rule_state_borrow.insert(
STATE_NAME.into(),
Rc::new(RefCell::new(LayoutState { stack: vec![] })),
) {
Err(err) => panic!("{err}"),
Ok(state) => state,
}
}
}
}
pub fn parse_properties<'a>(
colors: &ReportColors,
token: &Token,
layout_type: Rc<dyn LayoutType>,
properties: Option<Match>,
) -> Result<Option<Box<dyn Any>>, Report<'a, (Rc<dyn Source>, Range<usize>)>> {
match properties {
None => match layout_type.parse_properties("") {
Ok(props) => Ok(props),
Err(err) => Err(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Unable to parse layout properties")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message(err)
.with_color(colors.error),
)
.finish(),
),
},
Some(props) => {
let trimmed = props.as_str().trim_start().trim_end();
let content = process_escaped('\\', "]", trimmed);
match layout_type.parse_properties(content.as_str()) {
Ok(props) => Ok(props),
Err(err) => {
Err(
Report::build(ReportKind::Error, token.source(), props.start())
.with_message("Unable to parse layout properties")
.with_label(
Label::new((token.source(), props.range()))
.with_message(err)
.with_color(colors.error),
)
.finish(),
)
}
}
}
}
}
}
static STATE_NAME: &str = "elements.layout";
impl RegexRule for LayoutRule {
fn name(&self) -> &'static str { "Layout" }
fn previous(&self) -> Option<&'static str> { Some("Media") }
fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match(
&self,
index: usize,
state: &ParserState,
document: &dyn Document,
token: Token,
matches: Captures,
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![];
let rule_state = LayoutRule::initialize_state(state);
if index == 0
// BEGIN_LAYOUT
{
match matches.get(2) {
None => {
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Missing Layout Name")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message(format!(
"Missing layout name after `{}`",
"#+BEGIN_LAYOUT".fg(state.parser.colors().highlight)
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
Some(name) => {
let trimmed = name.as_str().trim_start().trim_end();
if name.as_str().is_empty() || trimmed.is_empty()
// Empty name
{
reports.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Empty Layout Name")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message(format!(
"Empty layout name after `{}`",
"#+BEGIN_LAYOUT".fg(state.parser.colors().highlight)
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
} else if !name.as_str().chars().next().unwrap().is_whitespace()
// Missing space
{
reports.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid Layout Name")
.with_label(
Label::new((token.source(), name.range()))
.with_message(format!(
"Missing a space before layout name `{}`",
name.as_str().fg(state.parser.colors().highlight)
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
// Get layout
let layout_type = match state.shared.layouts.borrow().get(trimmed) {
None => {
reports.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Unknown Layout")
.with_label(
Label::new((token.source(), name.range()))
.with_message(format!(
"Cannot find layout `{}`",
trimmed.fg(state.parser.colors().highlight)
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
Some(layout_type) => layout_type,
};
// Parse properties
let properties = match LayoutRule::parse_properties(
state.parser.colors(),
&token,
layout_type.clone(),
matches.get(1),
) {
Ok(props) => props,
Err(rep) => {
reports.push(rep);
return reports;
}
};
state.push(
document,
Box::new(Layout {
location: token.clone(),
layout: layout_type.clone(),
id: 0,
token: LayoutToken::Begin,
properties,
}),
);
rule_state
.as_ref()
.borrow_mut()
.downcast_mut::<LayoutState>()
.map_or_else(
|| panic!("Invalid state at: `{STATE_NAME}`"),
|s| s.stack.push((vec![token.clone()], layout_type.clone())),
);
}
};
return reports;
}
let (id, token_type, layout_type, properties) = if index == 1
// LAYOUT_NEXT
{
let mut rule_state_borrow = rule_state.as_ref().borrow_mut();
let layout_state = rule_state_borrow.downcast_mut::<LayoutState>().unwrap();
let (tokens, layout_type) = match layout_state.stack.last_mut() {
None => {
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Invalid #+LAYOUT_NEXT")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message("No active layout found".to_string())
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
Some(last) => last,
};
if layout_type.expects().end < tokens.len()
// Too many blocks
{
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Unexpected #+LAYOUT_NEXT")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message(format!(
"Layout expects a maximum of {} blocks, currently at {}",
layout_type.expects().end.fg(state.parser.colors().info),
tokens.len().fg(state.parser.colors().info),
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
// Parse properties
let properties = match LayoutRule::parse_properties(
state.parser.colors(),
&token,
layout_type.clone(),
matches.get(1),
) {
Ok(props) => props,
Err(rep) => {
reports.push(rep);
return reports;
}
};
tokens.push(token.clone());
(
tokens.len() - 1,
LayoutToken::Next,
layout_type.clone(),
properties,
)
} else {
// LAYOUT_END
let mut rule_state_borrow = rule_state.as_ref().borrow_mut();
let layout_state = rule_state_borrow.downcast_mut::<LayoutState>().unwrap();
let (tokens, layout_type) = match layout_state.stack.last_mut() {
None => {
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Invalid #+LAYOUT_END")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message("No active layout found".to_string())
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
Some(last) => last,
};
if layout_type.expects().start > tokens.len()
// Not enough blocks
{
reports.push(
Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Unexpected #+LAYOUT_END")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message(format!(
"Layout expects a minimum of {} blocks, currently at {}",
layout_type.expects().start.fg(state.parser.colors().info),
tokens.len().fg(state.parser.colors().info),
))
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
// Parse properties
let properties = match LayoutRule::parse_properties(
state.parser.colors(),
&token,
layout_type.clone(),
matches.get(1),
) {
Ok(props) => props,
Err(rep) => {
reports.push(rep);
return reports;
}
};
let layout_type = layout_type.clone();
let id = tokens.len();
layout_state.stack.pop();
(id, LayoutToken::End, layout_type, properties)
};
state.push(
document,
Box::new(Layout {
location: token,
layout: layout_type,
id,
token: token_type,
properties,
}),
);
reports
}
// TODO: Add method to create new layouts
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
"push".to_string(),
lua.create_function(
|_, (token, layout, properties): (String, String, String)| {
let mut result = Ok(());
// Parse token
let layout_token = match LayoutToken::from_str(token.as_str())
{
Err(err) => {
return Err(BadArgument {
to: Some("push".to_string()),
pos: 1,
name: Some("token".to_string()),
cause: Arc::new(mlua::Error::external(err))
});
},
Ok(token) => token,
};
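// CTX carries the current parser context (state, document, location) while Lua code is evaluated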
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
// Make sure the rule state has been initialized
let rule_state = LayoutRule::initialize_state(ctx.state);
// Look up the layout type by name
let layout_type = match ctx.state.shared.layouts.borrow().get(layout.as_str())
{
None => {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 2,
name: Some("layout".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Cannot find layout with name `{layout}`"
))),
});
return;
},
Some(layout) => layout,
};
// Parse properties
let layout_properties = match layout_type.parse_properties(properties.as_str()) {
Err(err) => {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 3,
name: Some("properties".to_string()),
cause: Arc::new(mlua::Error::external(err)),
});
return;
},
Ok(properties) => properties,
};
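// Determine the block id for this token: Begin starts a layout at 0, Next appends to the current layout, End pops it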
let id = match layout_token {
LayoutToken::Begin => {
ctx.state.push(
ctx.document,
Box::new(Layout {
location: ctx.location.clone(),
layout: layout_type.clone(),
id: 0,
token: LayoutToken::Begin,
properties: layout_properties,
}),
);
rule_state
.as_ref()
.borrow_mut()
.downcast_mut::<LayoutState>()
.map_or_else(
|| panic!("Invalid state at: `{STATE_NAME}`"),
|s| s.stack.push((vec![ctx.location.clone()], layout_type.clone())),
);
return;
},
LayoutToken::Next => {
let mut state_borrow = rule_state.as_ref().borrow_mut();
let layout_state = state_borrow.downcast_mut::<LayoutState>().unwrap();
let (tokens, current_layout_type) = match layout_state.stack.last_mut() {
None => {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 1,
name: Some("token".to_string()),
cause: Arc::new(mlua::Error::external("Unable set next layout: No active layout found".to_string())),
});
return;
}
Some(last) => last,
};
if !Rc::ptr_eq(&layout_type, current_layout_type) {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 2,
name: Some("layout".to_string()),
cause: Arc::new(mlua::Error::external(format!("Invalid layout next, current layout is {} vs {}",
current_layout_type.name(),
layout_type.name())))
});
return;
}
if layout_type.expects().end < tokens.len()
// Too many blocks
{
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 1,
name: Some("token".to_string()),
cause: Arc::new(mlua::Error::external(format!("Unable set layout next: layout {} expect at most {} blocks, currently at {} blocks",
layout_type.name(),
layout_type.expects().end,
tokens.len()
))),
});
return;
}
tokens.push(ctx.location.clone());
tokens.len() - 1
},
LayoutToken::End => {
let mut state_borrow = rule_state.as_ref().borrow_mut();
let layout_state = state_borrow.downcast_mut::<LayoutState>().unwrap();
let (tokens, current_layout_type) = match layout_state.stack.last_mut() {
None => {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 1,
name: Some("token".to_string()),
cause: Arc::new(mlua::Error::external("Unable set layout end: No active layout found".to_string())),
});
return;
}
Some(last) => last,
};
if !Rc::ptr_eq(&layout_type, current_layout_type) {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 2,
name: Some("layout".to_string()),
cause: Arc::new(mlua::Error::external(format!("Invalid layout end, current layout is {} vs {}",
current_layout_type.name(),
layout_type.name())))
});
return;
}
if layout_type.expects().start > tokens.len()
// Not enough blocks
{
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 1,
name: Some("token".to_string()),
cause: Arc::new(mlua::Error::external(format!("Unable set next layout: layout {} expect at least {} blocks, currently at {} blocks",
layout_type.name(),
layout_type.expects().start,
tokens.len()
))),
});
return;
}
let id = tokens.len();
layout_state.stack.pop();
id
}
};
ctx.state.push(
ctx.document,
Box::new(Layout {
location: ctx.location.clone(),
layout: layout_type.clone(),
id,
token: layout_token,
properties: layout_properties,
}),
);
})
});
result
},
)
.unwrap(),
));
bindings
}
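// Register the built-in layouts (`Centered` and `Split`) referenced by the tests below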
fn register_layouts(&self, holder: &mut LayoutHolder) {
holder.insert(Rc::new(default_layouts::Centered::default()));
holder.insert(Rc::new(default_layouts::Split::default()));
}
}
#[cfg(test)]
mod tests {
use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
#+LAYOUT_BEGIN[style=A] Split
A
#+LAYOUT_BEGIN[style=B] Centered
B
#+LAYOUT_END
#+LAYOUT_NEXT[style=C]
C
#+LAYOUT_BEGIN[style=D] Split
D
#+LAYOUT_NEXT[style=E]
E
#+LAYOUT_END
#+LAYOUT_END
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Layout { token == LayoutToken::Begin, id == 0 };
Paragraph {
Text { content == "A" };
};
Layout { token == LayoutToken::Begin, id == 0 };
Paragraph {
Text { content == "B" };
};
Layout { token == LayoutToken::End, id == 1 };
Layout { token == LayoutToken::Next, id == 1 };
Paragraph {
Text { content == "C" };
};
Layout { token == LayoutToken::Begin, id == 0 };
Paragraph {
Text { content == "D" };
};
Layout { token == LayoutToken::Next, id == 1 };
Paragraph {
Text { content == "E" };
};
Layout { token == LayoutToken::End, id == 2 };
Layout { token == LayoutToken::End, id == 2 };
);
}
#[test]
fn lua() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
%<nml.layout.push("begin", "Split", "style=A")>%
A
%<nml.layout.push("Begin", "Centered", "style=B")>%
B
%<nml.layout.push("end", "Centered", "")>%
%<nml.layout.push("next", "Split", "style=C")>%
C
%<nml.layout.push("Begin", "Split", "style=D")>%
D
%<nml.layout.push("Next", "Split", "style=E")>%
E
%<nml.layout.push("End", "Split", "")>%
%<nml.layout.push("End", "Split", "")>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Layout { token == LayoutToken::Begin, id == 0 };
Paragraph {
Text { content == "A" };
};
Layout { token == LayoutToken::Begin, id == 0 };
Paragraph {
Text { content == "B" };
};
Layout { token == LayoutToken::End, id == 1 };
Layout { token == LayoutToken::Next, id == 1 };
Paragraph {
Text { content == "C" };
};
Layout { token == LayoutToken::Begin, id == 0 };
Paragraph {
Text { content == "D" };
};
Layout { token == LayoutToken::Next, id == 1 };
Paragraph {
Text { content == "E" };
};
Layout { token == LayoutToken::End, id == 2 };
Layout { token == LayoutToken::End, id == 2 };
);
}
}

@ -1,77 +1,62 @@
use crate::compiler::compiler::Compiler; use crate::compiler::compiler::Compiler;
use crate::compiler::compiler::Target; use crate::compiler::compiler::Target;
use crate::document::document::Document; use crate::document::document::Document;
use crate::document::element::ContainerElement;
use crate::document::element::ElemKind; use crate::document::element::ElemKind;
use crate::document::element::Element; use crate::document::element::Element;
use crate::lua::kernel::CTX; use crate::parser::parser::Parser;
use crate::parser::parser::ParserState;
use crate::parser::rule::RegexRule; use crate::parser::rule::RegexRule;
use crate::parser::source::Source; use crate::parser::source::Source;
use crate::parser::source::Token; use crate::parser::source::Token;
use crate::parser::source::VirtualSource;
use crate::parser::util; use crate::parser::util;
use ariadne::Fmt; use ariadne::Fmt;
use ariadne::Label; use ariadne::Label;
use ariadne::Report; use ariadne::Report;
use ariadne::ReportKind; use ariadne::ReportKind;
use mlua::Error::BadArgument;
use mlua::Function; use mlua::Function;
use mlua::Lua; use mlua::Lua;
use regex::Captures; use regex::Captures;
use regex::Regex; use regex::Regex;
use std::ops::Range; use std::ops::Range;
use std::rc::Rc; use std::rc::Rc;
use std::sync::Arc;
#[derive(Debug)] #[derive(Debug)]
pub struct Link { pub struct Link {
pub location: Token, location: Token,
/// Display content of link name: String, // Link name
pub display: Vec<Box<dyn Element>>, url: String, // Link url
/// Url of link }
pub url: String,
impl Link {
pub fn new(location: Token, name: String, url: String) -> Self {
Self {
location: location,
name,
url,
}
}
} }
impl Element for Link { impl Element for Link {
fn location(&self) -> &Token { &self.location } fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Inline } fn kind(&self) -> ElemKind { ElemKind::Inline }
fn element_name(&self) -> &'static str { "Link" } fn element_name(&self) -> &'static str { "Link" }
fn compile(&self, compiler: &Compiler, document: &dyn Document, cursor: usize) -> Result<String, String> { fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
match compiler.target() { match compiler.target() {
Target::HTML => { Target::HTML => Ok(format!(
let mut result = format!( "<a href=\"{}\">{}</a>",
"<a href=\"{}\">", Compiler::sanitize(compiler.target(), self.url.as_str()),
Compiler::sanitize(compiler.target(), self.url.as_str()) Compiler::sanitize(compiler.target(), self.name.as_str()),
); )),
Target::LATEX => Ok(format!(
for elem in &self.display { "\\href{{{}}}{{{}}}",
result += elem.compile(compiler, document, cursor+result.len())?.as_str(); Compiler::sanitize(compiler.target(), self.url.as_str()),
Compiler::sanitize(compiler.target(), self.name.as_str()),
)),
} }
result += "</a>";
Ok(result)
}
_ => todo!(""),
} }
} }
fn as_container(&self) -> Option<&dyn ContainerElement> { Some(self) }
}
impl ContainerElement for Link {
fn contained(&self) -> &Vec<Box<dyn Element>> { &self.display }
fn push(&mut self, elem: Box<dyn Element>) -> Result<(), String> {
if elem.downcast_ref::<Link>().is_some() {
return Err("Tried to push a link inside of a link".to_string());
}
self.display.push(elem);
Ok(())
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::link")]
pub struct LinkRule { pub struct LinkRule {
re: [Regex; 1], re: [Regex; 1],
} }
@ -86,75 +71,54 @@ impl LinkRule {
impl RegexRule for LinkRule { impl RegexRule for LinkRule {
fn name(&self) -> &'static str { "Link" } fn name(&self) -> &'static str { "Link" }
fn previous(&self) -> Option<&'static str> { Some("Link") }
fn regexes(&self) -> &[Regex] { &self.re } fn regexes(&self) -> &[Regex] { &self.re }
fn on_regex_match<'a>( fn on_regex_match<'a>(
&self, &self,
_: usize, _: usize,
state: &ParserState, parser: &dyn Parser,
document: &'a (dyn Document<'a> + 'a), document: &'a dyn Document,
token: Token, token: Token,
matches: Captures, matches: Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> { ) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![]; let mut result = vec![];
let link_name = match matches.get(1) {
let link_display = match matches.get(1) { Some(name) => {
Some(display) => { if name.as_str().is_empty() {
if display.as_str().is_empty() { result.push(
reports.push( Report::build(ReportKind::Error, token.source(), name.start())
Report::build(ReportKind::Error, token.source(), display.start())
.with_message("Empty link name") .with_message("Empty link name")
.with_label( .with_label(
Label::new((token.source().clone(), display.range())) Label::new((token.source().clone(), name.range()))
.with_message("Link name is empty") .with_message("Link name is empty")
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
return reports; return result;
} }
let processed = util::process_escaped('\\', "]", display.as_str()); // TODO: process into separate document...
if processed.is_empty() { let text_content = util::process_text(document, name.as_str());
reports.push(
Report::build(ReportKind::Error, token.source(), display.start()) if text_content.as_str().is_empty() {
result.push(
Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Empty link name") .with_message("Empty link name")
.with_label( .with_label(
Label::new((token.source(), display.range())) Label::new((token.source(), name.range()))
.with_message(format!( .with_message(format!(
"Link name is empty. Once processed, `{}` yields `{}`", "Link name is empty. Once processed, `{}` yields `{}`",
display.as_str().fg(state.parser.colors().highlight), name.as_str().fg(parser.colors().highlight),
processed.fg(state.parser.colors().highlight), text_content.as_str().fg(parser.colors().highlight),
)) ))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
return reports; return result;
}
let source = Rc::new(VirtualSource::new(
Token::new(display.range(), token.source()),
"Link Display".to_string(),
processed,
));
match util::parse_paragraph(state, source, document) {
Err(err) => {
reports.push(
Report::build(ReportKind::Error, token.source(), display.start())
.with_message("Failed to parse link display")
.with_label(
Label::new((token.source(), display.range()))
.with_message(err.to_string())
.with_color(state.parser.colors().error),
)
.finish(),
);
return reports;
}
Ok(mut paragraph) => std::mem::take(&mut paragraph.content),
} }
text_content
} }
_ => panic!("Empty link name"), _ => panic!("Empty link name"),
}; };
@ -162,173 +126,50 @@ impl RegexRule for LinkRule {
let link_url = match matches.get(2) { let link_url = match matches.get(2) {
Some(url) => { Some(url) => {
if url.as_str().is_empty() { if url.as_str().is_empty() {
reports.push( result.push(
Report::build(ReportKind::Error, token.source(), url.start()) Report::build(ReportKind::Error, token.source(), url.start())
.with_message("Empty link url") .with_message("Empty link url")
.with_label( .with_label(
Label::new((token.source(), url.range())) Label::new((token.source(), url.range()))
.with_message("Link url is empty") .with_message("Link url is empty")
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
return reports; return result;
} }
let text_content = util::process_text(document, url.as_str()); let text_content = util::process_text(document, url.as_str());
if text_content.is_empty() { if text_content.as_str().is_empty() {
reports.push( result.push(
Report::build(ReportKind::Error, token.source(), url.start()) Report::build(ReportKind::Error, token.source(), url.start())
.with_message("Empty link url") .with_message("Empty link url")
.with_label( .with_label(
Label::new((token.source(), url.range())) Label::new((token.source(), url.range()))
.with_message(format!( .with_message(format!(
"Link url is empty. Once processed, `{}` yields `{}`", "Link url is empty. Once processed, `{}` yields `{}`",
url.as_str().fg(state.parser.colors().highlight), url.as_str().fg(parser.colors().highlight),
text_content.as_str().fg(state.parser.colors().highlight), text_content.as_str().fg(parser.colors().highlight),
)) ))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
return reports; return result;
} }
text_content text_content
} }
_ => panic!("Empty link url"), _ => panic!("Empty link url"),
}; };
state.push( parser.push(
document, document,
Box::new(Link { Box::new(Link::new(token.clone(), link_name, link_url)),
location: token,
display: link_display,
url: link_url,
}),
); );
reports return result;
} }
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { // TODO
let mut bindings = vec![]; fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
bindings.push((
"push".to_string(),
lua.create_function(|_, (display, url): (String, String)| {
let mut result = Ok(());
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
let source = Rc::new(VirtualSource::new(
ctx.location.clone(),
"Link Display".to_string(),
display,
));
let display_content =
match util::parse_paragraph(ctx.state, source, ctx.document) {
Err(err) => {
result = Err(BadArgument {
to: Some("push".to_string()),
pos: 1,
name: Some("display".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Failed to parse link display: {err}"
))),
});
return;
}
Ok(mut paragraph) => {
std::mem::take(&mut paragraph.content)
}
};
ctx.state.push(
ctx.document,
Box::new(Link {
location: ctx.location.clone(),
display: display_content,
url,
}),
);
})
});
result
})
.unwrap(),
));
bindings
}
}
#[cfg(test)]
mod tests {
use crate::elements::paragraph::Paragraph;
use crate::elements::style::Style;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
Some [link](url).
[**BOLD link**](another url)
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Text { content == "Some " };
Link { url == "url" } { Text { content == "link" }; };
Text { content == "." };
Link { url == "another url" } {
Style;
Text { content == "BOLD link" };
Style;
};
};
);
}
#[test]
fn lua() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
Some %<nml.link.push("link", "url")>%.
%<
nml.link.push("**BOLD link**", "another url")
>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Text { content == "Some " };
Link { url == "url" } { Text { content == "link" }; };
Text { content == "." };
Link { url == "another url" } {
Style;
Text { content == "BOLD link" };
Style;
};
};
);
}
} }

@ -1,515 +1,341 @@
use std::any::Any; use std::{any::Any, cell::Ref, ops::Range, rc::Rc};
use std::cell::Ref;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use crate::compiler::compiler::Compiler; use crate::{compiler::compiler::{Compiler, Target}, document::{document::{Document, DocumentAccessors}, element::{ElemKind, Element}}, parser::{parser::Parser, rule::Rule, source::{Cursor, Source, Token, VirtualSource}}};
use crate::compiler::compiler::Target; use ariadne::{Label, Report, ReportKind};
use crate::document::document::Document; use mlua::{Function, Lua};
use crate::document::document::DocumentAccessors;
use crate::document::element::ContainerElement;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::parser::parser::ParserState;
use crate::parser::rule::Rule;
use crate::parser::source::Cursor;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::source::VirtualSource;
use crate::parser::util;
use crate::parser::util::process_escaped;
use crate::parser::util::Property;
use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use regex::Match;
use regex::Regex; use regex::Regex;
#[derive(Debug, PartialEq, Eq, Clone, Copy)] use super::paragraph::Paragraph;
pub enum MarkerKind {
Open,
Close,
}
#[derive(Debug)]
pub struct ListMarker {
pub(self) location: Token,
pub(self) numbered: bool,
pub(self) kind: MarkerKind,
}
impl Element for ListMarker {
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Block }
fn element_name(&self) -> &'static str { "List Marker" }
fn compile(&self, compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> {
match compiler.target() {
Target::HTML => match (self.kind, self.numbered) {
(MarkerKind::Close, true) => Ok("</ol>".to_string()),
(MarkerKind::Close, false) => Ok("</ul>".to_string()),
(MarkerKind::Open, true) => Ok("<ol>".to_string()),
(MarkerKind::Open, false) => Ok("<ul>".to_string()),
},
_ => todo!(),
}
}
}
#[derive(Debug)] #[derive(Debug)]
pub struct ListEntry { pub struct ListEntry {
pub(self) location: Token, location: Token,
pub(self) numbering: Vec<(bool, usize)>, numbering: Vec<(bool, usize)>,
pub(self) content: Vec<Box<dyn Element>>, content: Vec<Box<dyn Element>>,
pub(self) bullet: Option<String>,
// TODO bullet_maker : FnMut<...>
} }
impl Element for ListEntry { impl ListEntry {
pub fn new(location: Token, numbering: Vec<(bool, usize)>, content: Vec<Box<dyn Element>>) -> Self {
Self { location, numbering, content }
}
}
#[derive(Debug)]
pub struct List
{
location: Token,
entries: Vec<ListEntry>
}
impl List
{
pub fn new(location: Token) -> Self
{
Self
{
location,
entries: Vec::new()
}
}
pub fn push(&mut self, entry: ListEntry)
{
self.location.range = self.location.start()..entry.location.end();
self.entries.push(entry);
}
}
impl Element for List
{
fn location(&self) -> &Token { &self.location } fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Block } fn kind(&self) -> ElemKind { ElemKind::Block }
fn element_name(&self) -> &'static str { "List Entry" } fn element_name(&self) -> &'static str { "List" }
fn compile(&self, compiler: &Compiler, document: &dyn Document, cursor: usize) -> Result<String, String> { fn to_string(&self) -> String { format!("{self:#?}") }
match compiler.target() {
fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String> {
match compiler.target()
{
Target::HTML => { Target::HTML => {
let mut result = String::new(); let mut result = String::new();
if let Some((numbered, number)) = self.numbering.last()
//TODO: Do something about indexing
let mut current_list: Vec<bool> = vec![];
let mut match_stack = |result: &mut String, target: &Vec<(bool, usize)>| {
// Find index after which current_list and target differ
let mut match_idx = 0usize;
for i in 0..current_list.len()
{ {
if *numbered { if i >= target.len() || current_list[i] != target[i].0 { break }
result += format!("<li value=\"{number}\">").as_str(); else { match_idx = i+1; }
}
else {
result += "<li>";
}
}
for elem in &self.content {
result += elem.compile(compiler, document, cursor+result.len())?.as_str();
}
result += "</li>";
Ok(result)
}
_ => todo!(),
}
} }
fn as_container(&self) -> Option<&dyn ContainerElement> { Some(self) } // Close until same match
for _ in match_idx..current_list.len()
{
result.push_str(["</ul>", "</ol>"][current_list.pop().unwrap() as usize]);
} }
impl ContainerElement for ListEntry { // Open
fn contained(&self) -> &Vec<Box<dyn Element>> { &self.content } for i in match_idx..target.len()
{
fn push(&mut self, elem: Box<dyn Element>) -> Result<(), String> { result.push_str(["<ul>", "<ol>"][target[i].0 as usize]);
if elem.kind() == ElemKind::Block { current_list.push(target[i].0);
return Err("Cannot add block element inside a list".to_string());
} }
};
self.content.push(elem); match self.entries.iter()
.try_for_each(|ent|
{
match_stack(&mut result, &ent.numbering);
result.push_str("<li>");
match ent.content.iter().enumerate()
.try_for_each(|(_idx, elem)| {
match elem.compile(compiler, document) {
Err(e) => Err(e),
Ok(s) => { result.push_str(s.as_str()); Ok(()) }
}
})
{
Err(e) => Err(e),
_ => {
result.push_str("</li>");
Ok(()) Ok(())
} }
} }
})
{
Err(e) => return Err(e),
_ => {}
}
match_stack(&mut result, &Vec::<(bool, usize)>::new());
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::list")] Ok(result)
pub struct ListRule { }
Target::LATEX => Err("Unimplemented compiler".to_string())
}
}
}
/*
impl Element for ListEntry
{
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Inline }
fn element_name(&self) -> &'static str { "List" }
fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, compiler: &Compiler) -> Result<String, String> {
lazy_static! {
static ref STATE_NAME : &'static str = "list.state";
static ref LIST_OPEN : [&'static str; 2] = ["<ul>", "<ol>"];
static ref LIST_CLOSE : [&'static str; 2] = ["</ul>", "</ol>"];
}
// TODO: State.shouldpreserve?
// Called upon every element
//let state = compiler.get_state_mut::<ListState, _>(*STATE_NAME)
//.or_else(|| {
// compiler.insert_state(STATE_NAME.to_string(), Box::new(ListState(Vec::new())) as Box<dyn Any>);
// compiler.get_state_mut::<ListState, _>(*STATE_NAME)
//}).unwrap();
match compiler.target()
{
Target::HTML => {
let mut result = String::new();
//TODO: Do something about indexing
//&self.numbering.iter()
// .zip(&state.0)
// .for_each(|((wants_numbered, _), is_numbered)|
// {
//
// });
result.push_str("<li>");
match self.content.iter()
.try_for_each(|ent| match ent.compile(compiler) {
Err(e) => Err(e),
Ok(s) => Ok(result.push_str(s.as_str())),
})
{
Err(e) => return Err(e),
_ => {}
}
result.push_str("</li>");
//result.push_str(LIST_OPEN[self.numbered as usize]);
//self.entries.iter()
// .for_each(|(_index, entry)|
// result.push_str(format!("<li>{}</li>", compiler.compile(entry)).as_str()));
//result.push_str(LIST_CLOSE[self.numbered as usize]);
Ok(result)
}
Target::LATEX => Err("Unimplemented compiler".to_string())
}
}
}
*/
pub struct ListRule
{
start_re: Regex, start_re: Regex,
continue_re: Regex, continue_re: Regex
properties: PropertyParser,
} }
impl ListRule { impl ListRule {
pub fn new() -> Self { pub fn new() -> Self {
let mut props = HashMap::new();
props.insert(
"offset".to_string(),
Property::new(false, "Entry numbering offset".to_string(), None),
);
props.insert(
"bullet".to_string(),
Property::new(false, "Entry bullet".to_string(), None),
);
Self { Self {
start_re: Regex::new(r"(?:^|\n)(?:[^\S\r\n]+)([*-]+)(?:\[((?:\\.|[^\\\\])*?)\])?(.*)") start_re: Regex::new(r"(?:^|\n)(?:[^\S\r\n]+)([*-]+).*").unwrap(),
.unwrap(), continue_re: Regex::new(r"(?:^|\n)([^\S\r\n]+).*").unwrap(),
continue_re: Regex::new(r"(?:^|\n)([^\S\r\n]+)([^\s].*)").unwrap(),
properties: PropertyParser { properties: props },
}
} }
fn push_markers(
token: &Token,
state: &ParserState,
document: &dyn Document,
current: &Vec<(bool, usize)>,
target: &Vec<(bool, usize)>,
) {
let mut start_pos = 0;
for i in 0..std::cmp::min(target.len(), current.len()) {
if current[i].0 != target[i].0 {
break;
} }
start_pos += 1; fn parse_depth(depth: &str, document: &dyn Document) -> Vec<(bool, usize)>
} {
// Close
for i in start_pos..current.len() {
state.push(
document,
Box::new(ListMarker {
location: token.clone(),
kind: MarkerKind::Close,
numbered: current[current.len() - 1 - (i - start_pos)].0,
}),
);
}
// Open
for i in start_pos..target.len() {
state.push(
document,
Box::new(ListMarker {
location: token.clone(),
kind: MarkerKind::Open,
numbered: target[i].0,
}),
);
}
}
fn parse_properties(&self, m: Match) -> Result<(Option<usize>, Option<String>), String> {
let processed = process_escaped('\\', "]", m.as_str());
let pm = self.properties.parse(processed.as_str())?;
let offset = match pm.get("offset", |_, s| s.parse::<usize>()) {
Ok((_, val)) => Some(val),
Err(err) => match err {
PropertyMapError::ParseError(err) => {
return Err(format!("Failed to parse `offset`: {err}"))
}
PropertyMapError::NotFoundError(_) => None,
},
};
let bullet = pm
.get("bullet", |_, s| -> Result<String, ()> { Ok(s.to_string()) })
.map(|(_, s)| s)
.ok();
Ok((offset, bullet))
}
fn parse_depth(depth: &str, document: &dyn Document, offset: usize) -> Vec<(bool, usize)> {
let mut parsed = vec![]; let mut parsed = vec![];
let prev_entry = document // FIXME: Previous iteration used to recursively retrieve the list indent
.last_element::<ListEntry>() let prev_entry = document.last_element::<List>()
.and_then(|list| Ref::filter_map(list, |m| m.entries.last() ).ok() )
.and_then(|entry| Ref::filter_map(entry, |e| Some(&e.numbering)).ok() ); .and_then(|entry| Ref::filter_map(entry, |e| Some(&e.numbering)).ok() );
let mut continue_match = true; let mut continue_match = true;
depth.chars().enumerate().for_each(|(idx, c)| { depth.chars().enumerate().for_each(|(idx, c)|
let number = if offset == usize::MAX { {
prev_entry let number = prev_entry.as_ref()
.as_ref()
.and_then(|v| { .and_then(|v| {
if !continue_match { if !continue_match { return None }
return None;
}
let numbered = c == '-'; let numbered = c == '-';
match v.get(idx) { match v.get(idx)
{
None => None, None => None,
Some((prev_numbered, prev_idx)) => { Some((prev_numbered, prev_idx)) => {
if *prev_numbered != numbered { if *prev_numbered != numbered { continue_match = false; None } // New depth
continue_match = false; else if idx+1 == v.len() { Some(prev_idx+1) } // Increase from previous
None else { Some(*prev_idx) } // Do nothing
}
// New depth
else if idx + 1 == v.len() {
Some(prev_idx + 1)
}
// Increase from previous
else {
Some(*prev_idx)
} // Do nothing
} }
} }
}) })
.unwrap_or(1) .or(Some(0usize))
} else { .unwrap();
offset
};
match c { match c
{
'*' => parsed.push((false, number)), '*' => parsed.push((false, number)),
'-' => parsed.push((true, number)), '-' => parsed.push((true, number)),
_ => panic!("Unimplemented"), _ => panic!("Unimplemented")
} }
}); });
parsed return parsed;
} }
} }
impl Rule for ListRule { impl Rule for ListRule
fn name(&self) -> &'static str { "List" }
fn previous(&self) -> Option<&'static str> { Some("Raw") }
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
self.start_re
.find_at(cursor.source.content(), cursor.pos).map(|m| (m.start(), Box::new([false; 0]) as Box<dyn Any>))
}
fn on_match<'a>(
&self,
state: &ParserState,
document: &'a dyn Document<'a>,
cursor: Cursor,
_match_data: Box<dyn Any>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
let mut reports = vec![];
let content = cursor.source.content();
let mut end_cursor = cursor.clone();
loop {
if let Some(captures) = self.start_re.captures_at(content, end_cursor.pos) {
if captures.get(0).unwrap().start() != end_cursor.pos {
break;
}
// Advance cursor
end_cursor = end_cursor.at(captures.get(0).unwrap().end());
// Properties
let mut offset = None;
let mut bullet = None;
if let Some(properties) = captures.get(2) {
match self.parse_properties(properties) {
Err(err) => {
reports.push(
Report::build(
ReportKind::Warning,
cursor.source.clone(),
properties.start(),
)
.with_message("Invalid List Entry Properties")
.with_label(
Label::new((cursor.source.clone(), properties.range()))
.with_message(err)
.with_color(state.parser.colors().warning),
)
.finish(),
);
break;
}
Ok(props) => (offset, bullet) = props,
}
}
// Get bullet from previous entry if it exists
if bullet.is_none() {
bullet = document
.last_element::<ListEntry>()
.and_then(|prev| prev.bullet.clone())
}
// Depth
let depth = ListRule::parse_depth(
captures.get(1).unwrap().as_str(),
document,
offset.unwrap_or(usize::MAX),
);
// Content
let entry_start = captures.get(0).unwrap().start();
let mut entry_content = captures.get(3).unwrap().as_str().to_string();
let mut spacing: Option<(Range<usize>, &str)> = None;
while let Some(captures) = self.continue_re.captures_at(content, end_cursor.pos) {
// Break if next element is another entry
if captures.get(0).unwrap().start() != end_cursor.pos
|| captures
.get(2)
.unwrap()
.as_str()
.find(['*', '-'])
== Some(0)
{ {
break; fn name(&self) -> &'static str { "List" }
}
// Advance cursor fn next_match(&self, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
end_cursor = end_cursor.at(captures.get(0).unwrap().end()); self.start_re.find_at(cursor.source.content(), cursor.pos)
.map_or(None,
|m| Some((m.start(), Box::new([false;0]) as Box<dyn Any>)) )
}
fn on_match<'a>(&self, parser: &dyn Parser, document: &'a dyn Document<'a>, cursor: Cursor, _match_data: Option<Box<dyn Any>>)
-> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
let mut reports = vec![];
let content = cursor.source.content();
let (end_cursor, numbering, source) = match self.start_re.captures_at(content, cursor.pos) {
None => panic!("Unknown error"),
Some(caps) => {
let mut end_pos = caps.get(0).unwrap().end();
let mut spacing = None; // Spacing used to continue list entry
loop {
// If another entry starts on the next line, don't continue matching
match self.next_match(&cursor.at(end_pos))
{
Some((pos, _)) => {
if pos == end_pos { break }
}
None => {},
}
// Continue matching as current entry
match self.continue_re.captures_at(content, end_pos) {
None => break,
Some(continue_caps) => {
if continue_caps.get(0).unwrap().start() != end_pos { break }
// Get the spacing
let cap_spacing = continue_caps.get(1).unwrap();
match &spacing {
None => spacing = Some(cap_spacing.range()),
Some(spacing) => 'some: {
if content[cap_spacing.range()] == content[spacing.clone()] { break 'some }
// Spacing
let current_spacing = captures.get(1).unwrap().as_str();
if let Some(spacing) = &spacing {
if spacing.1 != current_spacing {
reports.push( reports.push(
Report::build( Report::build(ReportKind::Warning, cursor.source.clone(), continue_caps.get(1).unwrap().start())
ReportKind::Warning,
cursor.source.clone(),
captures.get(1).unwrap().start(),
)
.with_message("Invalid list entry spacing") .with_message("Invalid list entry spacing")
.with_label( .with_label(
Label::new(( Label::new((cursor.source.clone(), cap_spacing.range()))
cursor.source.clone(), .with_message("Spacing for list entries must match")
captures.get(1).unwrap().range(), .with_color(parser.colors().warning))
))
.with_message("Spacing for list entries do not match")
.with_color(state.parser.colors().warning),
)
.with_label( .with_label(
Label::new((cursor.source.clone(), spacing.0.clone())) Label::new((cursor.source.clone(), spacing.clone()))
.with_message("Previous spacing") .with_message("Previous spacing")
.with_color(state.parser.colors().warning), .with_color(parser.colors().warning))
) .finish());
.finish(), },
); }
end_pos = continue_caps.get(0).unwrap().end();
}
} }
} else {
spacing = Some((captures.get(1).unwrap().range(), current_spacing));
} }
entry_content += " "; let start_pos = caps.get(1).unwrap().end();
entry_content += captures.get(2).unwrap().as_str(); let source = VirtualSource::new(
} Token::new(start_pos..end_pos, cursor.source.clone()),
// Parse entry content
let token = Token::new(entry_start..end_cursor.pos, end_cursor.source.clone());
let entry_src = Rc::new(VirtualSource::new(
token.clone(),
"List Entry".to_string(), "List Entry".to_string(),
entry_content, content.as_str()[start_pos..end_pos].to_string(),
));
let parsed_content = match util::parse_paragraph(state, entry_src, document) {
Err(err) => {
reports.push(
Report::build(ReportKind::Warning, token.source(), token.range.start)
.with_message("Unable to Parse List Entry")
.with_label(
Label::new((token.source(), token.range.clone()))
.with_message(err)
.with_color(state.parser.colors().warning),
)
.finish(),
); );
break;
} (cursor.at(end_pos),
Ok(mut paragraph) => std::mem::take(&mut paragraph.content), ListRule::parse_depth(caps.get(1).unwrap().as_str(), document),
source)
},
}; };
if let Some(previous_depth) = document let parsed_entry = parser.parse(Rc::new(source), Some(document));
.last_element::<ListEntry>() let mut parsed_paragraph = parsed_entry.last_element_mut::<Paragraph>().unwrap(); // Extract content from paragraph
.map(|ent| ent.numbering.clone()) let entry = ListEntry::new(
{ Token::new(cursor.pos..end_cursor.pos, cursor.source.clone()),
ListRule::push_markers(&token, state, document, &previous_depth, &depth); numbering,
} else { std::mem::replace(&mut parsed_paragraph.content, Vec::new())
ListRule::push_markers(&token, state, document, &vec![], &depth);
}
state.push(
document,
Box::new(ListEntry {
location: Token::new(
entry_start..end_cursor.pos,
end_cursor.source.clone(),
),
numbering: depth,
content: parsed_content,
bullet,
}),
); );
} else {
break;
}
}
// Close all lists // Get previous list, if none insert a new list
let current = document let mut list = match document.last_element_mut::<List>()
.last_element::<ListEntry>() {
.map(|ent| ent.numbering.clone()) Some(last) => last,
.unwrap(); None => {
let token = Token::new(end_cursor.pos..end_cursor.pos, end_cursor.source.clone()); parser.push(document,
ListRule::push_markers(&token, state, document, &current, &Vec::new()); Box::new(List::new(
Token::new(cursor.pos..end_cursor.pos, cursor.source.clone()))));
document.last_element_mut::<List>().unwrap()
}
};
list.push(entry);
(end_cursor, reports) (end_cursor, reports)
} }
}
// TODO
#[cfg(test)] fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
mod tests {
use super::*;
use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
#[test]
fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
* 1
*[offset=7] 2
continued
* 3
* New list
*-[bullet=(*)] A
*- B
* Back
*-* More nested
"#
.to_string(),
None,
));
let parser = LangParser::default();
let state = ParserState::new(&parser, None);
let (doc, _) = parser.parse(state, source, None);
validate_document!(doc.content().borrow(), 0,
ListMarker { numbered == false, kind == MarkerKind::Open };
ListEntry { numbering == vec![(false, 1)] } {
Text { content == "1" };
};
ListEntry { numbering == vec![(false, 7)] } {
Text { content == "2 continued" };
};
ListEntry { numbering == vec![(false, 8)] } {
Text { content == "3" };
};
ListMarker { numbered == false, kind == MarkerKind::Close };
Paragraph;
ListMarker { numbered == false, kind == MarkerKind::Open };
ListEntry { numbering == vec![(false, 1)] } {
Text { content == "New list" };
};
ListMarker { numbered == true, kind == MarkerKind::Open };
ListEntry { numbering == vec![(false, 2), (true, 1)], bullet == Some("(*)".to_string()) } {
Text { content == "A" };
};
ListEntry { numbering == vec![(false, 2), (true, 2)], bullet == Some("(*)".to_string()) } {
Text { content == "B" };
};
ListMarker { numbered == true, kind == MarkerKind::Close };
ListEntry { numbering == vec![(false, 2)] } {
Text { content == "Back" };
};
ListMarker { numbered == true, kind == MarkerKind::Open };
ListMarker { numbered == false, kind == MarkerKind::Open };
ListEntry { numbering == vec![(false, 3), (true, 1), (false, 1)] } {
Text { content == "More nested" };
};
ListMarker { numbered == false, kind == MarkerKind::Close };
ListMarker { numbered == true, kind == MarkerKind::Close };
ListMarker { numbered == false, kind == MarkerKind::Close };
);
}
} }

@ -21,7 +21,7 @@ use crate::document::element::ElemKind;
use crate::document::element::Element; use crate::document::element::Element;
use crate::document::element::ReferenceableElement; use crate::document::element::ReferenceableElement;
use crate::document::references::validate_refname; use crate::document::references::validate_refname;
use crate::parser::parser::ParserState; use crate::parser::parser::Parser;
use crate::parser::parser::ReportColors; use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule; use crate::parser::rule::RegexRule;
use crate::parser::source::Source; use crate::parser::source::Source;
@ -35,7 +35,7 @@ use crate::parser::util::PropertyMapError;
use crate::parser::util::PropertyParser; use crate::parser::util::PropertyParser;
use super::paragraph::Paragraph; use super::paragraph::Paragraph;
use super::reference::InternalReference; use super::reference::Reference;
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub enum MediaType { pub enum MediaType {
@ -70,16 +70,21 @@ impl Element for Media {
fn element_name(&self) -> &'static str { "Media" } fn element_name(&self) -> &'static str { "Media" }
fn to_string(&self) -> String { format!("{self:#?}") }
fn as_container(&self) -> Option<&dyn ContainerElement> { Some(self) } fn as_container(&self) -> Option<&dyn ContainerElement> { Some(self) }
fn compile(&self, compiler: &Compiler, document: &dyn Document, cursor: usize) -> Result<String, String> { fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String> {
match compiler.target() { match compiler.target() {
Target::HTML => { Target::HTML => {
let mut result = String::new(); let mut result = String::new();
result.push_str("<div class=\"media\">"); result.push_str("<div class=\"media\">");
for medium in &self.media { for medium in &self.media {
result += medium.compile(compiler, document, cursor+result.len())?.as_str(); match medium.compile(compiler, document) {
Ok(r) => result.push_str(r.as_str()),
Err(e) => return Err(e),
}
} }
result.push_str("</div>"); result.push_str("</div>");
@ -130,25 +135,25 @@ impl Element for Medium {
fn element_name(&self) -> &'static str { "Medium" } fn element_name(&self) -> &'static str { "Medium" }
fn to_string(&self) -> String { format!("{self:#?}") }
fn as_referenceable(&self) -> Option<&dyn ReferenceableElement> { Some(self) } fn as_referenceable(&self) -> Option<&dyn ReferenceableElement> { Some(self) }
fn compile(&self, compiler: &Compiler, document: &dyn Document, cursor: usize) -> Result<String, String> { fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String> {
match compiler.target() { match compiler.target() {
Target::HTML => { Target::HTML => {
let mut result = String::new(); let mut result = String::new();
// Reference
let elemref = document.get_reference(self.reference.as_str()).unwrap();
let refcount = compiler.reference_id(document, elemref);
let width = self let width = self
.width .width
.as_ref() .as_ref()
.map_or(String::new(), |w| format!(r#" style="width:{w};""#)); .map_or(String::new(), |w| format!(r#" style="width:{w};""#));
result.push_str(format!(r#"<div id="{}" class="medium"{width}>"#, self.refid(compiler, refcount)).as_str()); result.push_str(format!(r#"<div class="medium"{width}>"#).as_str());
result += match self.media_type { result += match self.media_type {
MediaType::IMAGE => format!(r#"<a href="{0}"><img src="{0}"></a>"#, self.uri), MediaType::IMAGE => format!(r#"<a href="{0}"><img src="{0}"></a>"#, self.uri),
MediaType::VIDEO => format!(r#"<video controls{width}><source src="{0}"></video>"#, self.uri MediaType::VIDEO => format!(
r#"<video controls{width}><source src="{0}"></video>"#,
self.uri
), ),
MediaType::AUDIO => { MediaType::AUDIO => {
format!(r#"<audio controls src="{0}"{width}></audio>"#, self.uri) format!(r#"<audio controls src="{0}"{width}></audio>"#, self.uri)
@ -158,17 +163,26 @@ impl Element for Medium {
let caption = self let caption = self
.caption .caption
.as_ref().map(|cap| format!( .as_ref()
.and_then(|cap| {
Some(format!(
" {}", " {}",
Compiler::sanitize(compiler.target(), cap.as_str()) Compiler::sanitize(compiler.target(), cap.as_str())
)) ))
.unwrap_or_default(); })
.unwrap_or(String::new());
// Reference
let elemref = document.get_reference(self.reference.as_str()).unwrap();
let refcount = compiler.reference_id(document, elemref);
result.push_str( result.push_str(
format!(r#"<p class="medium-refname">({refcount}){caption}</p>"#).as_str(), format!(r#"<p class="medium-refname">({refcount}){caption}</p>"#).as_str(),
); );
if let Some(paragraph) = self.description.as_ref() { if let Some(paragraph) = self.description.as_ref() {
result += paragraph.compile(compiler, document, cursor+result.len())?.as_str(); match paragraph.compile(compiler, document) {
Ok(res) => result.push_str(res.as_str()),
Err(err) => return Err(err),
}
} }
result.push_str("</div>"); result.push_str("</div>");
@ -188,7 +202,7 @@ impl ReferenceableElement for Medium {
&self, &self,
compiler: &Compiler, compiler: &Compiler,
_document: &dyn Document, _document: &dyn Document,
reference: &InternalReference, reference: &Reference,
refid: usize, refid: usize,
) -> Result<String, String> { ) -> Result<String, String> {
match compiler.target() { match compiler.target() {
@ -200,11 +214,7 @@ impl ReferenceableElement for Medium {
// TODO Handle other kind of media // TODO Handle other kind of media
match self.media_type { match self.media_type {
MediaType::IMAGE => Ok(format!( MediaType::IMAGE => Ok(format!(
"<a class=\"medium-ref\" href=\"#medium-{refid}\">{caption}<img src=\"{}\"></a>", r#"<a class="medium-ref">{caption}<img src="{}"></a>"#,
self.uri
)),
MediaType::VIDEO => Ok(format!(
"<a class=\"medium-ref\" href=\"#medium-{refid}\">{caption}<video><source src=\"{0}\"></video></a>",
self.uri self.uri
)), )),
_ => todo!(""), _ => todo!(""),
@ -213,13 +223,8 @@ impl ReferenceableElement for Medium {
_ => todo!(""), _ => todo!(""),
} }
} }
fn refid(&self, _compiler: &Compiler, refid: usize) -> String {
format!("medium-{refid}")
}
} }
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::media")]
pub struct MediaRule { pub struct MediaRule {
re: [Regex; 1], re: [Regex; 1],
properties: PropertyParser, properties: PropertyParser,
@ -251,7 +256,7 @@ impl MediaRule {
.multi_line(true) .multi_line(true)
.build() .build()
.unwrap()], .unwrap()],
properties: PropertyParser { properties: props }, properties: PropertyParser::new(props),
} }
} }
@ -324,14 +329,13 @@ impl MediaRule {
impl RegexRule for MediaRule { impl RegexRule for MediaRule {
fn name(&self) -> &'static str { "Media" } fn name(&self) -> &'static str { "Media" }
fn previous(&self) -> Option<&'static str> { Some("Graphviz") }
fn regexes(&self) -> &[regex::Regex] { &self.re } fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match<'a>( fn on_regex_match<'a>(
&self, &self,
_: usize, _: usize,
state: &ParserState, parser: &dyn Parser,
document: &'a (dyn Document<'a> + 'a), document: &'a (dyn Document<'a> + 'a),
token: Token, token: Token,
matches: Captures, matches: Captures,
@ -375,8 +379,7 @@ impl RegexRule for MediaRule {
}; };
// Properties // Properties
let properties = match self.parse_properties(state.parser.colors(), &token, &matches.get(3)) let properties = match self.parse_properties(parser.colors(), &token, &matches.get(3)) {
{
Ok(pm) => pm, Ok(pm) => pm,
Err(report) => { Err(report) => {
reports.push(report); reports.push(report);
@ -400,10 +403,10 @@ impl RegexRule for MediaRule {
Label::new((token.source().clone(), token.range.clone())) Label::new((token.source().clone(), token.range.clone()))
.with_message(format!( .with_message(format!(
"Property `type: {}` cannot be converted: {}", "Property `type: {}` cannot be converted: {}",
prop.fg(state.parser.colors().info), prop.fg(parser.colors().info),
err.fg(state.parser.colors().error) err.fg(parser.colors().error)
)) ))
.with_color(state.parser.colors().warning), .with_color(parser.colors().warning),
) )
.finish(), .finish(),
); );
@ -419,7 +422,7 @@ impl RegexRule for MediaRule {
token.start() + 1..token.end(), token.start() + 1..token.end(),
)) ))
.with_message(format!("{err}. Required because mediatype could not be detected")) .with_message(format!("{err}. Required because mediatype could not be detected"))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -433,13 +436,15 @@ impl RegexRule for MediaRule {
.get("width", |_, value| -> Result<String, ()> { .get("width", |_, value| -> Result<String, ()> {
Ok(value.clone()) Ok(value.clone())
}) })
.ok().map(|(_, s)| s); .ok()
.and_then(|(_, s)| Some(s));
let caption = properties let caption = properties
.get("caption", |_, value| -> Result<String, ()> { .get("caption", |_, value| -> Result<String, ()> {
Ok(value.clone()) Ok(value.clone())
}) })
.ok().map(|(_, value)| value); .ok()
.and_then(|(_, value)| Some(value));
let description = match matches.get(4) { let description = match matches.get(4) {
Some(content) => { Some(content) => {
@ -451,7 +456,7 @@ impl RegexRule for MediaRule {
if source.content().is_empty() { if source.content().is_empty() {
None None
} else { } else {
match parse_paragraph(state, source, document) { match parse_paragraph(parser, source, document) {
Ok(paragraph) => Some(*paragraph), Ok(paragraph) => Some(*paragraph),
Err(err) => { Err(err) => {
reports.push( reports.push(
@ -462,7 +467,7 @@ impl RegexRule for MediaRule {
.with_message(format!( .with_message(format!(
"Could not parse description: {err}" "Could not parse description: {err}"
)) ))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -477,7 +482,7 @@ impl RegexRule for MediaRule {
let mut group = match document.last_element_mut::<Media>() { let mut group = match document.last_element_mut::<Media>() {
Some(group) => group, Some(group) => group,
None => { None => {
state.push( parser.push(
document, document,
Box::new(Media { Box::new(Media {
location: token.clone(), location: token.clone(),
@ -504,7 +509,7 @@ impl RegexRule for MediaRule {
.with_label( .with_label(
Label::new((token.source().clone(), token.range.clone())) Label::new((token.source().clone(), token.range.clone()))
.with_message(err) .with_message(err)
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -512,12 +517,15 @@ impl RegexRule for MediaRule {
reports reports
} }
fn lua_bindings<'lua>(&self, _lua: &'lua mlua::Lua) -> Vec<(String, mlua::Function<'lua>)> {
vec![]
}
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::parser::langparser::LangParser; use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile; use crate::parser::source::SourceFile;
use super::*; use super::*;
@ -546,7 +554,7 @@ mod tests {
None, None,
)); ));
let parser = LangParser::default(); let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None); let doc = parser.parse(source, None);
let borrow = doc.content().borrow(); let borrow = doc.content().borrow();
let group = borrow.first().as_ref().unwrap().as_container().unwrap(); let group = borrow.first().as_ref().unwrap().as_container().unwrap();

@ -1,20 +1,17 @@
-pub mod code;
-pub mod comment;
-pub mod graphviz;
-pub mod import;
-pub mod layout;
-pub mod link;
-pub mod list;
-pub mod media;
-pub mod paragraph;
-pub mod raw;
-pub mod reference;
-pub mod script;
-pub mod section;
-pub mod style;
-pub mod tex;
+pub mod registrar;
 pub mod text;
+pub mod comment;
+pub mod paragraph;
 pub mod variable;
-pub mod elemstyle;
-pub mod customstyle;
-pub mod blockquote;
+pub mod import;
+pub mod script;
+pub mod list;
+pub mod style;
+pub mod section;
+pub mod link;
+pub mod code;
+pub mod tex;
+pub mod graphviz;
+pub mod raw;
+pub mod media;
+pub mod reference;

@ -3,6 +3,8 @@ use std::ops::Range;
use std::rc::Rc; use std::rc::Rc;
use ariadne::Report; use ariadne::Report;
use mlua::Function;
use mlua::Lua;
use regex::Regex; use regex::Regex;
use crate::compiler::compiler::Compiler; use crate::compiler::compiler::Compiler;
@ -11,7 +13,7 @@ use crate::document::document::Document;
use crate::document::element::ContainerElement; use crate::document::element::ContainerElement;
use crate::document::element::ElemKind; use crate::document::element::ElemKind;
use crate::document::element::Element; use crate::document::element::Element;
use crate::parser::parser::ParserState; use crate::parser::parser::Parser;
use crate::parser::rule::Rule; use crate::parser::rule::Rule;
use crate::parser::source::Cursor; use crate::parser::source::Cursor;
use crate::parser::source::Source; use crate::parser::source::Source;
@ -48,28 +50,43 @@ impl Element for Paragraph {
fn element_name(&self) -> &'static str { "Paragraph" } fn element_name(&self) -> &'static str { "Paragraph" }
fn compile(&self, compiler: &Compiler, document: &dyn Document, cursor: usize) -> Result<String, String> { fn to_string(&self) -> String { format!("{:#?}", self) }
fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String> {
if self.content.is_empty() { if self.content.is_empty() {
return Ok(String::new()); return Ok(String::new());
} }
match compiler.target() { match compiler.target() {
Target::HTML => { Target::HTML => {
if self.content.is_empty() {
return Ok(String::new());
}
let mut result = String::new(); let mut result = String::new();
//if prev.is_none() || prev.unwrap().downcast_ref::<Paragraph>().is_none()
{
result.push_str("<p>"); result.push_str("<p>");
for elems in &self.content {
result += elems.compile(compiler, document, cursor+result.len())?.as_str();
} }
//else
//{ result.push_str(" "); }
let err = self.content.iter().try_for_each(|elem| {
match elem.compile(compiler, document) {
Err(e) => return Err(e),
Ok(content) => {
result.push_str(content.as_str());
Ok(())
}
}
});
//if next.is_none() || next.unwrap().downcast_ref::<Paragraph>().is_none()
{
result.push_str("</p>"); result.push_str("</p>");
Ok(result)
} }
_ => todo!("Unimplemented compiler"),
match err {
Err(e) => Err(e),
Ok(()) => Ok(result),
}
}
Target::LATEX => todo!("Unimplemented compiler"),
} }
} }
@ -83,15 +100,11 @@ impl ContainerElement for Paragraph {
if elem.location().source() == self.location().source() { if elem.location().source() == self.location().source() {
self.location.range = self.location.start()..elem.location().end(); self.location.range = self.location.start()..elem.location().end();
} }
if elem.kind() == ElemKind::Block {
return Err("Attempted to push block element inside a paragraph".to_string());
}
self.content.push(elem); self.content.push(elem);
Ok(()) Ok(())
} }
} }
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::paragraph")]
pub struct ParagraphRule { pub struct ParagraphRule {
re: Regex, re: Regex,
} }
@ -105,27 +118,27 @@ impl ParagraphRule {
} }
impl Rule for ParagraphRule { impl Rule for ParagraphRule {
fn name(&self) -> &'static str { "Paragraph" } fn name(&self) -> &'static str { "Paragraphing" }
fn previous(&self) -> Option<&'static str> { Some("Comment") }
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> { fn next_match(&self, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
self.re self.re
.find_at(cursor.source.content(), cursor.pos).map(|m| (m.start(), Box::new([false; 0]) as Box<dyn Any>)) .find_at(cursor.source.content(), cursor.pos)
.and_then(|m| Some((m.start(), Box::new([false; 0]) as Box<dyn Any>)))
} }
fn on_match( fn on_match(
&self, &self,
state: &ParserState, parser: &dyn Parser,
document: &dyn Document, document: &dyn Document,
cursor: Cursor, cursor: Cursor,
_match_data: Box<dyn Any>, _match_data: Option<Box<dyn Any>>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) { ) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
let end_cursor = match self.re.captures_at(cursor.source.content(), cursor.pos) { let end_cursor = match self.re.captures_at(cursor.source.content(), cursor.pos) {
None => panic!("Unknown error"), None => panic!("Unknown error"),
Some(capture) => cursor.at(capture.get(0).unwrap().end() - 1), Some(capture) => cursor.at(capture.get(0).unwrap().end() - 1),
}; };
state.push( parser.push(
document, document,
Box::new(Paragraph { Box::new(Paragraph {
location: Token::new(cursor.pos..end_cursor.pos, cursor.source.clone()), location: Token::new(cursor.pos..end_cursor.pos, cursor.source.clone()),
@ -135,49 +148,7 @@ impl Rule for ParagraphRule {
(end_cursor, Vec::new()) (end_cursor, Vec::new())
} }
}
// TODO
#[cfg(test)] fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
mod tests {
use crate::elements::paragraph::Paragraph;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
fn parse() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
First paragraph
Second line
Second paragraph\
<- literal \\n
Last paragraph
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Text { content == "First paragraph Second line" };
};
Paragraph {
Text { content == "Second paragraph\n<- literal \\n" };
};
Paragraph {
Text { content == "Last paragraph " };
};
);
}
} }

@ -3,7 +3,7 @@ use crate::document::document::Document;
use crate::document::element::ElemKind; use crate::document::element::ElemKind;
use crate::document::element::Element; use crate::document::element::Element;
use crate::lua::kernel::CTX; use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState; use crate::parser::parser::Parser;
use crate::parser::rule::RegexRule; use crate::parser::rule::RegexRule;
use crate::parser::source::Source; use crate::parser::source::Source;
use crate::parser::source::Token; use crate::parser::source::Token;
@ -27,10 +27,20 @@ use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
#[derive(Debug)] #[derive(Debug)]
pub struct Raw { struct Raw {
pub location: Token, pub(self) location: Token,
pub kind: ElemKind, pub(self) kind: ElemKind,
pub content: String, pub(self) content: String,
}
impl Raw {
fn new(location: Token, kind: ElemKind, content: String) -> Self {
Self {
location,
kind,
content,
}
}
} }
impl Element for Raw { impl Element for Raw {
@ -39,12 +49,13 @@ impl Element for Raw {
fn element_name(&self) -> &'static str { "Raw" } fn element_name(&self) -> &'static str { "Raw" }
fn compile(&self, _compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> { fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, _compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
Ok(self.content.clone()) Ok(self.content.clone())
} }
} }
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::raw")]
pub struct RawRule { pub struct RawRule {
re: [Regex; 1], re: [Regex; 1],
properties: PropertyParser, properties: PropertyParser,
@ -66,21 +77,20 @@ impl RawRule {
Regex::new(r"\{\?(?:\[((?:\\.|[^\[\]\\])*?)\])?(?:((?:\\.|[^\\\\])*?)(\?\}))?") Regex::new(r"\{\?(?:\[((?:\\.|[^\[\]\\])*?)\])?(?:((?:\\.|[^\\\\])*?)(\?\}))?")
.unwrap(), .unwrap(),
], ],
properties: PropertyParser { properties: props }, properties: PropertyParser::new(props),
} }
} }
} }
impl RegexRule for RawRule { impl RegexRule for RawRule {
fn name(&self) -> &'static str { "Raw" } fn name(&self) -> &'static str { "Raw" }
fn previous(&self) -> Option<&'static str> { Some("Variable Substitution") }
fn regexes(&self) -> &[regex::Regex] { &self.re } fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match( fn on_regex_match(
&self, &self,
_index: usize, _index: usize,
state: &ParserState, parser: &dyn Parser,
document: &dyn Document, document: &dyn Document,
token: Token, token: Token,
matches: Captures, matches: Captures,
@ -97,10 +107,10 @@ impl RegexRule for RawRule {
Label::new((token.source().clone(), token.range.clone())) Label::new((token.source().clone(), token.range.clone()))
.with_message(format!( .with_message(format!(
"Missing terminating `{}` after first `{}`", "Missing terminating `{}` after first `{}`",
"?}".fg(state.parser.colors().info), "?}".fg(parser.colors().info),
"{?".fg(state.parser.colors().info) "{?".fg(parser.colors().info)
)) ))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -117,7 +127,7 @@ impl RegexRule for RawRule {
.with_label( .with_label(
Label::new((token.source().clone(), content.range())) Label::new((token.source().clone(), content.range()))
.with_message("Raw code is empty") .with_message("Raw code is empty")
.with_color(state.parser.colors().warning), .with_color(parser.colors().warning),
) )
.finish(), .finish(),
); );
@ -136,7 +146,7 @@ impl RegexRule for RawRule {
.with_label( .with_label(
Label::new((token.source().clone(), token.range.clone())) Label::new((token.source().clone(), token.range.clone()))
.with_message(format!("Raw code is missing properties: {e}")) .with_message(format!("Raw code is missing properties: {e}"))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -154,7 +164,7 @@ impl RegexRule for RawRule {
.with_label( .with_label(
Label::new((token.source().clone(), props.range())) Label::new((token.source().clone(), props.range()))
.with_message(e) .with_message(e)
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -178,10 +188,10 @@ impl RegexRule for RawRule {
Label::new((token.source().clone(), token.range.clone())) Label::new((token.source().clone(), token.range.clone()))
.with_message(format!( .with_message(format!(
"Property `kind: {}` cannot be converted: {}", "Property `kind: {}` cannot be converted: {}",
prop.fg(state.parser.colors().info), prop.fg(parser.colors().info),
err.fg(state.parser.colors().error) err.fg(parser.colors().error)
)) ))
.with_color(state.parser.colors().warning), .with_color(parser.colors().warning),
) )
.finish(), .finish(),
); );
@ -198,9 +208,9 @@ impl RegexRule for RawRule {
)) ))
.with_message(format!( .with_message(format!(
"Property `{}` is missing", "Property `{}` is missing",
err.fg(state.parser.colors().info) err.fg(parser.colors().info)
)) ))
.with_color(state.parser.colors().warning), .with_color(parser.colors().warning),
) )
.finish(), .finish(),
); );
@ -209,7 +219,7 @@ impl RegexRule for RawRule {
}, },
}; };
state.push( parser.push(
document, document,
Box::new(Raw { Box::new(Raw {
location: token.clone(), location: token.clone(),
@ -221,7 +231,7 @@ impl RegexRule for RawRule {
reports reports
} }
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![]; let mut bindings = vec![];
bindings.push(( bindings.push((
@ -244,7 +254,7 @@ impl RegexRule for RawRule {
CTX.with_borrow(|ctx| { CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| { ctx.as_ref().map(|ctx| {
ctx.state.push( ctx.parser.push(
ctx.document, ctx.document,
Box::new(Raw { Box::new(Raw {
location: ctx.location.clone(), location: ctx.location.clone(),
@ -267,56 +277,31 @@ impl RegexRule for RawRule {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::elements::paragraph::Paragraph; use crate::compiler::compiler::Target;
use crate::elements::text::Text;
use crate::parser::langparser::LangParser; use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile; use crate::parser::source::SourceFile;
use crate::validate_document;
#[test] #[test]
fn parser() { fn raw_tests() {
let source = Rc::new(SourceFile::with_content( let source = Rc::new(SourceFile::with_content(
"".to_string(), "".to_string(),
r#" r#"
Break{?[kind=block] Raw?}NewParagraph{?<b>?} Break{?[kind=block]<RAW>?}NewParagraph
"# "#
.to_string(), .to_string(),
None, None,
)); ));
let parser = LangParser::default(); let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None); let compiler = Compiler::new(Target::HTML, None);
let doc = parser.parse(source, None);
validate_document!(doc.content().borrow(), 0, let borrow = doc.content().borrow();
Paragraph; let found = borrow
Raw { kind == ElemKind::Block, content == "Raw" }; .iter()
Paragraph { .filter_map(|e| e.downcast_ref::<Raw>())
Text; .collect::<Vec<_>>();
Raw { kind == ElemKind::Inline, content == "<b>" };
};
);
}
#[test] assert_eq!(found[0].compile(&compiler, &*doc), Ok("<RAW>".to_string()));
fn lua() { //assert_eq!(found[1].compile(&compiler, &*doc), Ok("<RAW>".to_string()));
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
Break%<nml.raw.push("block", "Raw")>%NewParagraph%<nml.raw.push("inline", "<b>")>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph;
Raw { kind == ElemKind::Block, content == "Raw" };
Paragraph {
Text;
Raw { kind == ElemKind::Inline, content == "<b>" };
};
);
} }
} }


@ -2,67 +2,54 @@ use std::collections::HashMap;
use std::ops::Range; use std::ops::Range;
use std::rc::Rc; use std::rc::Rc;
use ariadne::Fmt;
use ariadne::Label; use ariadne::Label;
use ariadne::Report; use ariadne::Report;
use ariadne::ReportKind; use ariadne::ReportKind;
use reference_style::ExternalReferenceStyle;
use regex::Captures; use regex::Captures;
use regex::Match; use regex::Match;
use regex::Regex; use regex::Regex;
use runtime_format::FormatArgs;
use runtime_format::FormatKey;
use runtime_format::FormatKeyError;
use crate::compiler::compiler::Compiler; use crate::compiler::compiler::Compiler;
use crate::compiler::compiler::Target; use crate::compiler::compiler::Target;
use crate::document::document::CrossReference;
use crate::document::document::Document; use crate::document::document::Document;
use crate::document::element::ElemKind; use crate::document::element::ElemKind;
use crate::document::element::Element; use crate::document::element::Element;
use crate::document::references::validate_refname; use crate::document::references::validate_refname;
use crate::parser::parser::ParserState; use crate::parser::parser::Parser;
use crate::parser::parser::ReportColors; use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule; use crate::parser::rule::RegexRule;
use crate::parser::source::Source; use crate::parser::source::Source;
use crate::parser::source::Token; use crate::parser::source::Token;
use crate::parser::style::StyleHolder;
use crate::parser::util; use crate::parser::util;
use crate::parser::util::Property; use crate::parser::util::Property;
use crate::parser::util::PropertyMap; use crate::parser::util::PropertyMap;
use crate::parser::util::PropertyParser; use crate::parser::util::PropertyParser;
#[derive(Debug)] #[derive(Debug)]
pub struct InternalReference { pub struct Reference {
pub(self) location: Token, pub(self) location: Token,
pub(self) refname: String, pub(self) refname: String,
pub(self) caption: Option<String>, pub(self) caption: Option<String>,
} }
impl InternalReference { impl Reference {
pub fn caption(&self) -> Option<&String> { self.caption.as_ref() } pub fn caption(&self) -> Option<&String> { self.caption.as_ref() }
} }
impl Element for InternalReference { impl Element for Reference {
fn location(&self) -> &Token { &self.location } fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Inline } fn kind(&self) -> ElemKind { ElemKind::Inline }
fn element_name(&self) -> &'static str { "Reference" } fn element_name(&self) -> &'static str { "Reference" }
fn compile( fn to_string(&self) -> String { format!("{self:#?}") }
&self,
compiler: &Compiler, fn compile(&self, compiler: &Compiler, document: &dyn Document) -> Result<String, String> {
document: &dyn Document,
_cursor: usize,
) -> Result<String, String> {
match compiler.target() { match compiler.target() {
Target::HTML => { Target::HTML => {
let elemref = document let elemref = document.get_reference(self.refname.as_str()).unwrap();
.get_reference(self.refname.as_str())
.ok_or(format!(
"Unable to find reference `{}` in current document",
self.refname
))?;
let elem = document.get_from_reference(&elemref).unwrap(); let elem = document.get_from_reference(&elemref).unwrap();
elem.compile_reference( elem.compile_reference(
@ -77,89 +64,6 @@ impl Element for InternalReference {
} }
} }
#[derive(Debug)]
pub struct ExternalReference {
pub(self) location: Token,
pub(self) reference: CrossReference,
pub(self) caption: Option<String>,
pub(self) style: Rc<reference_style::ExternalReferenceStyle>,
}
struct FmtPair<'a>(Target, &'a ExternalReference);
impl FormatKey for FmtPair<'_> {
fn fmt(&self, key: &str, f: &mut std::fmt::Formatter<'_>) -> Result<(), FormatKeyError> {
match &self.1.reference {
CrossReference::Unspecific(refname) => match key {
"refname" => write!(f, "{}", Compiler::sanitize(self.0, refname))
.map_err(FormatKeyError::Fmt),
_ => Err(FormatKeyError::UnknownKey),
},
CrossReference::Specific(refdoc, refname) => match key {
"refdoc" => {
write!(f, "{}", Compiler::sanitize(self.0, refdoc)).map_err(FormatKeyError::Fmt)
}
"refname" => write!(f, "{}", Compiler::sanitize(self.0, refname))
.map_err(FormatKeyError::Fmt),
_ => Err(FormatKeyError::UnknownKey),
},
}
}
}
impl Element for ExternalReference {
fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Inline }
fn element_name(&self) -> &'static str { "Reference" }
fn compile(
&self,
compiler: &Compiler,
_document: &dyn Document,
cursor: usize,
) -> Result<String, String> {
match compiler.target() {
Target::HTML => {
let mut result = "<a href=\"".to_string();
// Link position
let crossreference_pos = cursor + result.len();
if let Some(caption) = &self.caption {
result +=
format!("\">{}</a>", Compiler::sanitize(Target::HTML, caption)).as_str();
} else {
// Use style
let fmt_pair = FmtPair(compiler.target(), self);
let format_string = match &self.reference {
CrossReference::Unspecific(_) => Compiler::sanitize_format(
fmt_pair.0,
self.style.format_unspecific.as_str(),
),
CrossReference::Specific(_, _) => Compiler::sanitize_format(
fmt_pair.0,
self.style.format_specific.as_str(),
),
};
let args = FormatArgs::new(format_string.as_str(), &fmt_pair);
args.status().map_err(|err| {
format!("Failed to format ExternalReference style `{format_string}`: {err}")
})?;
result += format!("\">{}</a>", args.to_string()).as_str();
}
// Add crossreference
compiler.insert_crossreference(crossreference_pos, self.reference.clone());
Ok(result)
}
_ => todo!(""),
}
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::reference")]
pub struct ReferenceRule { pub struct ReferenceRule {
re: [Regex; 1], re: [Regex; 1],
properties: PropertyParser, properties: PropertyParser,
@ -177,8 +81,8 @@ impl ReferenceRule {
), ),
); );
Self { Self {
re: [Regex::new(r"§\{(.*?)\}(\[((?:\\.|[^\\\\])*?)\])?").unwrap()], re: [Regex::new(r"§\{(.*)\}(\[((?:\\.|[^\\\\])*?)\])?").unwrap()],
properties: PropertyParser { properties: props }, properties: PropertyParser::new(props),
} }
} }
@ -225,67 +129,56 @@ impl ReferenceRule {
impl RegexRule for ReferenceRule { impl RegexRule for ReferenceRule {
fn name(&self) -> &'static str { "Reference" } fn name(&self) -> &'static str { "Reference" }
fn previous(&self) -> Option<&'static str> { Some("Text") }
fn regexes(&self) -> &[regex::Regex] { &self.re } fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match<'a>( fn on_regex_match<'a>(
&self, &self,
_: usize, _: usize,
state: &ParserState, parser: &dyn Parser,
document: &'a (dyn Document<'a> + 'a), document: &'a (dyn Document<'a> + 'a),
token: Token, token: Token,
matches: Captures, matches: Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> { ) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![]; let mut reports = vec![];
let (refdoc, refname) = if let Some(refname_match) = matches.get(1) { let refname = match (
if let Some(sep) = refname_match.as_str().find('#') matches.get(1).unwrap(),
// External reference validate_refname(document, matches.get(1).unwrap().as_str(), false),
{ ) {
let refdoc = refname_match.as_str().split_at(sep).0; (m, Ok(refname)) => {
match validate_refname(document, refname_match.as_str().split_at(sep + 1).1, false) if document.get_reference(refname).is_none() {
{
Err(err) => {
reports.push( reports.push(
Report::build(ReportKind::Error, token.source(), refname_match.start()) Report::build(ReportKind::Error, token.source(), m.start())
.with_message("Invalid Reference Refname") .with_message("Uknown Reference Refname")
.with_label( .with_label(
Label::new((token.source().clone(), refname_match.range())) Label::new((token.source().clone(), m.range())).with_message(
.with_message(err), format!(
"Could not find element with reference: `{}`",
refname.fg(parser.colors().info)
),
),
) )
.finish(), .finish(),
); );
return reports; return reports;
} }
Ok(refname) => (Some(refdoc.to_string()), refname.to_string()), refname.to_string()
} }
} else (m, Err(err)) => {
// Internal reference
{
match validate_refname(document, refname_match.as_str(), false) {
Err(err) => {
reports.push( reports.push(
Report::build(ReportKind::Error, token.source(), refname_match.start()) Report::build(ReportKind::Error, token.source(), m.start())
.with_message("Invalid Reference Refname") .with_message("Invalid Reference Refname")
.with_label( .with_label(
Label::new((token.source().clone(), refname_match.range())) Label::new((token.source().clone(), m.range())).with_message(err),
.with_message(err),
) )
.finish(), .finish(),
); );
return reports; return reports;
} }
Ok(refname) => (None, refname.to_string()),
}
}
} else {
panic!("Unknown error")
}; };
// Properties // Properties
let properties = match self.parse_properties(state.parser.colors(), &token, &matches.get(3)) let properties = match self.parse_properties(parser.colors(), &token, &matches.get(3)) {
{
Ok(pm) => pm, Ok(pm) => pm,
Err(report) => { Err(report) => {
reports.push(report); reports.push(report);
@ -298,193 +191,21 @@ impl RegexRule for ReferenceRule {
Ok(value.clone()) Ok(value.clone())
}) })
.ok() .ok()
.map(|(_, s)| s); .and_then(|(_, s)| Some(s));
if let Some(refdoc) = refdoc { parser.push(
// Get style
let style = state
.shared
.styles
.borrow()
.current(reference_style::STYLE_KEY)
.downcast_rc::<reference_style::ExternalReferenceStyle>()
.unwrap();
// §{#refname}
if refdoc.is_empty() {
state.push(
document, document,
Box::new(ExternalReference { Box::new(Reference {
location: token,
reference: CrossReference::Unspecific(refname),
caption,
style,
}),
);
// §{docname#refname}
} else {
state.push(
document,
Box::new(ExternalReference {
location: token,
reference: CrossReference::Specific(refdoc, refname),
caption,
style,
}),
);
}
} else {
state.push(
document,
Box::new(InternalReference {
location: token, location: token,
refname, refname,
caption, caption,
}), }),
); );
}
reports reports
} }
fn register_styles(&self, holder: &mut StyleHolder) { fn lua_bindings<'lua>(&self, _lua: &'lua mlua::Lua) -> Vec<(String, mlua::Function<'lua>)> {
holder.set_current(Rc::new(ExternalReferenceStyle::default())); vec![]
}
}
mod reference_style {
use serde::Deserialize;
use serde::Serialize;
use crate::impl_elementstyle;
pub static STYLE_KEY: &str = "style.external_reference";
#[derive(Debug, Serialize, Deserialize)]
pub struct ExternalReferenceStyle {
pub format_unspecific: String,
pub format_specific: String,
}
impl Default for ExternalReferenceStyle {
fn default() -> Self {
Self {
format_unspecific: "(#{refname})".into(),
format_specific: "({refdoc}#{refname})".into(),
}
}
}
impl_elementstyle!(ExternalReferenceStyle, STYLE_KEY);
}
#[cfg(test)]
mod tests {
use crate::compiler::process::process_from_memory;
use crate::elements::paragraph::Paragraph;
use crate::elements::section::Section;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
pub fn parse_internal() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
#{ref} Referenceable section
§{ref}[caption=Section]
§{ref}[caption=Another]
§{ref2}[caption=Before]
#{ref2} Another section
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Section;
Paragraph {
InternalReference { refname == "ref", caption == Some("Section".to_string()) };
InternalReference { refname == "ref", caption == Some("Another".to_string()) };
InternalReference { refname == "ref2", caption == Some("Before".to_string()) };
};
Paragraph;
Section;
);
}
#[test]
pub fn parse_external() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
§{DocA#ref}[caption=Section]
§{DocB#ref}
§{#ref}[caption='ref' from any document]
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
ExternalReference { reference == CrossReference::Specific("DocA".into(), "ref".into()), caption == Some("Section".to_string()) };
ExternalReference { reference == CrossReference::Specific("DocB".into(), "ref".into()), caption == None::<String> };
ExternalReference { reference == CrossReference::Unspecific("ref".into()), caption == Some("'ref' from any document".to_string()) };
};
);
}
#[test]
pub fn test_external() {
let result = process_from_memory(
Target::HTML,
vec![
r#"
@html.page_title = 0
@compiler.output = a.html
#{ref} Referenceable section
"#
.into(),
r#"
@html.page_title = 1
@compiler.output = b.html
§{#ref}
§{a#ref}
#{ref2} Another Referenceable section
"#
.into(),
r#"
@html.page_title = 2
@@style.external_reference = {
"format_unspecific": "[UNSPECIFIC {refname}]",
"format_specific": "[SPECIFIC {refdoc}:{refname}]"
}
§{#ref}[caption=from 0]
§{#ref}
§{#ref2}[caption=from 1]
§{b#ref2}
"#
.into(),
],
)
.unwrap();
assert!(result[1].0.borrow().body.starts_with("<div class=\"content\"><p><a href=\"a.html#Referenceable_section\">(#ref)</a><a href=\"a.html#Referenceable_section\">(a#ref)</a></p>"));
assert!(result[2].0.borrow().body.starts_with("<div class=\"content\"><p><a href=\"a.html#Referenceable_section\">from 0</a><a href=\"a.html#Referenceable_section\">[UNSPECIFIC ref]</a><a href=\"b.html#Another_Referenceable_section\">from 1</a><a href=\"b.html#Another_Referenceable_section\">[SPECIFIC b:ref2]</a></p>"));
} }
} }

40
src/elements/registrar.rs Normal file

@ -0,0 +1,40 @@
use crate::parser::parser::Parser;
use super::code::CodeRule;
use super::comment::CommentRule;
use super::graphviz::GraphRule;
use super::import::ImportRule;
use super::link::LinkRule;
use super::list::ListRule;
use super::media::MediaRule;
use super::paragraph::ParagraphRule;
use super::raw::RawRule;
use super::script::ScriptRule;
use super::section::SectionRule;
use super::style::StyleRule;
use super::tex::TexRule;
use super::text::TextRule;
use super::variable::VariableRule;
use super::variable::VariableSubstitutionRule;
use super::reference::ReferenceRule;
pub fn register<P: Parser>(parser: &mut P) {
parser.add_rule(Box::new(CommentRule::new()), None).unwrap();
parser.add_rule(Box::new(ParagraphRule::new()), None).unwrap();
parser.add_rule(Box::new(ImportRule::new()), None).unwrap();
parser.add_rule(Box::new(ScriptRule::new()), None).unwrap();
parser.add_rule(Box::new(VariableRule::new()), None).unwrap();
parser.add_rule(Box::new(VariableSubstitutionRule::new()), None).unwrap();
parser.add_rule(Box::new(RawRule::new()), None).unwrap();
parser.add_rule(Box::new(ListRule::new()), None).unwrap();
parser.add_rule(Box::new(CodeRule::new()), None).unwrap();
parser.add_rule(Box::new(TexRule::new()), None).unwrap();
parser.add_rule(Box::new(GraphRule::new()), None).unwrap();
parser.add_rule(Box::new(MediaRule::new()), None).unwrap();
parser.add_rule(Box::new(StyleRule::new()), None).unwrap();
parser.add_rule(Box::new(SectionRule::new()), None).unwrap();
parser.add_rule(Box::new(LinkRule::new()), None).unwrap();
parser.add_rule(Box::new(TextRule::default()), None).unwrap();
parser.add_rule(Box::new(ReferenceRule::new()), None).unwrap();
}


@ -1,7 +1,7 @@
use crate::document::document::Document; use crate::document::document::Document;
use crate::lua::kernel::Kernel; use crate::lua::kernel::Kernel;
use crate::lua::kernel::KernelContext; use crate::lua::kernel::KernelContext;
use crate::parser::parser::ParserState; use crate::parser::parser::Parser;
use crate::parser::parser::ReportColors; use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule; use crate::parser::rule::RegexRule;
use crate::parser::source::Source; use crate::parser::source::Source;
@ -12,6 +12,7 @@ use ariadne::Fmt;
use ariadne::Label; use ariadne::Label;
use ariadne::Report; use ariadne::Report;
use ariadne::ReportKind; use ariadne::ReportKind;
use mlua::Function;
use mlua::Lua; use mlua::Lua;
use regex::Captures; use regex::Captures;
use regex::Regex; use regex::Regex;
@ -20,7 +21,6 @@ use std::rc::Rc;
use super::text::Text; use super::text::Text;
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::script")]
pub struct ScriptRule { pub struct ScriptRule {
re: [Regex; 2], re: [Regex; 2],
eval_kinds: [(&'static str, &'static str); 3], eval_kinds: [(&'static str, &'static str); 3],
@ -78,14 +78,13 @@ impl ScriptRule {
impl RegexRule for ScriptRule { impl RegexRule for ScriptRule {
fn name(&self) -> &'static str { "Script" } fn name(&self) -> &'static str { "Script" }
fn previous(&self) -> Option<&'static str> { Some("Import") }
fn regexes(&self) -> &[regex::Regex] { &self.re } fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match<'a>( fn on_regex_match<'a>(
&self, &self,
index: usize, index: usize,
state: &ParserState, parser: &dyn Parser,
document: &'a dyn Document<'a>, document: &'a dyn Document<'a>,
token: Token, token: Token,
matches: Captures, matches: Captures,
@ -94,8 +93,7 @@ impl RegexRule for ScriptRule {
let kernel_name = match matches.get(1) { let kernel_name = match matches.get(1) {
None => "main".to_string(), None => "main".to_string(),
Some(name) => { Some(name) => match ScriptRule::validate_kernel_name(parser.colors(), name.as_str()) {
match ScriptRule::validate_kernel_name(state.parser.colors(), name.as_str()) {
Ok(name) => name, Ok(name) => name,
Err(e) => { Err(e) => {
reports.push( reports.push(
@ -104,23 +102,17 @@ impl RegexRule for ScriptRule {
.with_label( .with_label(
Label::new((token.source(), name.range())) Label::new((token.source(), name.range()))
.with_message(e) .with_message(e)
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
return reports; return reports;
} }
} },
}
};
let mut kernels_borrow = state.shared.kernels.borrow_mut();
let kernel = match kernels_borrow.get(kernel_name.as_str()) {
Some(kernel) => kernel,
None => {
kernels_borrow.insert(kernel_name.clone(), Kernel::new(state.parser));
kernels_borrow.get(kernel_name.as_str()).unwrap()
}
}; };
let kernel = parser
.get_kernel(kernel_name.as_str())
.unwrap_or_else(|| parser.insert_kernel(kernel_name.to_string(), Kernel::new(parser)));
let kernel_data = matches let kernel_data = matches
.get(if index == 0 { 2 } else { 3 }) .get(if index == 0 { 2 } else { 3 })
@ -135,7 +127,7 @@ impl RegexRule for ScriptRule {
.with_label( .with_label(
Label::new((token.source(), token.start() + 1..token.end())) Label::new((token.source(), token.start() + 1..token.end()))
.with_message("Kernel code is empty") .with_message("Kernel code is empty")
.with_color(state.parser.colors().warning), .with_color(parser.colors().warning),
) )
.finish(), .finish(),
); );
@ -172,9 +164,9 @@ impl RegexRule for ScriptRule {
Label::new((source.clone(), 0..source.content().len())) Label::new((source.clone(), 0..source.content().len()))
.with_message(format!( .with_message(format!(
"Kernel execution failed:\n{}", "Kernel execution failed:\n{}",
e e.to_string()
)) ))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -186,7 +178,7 @@ impl RegexRule for ScriptRule {
// Validate kind // Validate kind
let kind = match matches.get(2) { let kind = match matches.get(2) {
None => 0, None => 0,
Some(kind) => match self.validate_kind(state.parser.colors(), kind.as_str()) { Some(kind) => match self.validate_kind(parser.colors(), kind.as_str()) {
Ok(kind) => kind, Ok(kind) => kind,
Err(msg) => { Err(msg) => {
reports.push( reports.push(
@ -195,7 +187,7 @@ impl RegexRule for ScriptRule {
.with_label( .with_label(
Label::new((token.source(), kind.range())) Label::new((token.source(), kind.range()))
.with_message(msg) .with_message(msg)
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -215,9 +207,9 @@ impl RegexRule for ScriptRule {
Label::new((source.clone(), 0..source.content().len())) Label::new((source.clone(), 0..source.content().len()))
.with_message(format!( .with_message(format!(
"Kernel evaluation failed:\n{}", "Kernel evaluation failed:\n{}",
e e.to_string()
)) ))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -231,7 +223,7 @@ impl RegexRule for ScriptRule {
// Eval to text // Eval to text
{ {
if !result.is_empty() { if !result.is_empty() {
state.push( parser.push(
document, document,
Box::new(Text::new( Box::new(Text::new(
Token::new(1..source.content().len(), source.clone()), Token::new(1..source.content().len(), source.clone()),
@ -248,11 +240,7 @@ impl RegexRule for ScriptRule {
result, result,
)) as Rc<dyn Source>; )) as Rc<dyn Source>;
state.with_state(|new_state| { parser.parse_into(parse_source, document);
new_state
.parser
.parse_into(new_state, parse_source, document);
})
} }
} }
Err(e) => { Err(e) => {
@ -263,9 +251,9 @@ impl RegexRule for ScriptRule {
Label::new((source.clone(), 0..source.content().len())) Label::new((source.clone(), 0..source.content().len()))
.with_message(format!( .with_message(format!(
"Kernel evaluation failed:\n{}", "Kernel evaluation failed:\n{}",
e e.to_string()
)) ))
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -279,69 +267,13 @@ impl RegexRule for ScriptRule {
let ctx = KernelContext { let ctx = KernelContext {
location: Token::new(0..source.content().len(), source.clone()), location: Token::new(0..source.content().len(), source.clone()),
state, parser,
document, document,
}; };
kernel.run_with_context(ctx, execute) kernel.run_with_context(ctx, execute)
} }
}
// TODO
#[cfg(test)] fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
mod tests {
use super::*;
use crate::elements::link::Link;
use crate::elements::list::ListEntry;
use crate::elements::list::ListMarker;
use crate::elements::paragraph::Paragraph;
use crate::elements::style::Style;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
#[test]
fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
Simple evals:
* %< 1+1>%
* %<" 1+1>% = 2
* %<! "**bold**">%
Definition:
@<
function make_ref(name, ref)
return "[" .. name .. "](#" .. ref .. ")"
end
>@
Evaluation: %<! make_ref("hello", "id")>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph;
ListMarker;
ListEntry {};
ListEntry {
Text { content == "2" };
Text { content == " = 2" };
};
ListEntry {
Style;
Text { content == "bold" };
Style;
};
ListMarker;
Paragraph {
Text; Text;
Link { url == "#id" } { Text { content == "hello" }; };
};
);
}
} }


@ -5,11 +5,10 @@ use crate::document::element::ElemKind;
use crate::document::element::Element; use crate::document::element::Element;
use crate::document::element::ReferenceableElement; use crate::document::element::ReferenceableElement;
use crate::lua::kernel::CTX; use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState; use crate::parser::parser::Parser;
use crate::parser::rule::RegexRule; use crate::parser::rule::RegexRule;
use crate::parser::source::Source; use crate::parser::source::Source;
use crate::parser::source::Token; use crate::parser::source::Token;
use crate::parser::style::StyleHolder;
use ariadne::Fmt; use ariadne::Fmt;
use ariadne::Label; use ariadne::Label;
use ariadne::Report; use ariadne::Report;
@ -18,91 +17,35 @@ use mlua::Error::BadArgument;
use mlua::Function; use mlua::Function;
use mlua::Lua; use mlua::Lua;
use regex::Regex; use regex::Regex;
use section_style::SectionLinkPos;
use section_style::SectionStyle;
use std::ops::Range; use std::ops::Range;
use std::rc::Rc; use std::rc::Rc;
use std::sync::Arc; use std::sync::Arc;
use super::reference::InternalReference;
#[derive(Debug)] #[derive(Debug)]
pub struct Section { pub struct Section {
pub(self) location: Token, pub(self) location: Token,
/// Title of the section pub(self) title: String, // Section title
pub(self) title: String, pub(self) depth: usize, // Section depth
/// Depth i.e number of '#' pub(self) kind: u8, // Section kind, e.g numbered, unnumbered, ...
pub(self) depth: usize, pub(self) reference: Option<String>, // Section reference name
/// [`section_kind`]
pub(self) kind: u8,
/// Section reference name
pub(self) reference: Option<String>,
/// Style of the section
pub(self) style: Rc<section_style::SectionStyle>,
} }
impl Element for Section { impl Element for Section {
fn location(&self) -> &Token { &self.location } fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Block } fn kind(&self) -> ElemKind { ElemKind::Block }
fn element_name(&self) -> &'static str { "Section" } fn element_name(&self) -> &'static str { "Section" }
fn compile(&self, compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> { fn to_string(&self) -> String { format!("{self:#?}") }
fn as_referenceable(&self) -> Option<&dyn ReferenceableElement> { Some(self) }
fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
match compiler.target() { match compiler.target() {
Target::HTML => { Target::HTML => Ok(format!(
// Section numbering "<h{0}>{1}</h{0}>",
let number = if (self.kind & section_kind::NO_NUMBER) != section_kind::NO_NUMBER {
let numbering = compiler.section_counter(self.depth);
let mut result = String::new();
for num in numbering.iter() {
result = result + num.to_string().as_str() + ".";
}
result += " ";
result
} else {
String::new()
};
if self.style.link_pos == SectionLinkPos::None {
return Ok(format!(
r#"<h{0} id="{1}">{number}{2}</h{0}>"#,
self.depth, self.depth,
Compiler::refname(compiler.target(), self.title.as_str()),
Compiler::sanitize(compiler.target(), self.title.as_str()) Compiler::sanitize(compiler.target(), self.title.as_str())
)); )),
}
let refname = Compiler::refname(compiler.target(), self.title.as_str());
let link = format!(
"{}<a class=\"section-link\" href=\"#{refname}\">{}</a>{}",
Compiler::sanitize(compiler.target(), self.style.link[0].as_str()),
Compiler::sanitize(compiler.target(), self.style.link[1].as_str()),
Compiler::sanitize(compiler.target(), self.style.link[2].as_str())
);
if self.style.link_pos == SectionLinkPos::After {
Ok(format!(
r#"<h{0} id="{1}">{number}{2}{link}</h{0}>"#,
self.depth,
Compiler::refname(compiler.target(), self.title.as_str()),
Compiler::sanitize(compiler.target(), self.title.as_str())
))
} else
// Before
{
Ok(format!(
r#"<h{0} id="{1}">{link}{number}{2}</h{0}>"#,
self.depth,
Compiler::refname(compiler.target(), self.title.as_str()),
Compiler::sanitize(compiler.target(), self.title.as_str())
))
}
}
Target::LATEX => Err("Unimplemented compiler".to_string()), Target::LATEX => Err("Unimplemented compiler".to_string()),
} }
} }
fn as_referenceable(&self) -> Option<&dyn ReferenceableElement> { Some(self) }
} }
impl ReferenceableElement for Section { impl ReferenceableElement for Section {
@ -113,35 +56,14 @@ impl ReferenceableElement for Section {
fn compile_reference( fn compile_reference(
&self, &self,
compiler: &Compiler, compiler: &Compiler,
_document: &dyn Document, document: &dyn Document,
reference: &InternalReference, reference: &super::reference::Reference,
_refid: usize, refid: usize,
) -> Result<String, String> { ) -> Result<String, String> {
match compiler.target() { todo!()
Target::HTML => {
let caption = reference.caption().map_or(
format!(
"({})",
Compiler::sanitize(compiler.target(), self.title.as_str())
),
|cap| cap.clone(),
);
Ok(format!(
"<a class=\"section-reference\" href=\"#{}\">{caption}</a>",
Compiler::refname(compiler.target(), self.title.as_str())
))
}
_ => todo!(""),
} }
} }
fn refid(&self, compiler: &Compiler, _refid: usize) -> String {
Compiler::refname(compiler.target(), self.title.as_str())
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::section")]
pub struct SectionRule { pub struct SectionRule {
re: [Regex; 1], re: [Regex; 1],
} }
@ -162,14 +84,13 @@ pub mod section_kind {
impl RegexRule for SectionRule { impl RegexRule for SectionRule {
fn name(&self) -> &'static str { "Section" } fn name(&self) -> &'static str { "Section" }
fn previous(&self) -> Option<&'static str> { Some("Custom Style") }
fn regexes(&self) -> &[Regex] { &self.re } fn regexes(&self) -> &[Regex] { &self.re }
fn on_regex_match( fn on_regex_match(
&self, &self,
_: usize, _: usize,
state: &ParserState, parser: &dyn Parser,
document: &dyn Document, document: &dyn Document,
token: Token, token: Token,
matches: regex::Captures, matches: regex::Captures,
@ -184,9 +105,9 @@ impl RegexRule for SectionRule {
.with_label( .with_label(
Label::new((token.source(), depth.range())) Label::new((token.source(), depth.range()))
.with_message(format!("Section is of depth {}, which is greather than {} (maximum depth allowed)", .with_message(format!("Section is of depth {}, which is greather than {} (maximum depth allowed)",
depth.len().fg(state.parser.colors().info), depth.len().fg(parser.colors().info),
6.fg(state.parser.colors().info))) 6.fg(parser.colors().info)))
.with_color(state.parser.colors().error)) .with_color(parser.colors().error))
.finish()); .finish());
return result; return result;
} }
@ -197,34 +118,34 @@ impl RegexRule for SectionRule {
}; };
// [Optional] Reference name // [Optional] Reference name
let section_refname = let section_refname = matches.get(2).map_or_else(
matches.get(2).map_or_else(
|| None, || None,
|refname| { |refname| {
/* TODO: Wait for reference rework
// Check for duplicate reference // Check for duplicate reference
if let Some(elem_reference) = document.get_reference(refname.as_str()) { if let Some((ref_doc, reference)) = document.get_reference(refname.as_str())
let elem = document.get_from_reference(&elem_reference).unwrap(); {
result.push( result.push(
Report::build(ReportKind::Warning, token.source(), refname.start()) Report::build(ReportKind::Warning, token.source(), refname.start())
.with_message("Duplicate reference name") .with_message("Duplicate reference name")
.with_label( .with_label(
Label::new((token.source(), refname.range())) Label::new((token.source(), refname.range()))
.with_message(format!("Reference with name `{}` is already defined in `{}`", .with_message(format!("Reference with name `{}` is already defined in `{}`",
refname.as_str().fg(state.parser.colors().highlight), refname.as_str().fg(parser.colors().highlight),
elem.location().source().name().as_str().fg(state.parser.colors().highlight))) ref_doc.source().name().as_str().fg(parser.colors().highlight)))
.with_message(format!("`{}` conflicts with previously defined reference to {}", .with_message(format!("`{}` conflicts with previously defined reference to {}",
refname.as_str().fg(state.parser.colors().highlight), refname.as_str().fg(parser.colors().highlight),
elem.element_name().fg(state.parser.colors().highlight))) reference.element_name().fg(parser.colors().highlight)))
.with_color(state.parser.colors().warning)) .with_color(parser.colors().warning))
.with_label( .with_label(
Label::new((elem.location().source(), elem.location().start()..elem.location().end() )) Label::new((ref_doc.source(), reference.location().start()+1..reference.location().end() ))
.with_message(format!("`{}` previously defined here", .with_message(format!("`{}` previously defined here",
refname.as_str().fg(state.parser.colors().highlight))) refname.as_str().fg(parser.colors().highlight)))
.with_color(state.parser.colors().warning)) .with_color(parser.colors().warning))
.with_note("Previous reference was overwritten".to_string()) .with_note(format!("Previous reference was overwritten"))
.finish()); .finish());
} }
*/
Some(refname.as_str().to_string()) Some(refname.as_str().to_string())
}, },
); );
@ -243,11 +164,11 @@ impl RegexRule for SectionRule {
.with_label( .with_label(
Label::new((token.source(), kind.range())) Label::new((token.source(), kind.range()))
.with_message(format!("Section numbering kind must be a combination of `{}` for unnumbered, and `{}` for non-listing; got `{}`", .with_message(format!("Section numbering kind must be a combination of `{}` for unnumbered, and `{}` for non-listing; got `{}`",
"*".fg(state.parser.colors().info), "*".fg(parser.colors().info),
"+".fg(state.parser.colors().info), "+".fg(parser.colors().info),
kind.as_str().fg(state.parser.colors().highlight))) kind.as_str().fg(parser.colors().highlight)))
.with_color(state.parser.colors().error)) .with_color(parser.colors().error))
.with_help("Leave empty for a numbered listed section".to_string()) .with_help(format!("Leave empty for a numbered listed section"))
.finish()); .finish());
return result; return result;
} }
@ -274,7 +195,7 @@ impl RegexRule for SectionRule {
.with_label( .with_label(
Label::new((token.source(), name.range())) Label::new((token.source(), name.range()))
.with_message("Sections require a name before line end") .with_message("Sections require a name before line end")
.with_color(state.parser.colors().error), .with_color(parser.colors().error),
) )
.finish(), .finish(),
); );
@ -289,8 +210,8 @@ impl RegexRule for SectionRule {
.with_label( .with_label(
Label::new((token.source(), name.range())) Label::new((token.source(), name.range()))
.with_message("Sections require at least one whitespace before the section's name") .with_message("Sections require at least one whitespace before the section's name")
.with_color(state.parser.colors().warning)) .with_color(parser.colors().warning))
.with_help(format!("Add a space before `{}`", section_name.fg(state.parser.colors().highlight))) .with_help(format!("Add a space before `{}`", section_name.fg(parser.colors().highlight)))
.finish()); .finish());
return result; return result;
} }
@ -300,16 +221,7 @@ impl RegexRule for SectionRule {
_ => panic!("Empty section name"), _ => panic!("Empty section name"),
}; };
// Get style parser.push(
let style = state
.shared
.styles
.borrow()
.current(section_style::STYLE_KEY)
.downcast_rc::<SectionStyle>()
.unwrap();
state.push(
document, document,
Box::new(Section { Box::new(Section {
location: token.clone(), location: token.clone(),
@ -317,21 +229,20 @@ impl RegexRule for SectionRule {
depth: section_depth, depth: section_depth,
kind: section_kind, kind: section_kind,
reference: section_refname, reference: section_refname,
style,
}), }),
); );
result return result;
} }
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![]; let mut bindings = vec![];
bindings.push(( bindings.push((
"push".to_string(), "push".to_string(),
lua.create_function( lua.create_function(
|_, (title, depth, kind, reference): (String, usize, Option<String>, Option<String>)| { |_, (title, depth, kind, reference): (String, usize, String, Option<String>)| {
let kind = match kind.as_deref().unwrap_or("") { let kind = match kind.as_str() {
"*+" | "+*" => section_kind::NO_NUMBER | section_kind::NO_TOC, "*+" | "+*" => section_kind::NO_NUMBER | section_kind::NO_TOC,
"*" => section_kind::NO_NUMBER, "*" => section_kind::NO_NUMBER,
"+" => section_kind::NO_TOC, "+" => section_kind::NO_TOC,
@ -341,24 +252,16 @@ impl RegexRule for SectionRule {
to: Some("push".to_string()), to: Some("push".to_string()),
pos: 3, pos: 3,
name: Some("kind".to_string()), name: Some("kind".to_string()),
cause: Arc::new(mlua::Error::external("Unknown section kind specified".to_string())), cause: Arc::new(mlua::Error::external(format!(
"Unknown section kind specified"
))),
}) })
} }
}; };
CTX.with_borrow(|ctx| { CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| { ctx.as_ref().map(|ctx| {
// Get style ctx.parser.push(
let style = ctx
.state
.shared
.styles
.borrow()
.current(section_style::STYLE_KEY)
.downcast_rc::<SectionStyle>()
.unwrap();
ctx.state.push(
ctx.document, ctx.document,
Box::new(Section { Box::new(Section {
location: ctx.location.clone(), location: ctx.location.clone(),
@ -366,7 +269,6 @@ impl RegexRule for SectionRule {
depth, depth,
kind, kind,
reference, reference,
style,
}), }),
); );
}) })
@ -380,137 +282,4 @@ impl RegexRule for SectionRule {
bindings bindings
} }
fn register_styles(&self, holder: &mut StyleHolder) {
holder.set_current(Rc::new(SectionStyle::default()));
}
}
mod section_style {
use serde::Deserialize;
use serde::Serialize;
use crate::impl_elementstyle;
pub static STYLE_KEY: &str = "style.section";
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
pub enum SectionLinkPos {
Before,
After,
None,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct SectionStyle {
pub link_pos: SectionLinkPos,
pub link: [String; 3],
}
impl Default for SectionStyle {
fn default() -> Self {
Self {
link_pos: SectionLinkPos::Before,
link: ["".into(), "🔗".into(), " ".into()],
}
}
}
impl_elementstyle!(SectionStyle, STYLE_KEY);
}
#[cfg(test)]
mod tests {
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
# 1
##+ 2
###* 3
####+* 4
#####*+ 5
######{refname} 6
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Section { depth == 1, title == "1" };
Section { depth == 2, title == "2", kind == section_kind::NO_TOC };
Section { depth == 3, title == "3", kind == section_kind::NO_NUMBER };
Section { depth == 4, title == "4", kind == section_kind::NO_NUMBER | section_kind::NO_TOC };
Section { depth == 5, title == "5", kind == section_kind::NO_NUMBER | section_kind::NO_TOC };
Section { depth == 6, title == "6", reference == Some("refname".to_string()) };
);
}
#[test]
fn lua() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
%<
nml.section.push("1", 1, "", nil)
nml.section.push("2", 2, "+", nil)
nml.section.push("3", 3, "*", nil)
nml.section.push("4", 4, "+*", nil)
nml.section.push("5", 5, "*+", nil)
nml.section.push("6", 6, "", "refname")
>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Section { depth == 1, title == "1" };
Section { depth == 2, title == "2", kind == section_kind::NO_TOC };
Section { depth == 3, title == "3", kind == section_kind::NO_NUMBER };
Section { depth == 4, title == "4", kind == section_kind::NO_NUMBER | section_kind::NO_TOC };
Section { depth == 5, title == "5", kind == section_kind::NO_NUMBER | section_kind::NO_TOC };
Section { depth == 6, title == "6", reference == Some("refname".to_string()) };
);
}
#[test]
fn style() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
@@style.section = {
"link_pos": "None",
"link": ["a", "b", "c"]
}
"#
.to_string(),
None,
));
let parser = LangParser::default();
let state = ParserState::new(&parser, None);
let (_, state) = parser.parse(state, source, None);
let style = state.shared
.styles
.borrow()
.current(section_style::STYLE_KEY)
.downcast_rc::<SectionStyle>()
.unwrap();
assert_eq!(style.link_pos, SectionLinkPos::None);
assert_eq!(style.link, ["a".to_string(), "b".to_string(), "c".to_string()]);
}
} }


@ -1,27 +1,10 @@
use crate::compiler::compiler::Compiler; use mlua::{Function, Lua};
use crate::compiler::compiler::Target; use regex::{Captures, Regex};
use crate::document::document::Document; use crate::{compiler::compiler::{Compiler, Target}, document::{document::{DocumentAccessors, Document}, element::{ElemKind, Element}}, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}, state::State}};
use crate::document::document::DocumentAccessors; use ariadne::{Fmt, Label, Report, ReportKind};
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::state::RuleState;
use crate::parser::state::Scope; use crate::parser::state::Scope;
use ariadne::Fmt; use std::{cell::RefCell, ops::Range, rc::Rc};
use ariadne::Label; use lazy_static::lazy_static;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Function;
use regex::Captures;
use regex::Regex;
use std::cell::RefCell;
use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
use super::paragraph::Paragraph; use super::paragraph::Paragraph;
@ -32,107 +15,104 @@ pub struct Style {
close: bool, close: bool,
} }
impl Style { impl Style
{
pub fn new(location: Token, kind: usize, close: bool) -> Self { pub fn new(location: Token, kind: usize, close: bool) -> Self {
Self { Self { location, kind, close }
location,
kind,
close,
}
} }
} }
impl Element for Style { impl Element for Style
{
fn location(&self) -> &Token { &self.location } fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Inline } fn kind(&self) -> ElemKind { ElemKind::Inline }
fn element_name(&self) -> &'static str { "Style" } fn element_name(&self) -> &'static str { "Section" }
fn compile(&self, compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> { fn to_string(&self) -> String { format!("{self:#?}") }
match compiler.target() { fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
match compiler.target()
{
Target::HTML => { Target::HTML => {
Ok([ Ok([
// Bold // Bold
"<b>", "</b>", // Italic "<b>", "</b>",
"<i>", "</i>", // Underline // Italic
"<u>", "</u>", // Code "<i>", "</i>",
// Underline
"<u>", "</u>",
// Code
"<em>", "</em>", "<em>", "</em>",
][self.kind * 2 + self.close as usize] ][self.kind*2 + self.close as usize].to_string())
.to_string())
} }
Target::LATEX => Err("Unimplemented compiler".to_string()), Target::LATEX => Err("Unimplemented compiler".to_string())
} }
} }
} }
struct StyleState { struct StyleState
toggled: [Option<Token>; 4], {
toggled: [Option<Token>; 4]
} }
impl StyleState { impl StyleState {
const NAMES : [&'static str; 4] = ["Bold", "Italic", "Underline", "Code"]; const NAMES : [&'static str; 4] = ["Bold", "Italic", "Underline", "Code"];
fn new() -> Self { fn new() -> Self {
Self { Self { toggled: [None, None, None, None] }
toggled: [None, None, None, None],
}
} }
} }
impl RuleState for StyleState { impl State for StyleState
{
fn scope(&self) -> Scope { Scope::PARAGRAPH } fn scope(&self) -> Scope { Scope::PARAGRAPH }
fn on_remove<'a>( fn on_remove<'a>(&self, parser: &dyn Parser, document: &dyn Document) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>> {
&self, let mut result = Vec::new();
state: &ParserState,
document: &dyn Document,
) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![];
self.toggled self.toggled
.iter() .iter()
.zip(StyleState::NAMES) .zip(StyleState::NAMES)
.for_each(|(token, name)| { .for_each(|(token, name)|
if token.is_none() { {
return; if token.is_none() { return } // Style not enabled
} // Style not enabled
let token = token.as_ref().unwrap(); let token = token.as_ref().unwrap();
//let range = range.as_ref().unwrap();
//let active_range = range.start .. paragraph.location().end()-1;
let paragraph = document.last_element::<Paragraph>().unwrap(); let paragraph = document.last_element::<Paragraph>().unwrap();
let paragraph_end = paragraph let paragraph_end = paragraph.content.last()
.content .and_then(|last| Some((last.location().source(), last.location().end()-1 .. last.location().end())))
.last().map(|last| (
last.location().source(),
last.location().end() - 1..last.location().end(),
))
.unwrap(); .unwrap();
reports.push( // TODO: Allow style to span multiple documents if they don't break paragraph.
result.push(
Report::build(ReportKind::Error, token.source(), token.start()) Report::build(ReportKind::Error, token.source(), token.start())
.with_message("Unterminated Style") .with_message("Unterminated style")
//.with_label(
// Label::new((document.source(), active_range.clone()))
// .with_order(0)
// .with_message(format!("Style {} is not terminated before the end of paragraph",
// name.fg(parser.colors().info)))
// .with_color(parser.colors().error))
.with_label( .with_label(
Label::new((token.source(), token.range.clone())) Label::new((token.source(), token.range.clone()))
.with_order(1) .with_order(1)
.with_message(format!( .with_message(format!("Style {} starts here",
"Style {} starts here", name.fg(parser.colors().info)))
name.fg(state.parser.colors().info) .with_color(parser.colors().info))
))
.with_color(state.parser.colors().error),
)
.with_label( .with_label(
Label::new(paragraph_end) Label::new(paragraph_end)
.with_order(1) .with_order(1)
.with_message("Paragraph ends here".to_string()) .with_message(format!("Paragraph ends here"))
.with_color(state.parser.colors().error), .with_color(parser.colors().info))
)
.with_note("Styles cannot span multiple documents (i.e @import)") .with_note("Styles cannot span multiple documents (i.e @import)")
.finish(), .finish());
);
}); });
reports return result;
} }
} }
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::style")]
pub struct StyleRule { pub struct StyleRule {
re: [Regex; 4], re: [Regex; 4],
} }
@ -148,38 +128,31 @@ impl StyleRule {
// Underline // Underline
Regex::new(r"__").unwrap(), Regex::new(r"__").unwrap(),
// Code // Code
Regex::new(r"`").unwrap(), Regex::new(r"`").unwrap()
], ]
} }
} }
} }
static STATE_NAME: &str = "elements.style"; lazy_static! {
static ref STATE_NAME : String = "elements.style".to_string();
}
impl RegexRule for StyleRule { impl RegexRule for StyleRule
{
fn name(&self) -> &'static str { "Style" } fn name(&self) -> &'static str { "Style" }
fn previous(&self) -> Option<&'static str> { Some("Layout") }
fn regexes(&self) -> &[regex::Regex] { &self.re } fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match( fn on_regex_match(&self, index: usize, parser: &dyn Parser, document: &dyn Document, token: Token, _matches: Captures) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>> {
&self, let result = vec![];
index: usize,
state: &ParserState, let query = parser.state().query(&STATE_NAME);
document: &dyn Document, let state = match query
token: Token, {
_matches: Captures,
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>> {
let query = state.shared.rule_state.borrow().get(STATE_NAME);
let style_state = match query {
Some(state) => state, Some(state) => state,
None => { None => { // Insert as a new state
// Insert as a new state match parser.state_mut().insert(STATE_NAME.clone(), Rc::new(RefCell::new(StyleState::new())))
match state
.shared
.rule_state
.borrow_mut()
.insert(STATE_NAME.into(), Rc::new(RefCell::new(StyleState::new())))
{ {
Err(_) => panic!("Unknown error"), Err(_) => panic!("Unknown error"),
Ok(state) => state, Ok(state) => state,
@ -187,181 +160,28 @@ impl RegexRule for StyleRule {
} }
}; };
if let Some(style_state) = style_state.borrow_mut().downcast_mut::<StyleState>() { if let Some(style_state) = state
style_state.toggled[index] = style_state.toggled[index] .borrow_mut()
.clone() .as_any_mut()
.map_or(Some(token.clone()), |_| None); .downcast_mut::<StyleState>()
state.push( {
document, style_state.toggled[index] = style_state.toggled[index].clone().map_or(Some(token.clone()), |_| None);
Box::new(Style::new( parser.push(document, Box::new(
Style::new(
token.clone(), token.clone(),
index, index,
style_state.toggled[index].is_none(), !style_state.toggled[index].is_some()
)), )
); ));
} else {
panic!("Invalid state at `{STATE_NAME}`");
} }
else
vec![]
}
fn register_bindings<'lua>(&self, lua: &'lua mlua::Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![];
bindings.push((
"toggle".to_string(),
lua.create_function(|_, style: String| {
let kind = match style.as_str() {
"bold" | "Bold" => 0,
"italic" | "Italic" => 1,
"underline" | "Underline" => 2,
"emphasis" | "Emphasis" => 3,
_ => {
return Err(mlua::Error::BadArgument {
to: Some("toggle".to_string()),
pos: 1,
name: Some("style".to_string()),
cause: Arc::new(mlua::Error::external("Unknown style specified".to_string())),
})
}
};
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
let query = ctx.state.shared.rule_state.borrow().get(STATE_NAME);
let style_state = match query {
Some(state) => state,
None => {
// Insert as a new state
match ctx.state.shared.rule_state.borrow_mut().insert(
STATE_NAME.into(),
Rc::new(RefCell::new(StyleState::new())),
) {
Err(_) => panic!("Unknown error"),
Ok(state) => state,
}
}
};
if let Some(style_state) =
style_state.borrow_mut().downcast_mut::<StyleState>()
{ {
style_state.toggled[kind] = style_state.toggled[kind] panic!("Invalid state at `{}`", STATE_NAME.as_str());
.clone()
.map_or(Some(ctx.location.clone()), |_| None);
ctx.state.push(
ctx.document,
Box::new(Style::new(
ctx.location.clone(),
kind,
style_state.toggled[kind].is_none(),
)),
);
} else {
panic!("Invalid state at `{STATE_NAME}`");
};
})
});
Ok(())
})
.unwrap(),
));
bindings
}
}

#[cfg(test)]
mod tests {
use crate::elements::text::Text;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use super::*;
#[test]
fn parser() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
Some *style
terminated here*
**BOLD + *italic***
__`UNDERLINE+EM`__
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Text;
Style { kind == 1, close == false };
Text;
Style { kind == 1, close == true };
};
Paragraph {
Style { kind == 0, close == false }; // **
Text;
Style { kind == 1, close == false }; // *
Text;
Style { kind == 0, close == true }; // **
Style { kind == 1, close == true }; // *
Style { kind == 2, close == false }; // __
Style { kind == 3, close == false }; // `
Text;
Style { kind == 3, close == true }; // `
Style { kind == 2, close == true }; // __
};
);
}

#[test]
fn lua() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
Some %<nml.style.toggle("italic")>%style
terminated here%<nml.style.toggle("Italic")>%
%<nml.style.toggle("Bold")>%NOLD + %<nml.style.toggle("italic")>%italic%<nml.style.toggle("bold") nml.style.toggle("italic")>%
%<nml.style.toggle("Underline") nml.style.toggle("Emphasis")>%UNDERLINE+EM%<nml.style.toggle("emphasis")>%%<nml.style.toggle("underline")>%
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None);
validate_document!(doc.content().borrow(), 0,
Paragraph {
Text;
Style { kind == 1, close == false };
Text;
Style { kind == 1, close == true };
};
Paragraph {
Style { kind == 0, close == false }; // **
Text;
Style { kind == 1, close == false }; // *
Text;
Style { kind == 0, close == true }; // **
Style { kind == 1, close == true }; // *
Style { kind == 2, close == false }; // __
Style { kind == 3, close == false }; // `
Text;
Style { kind == 3, close == true }; // `
Style { kind == 2, close == true }; // __
};
);
}
}
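For readers skimming the diff, the toggle bookkeeping above is the core of this rule. The following is a minimal, self-contained sketch of that idea with illustrative names, not the project's actual types: one slot per style kind records whether the style is currently open, and each matched delimiter flips its slot.

// Self-contained sketch (illustrative names): one slot per style kind,
// flipped every time the corresponding delimiter is matched.
// 0 = bold, 1 = italic, 2 = underline, 3 = code.
#[derive(Default)]
struct ToggleState {
	toggled: [Option<usize>; 4], // Some(start position) while a style is open
}

impl ToggleState {
	// Returns true when this occurrence closes the style, false when it opens it,
	// mirroring the `close` flag passed to `Style::new` above.
	fn toggle(&mut self, kind: usize, pos: usize) -> bool {
		match self.toggled[kind].take() {
			Some(_) => true, // was open, so this delimiter closes it
			None => {
				self.toggled[kind] = Some(pos);
				false // was closed, so this delimiter opens it
			}
		}
	}
}

fn main() {
	let mut state = ToggleState::default();
	assert!(!state.toggle(1, 5)); // first `*` opens italic
	assert!(state.toggle(1, 20)); // second `*` closes it
	println!("ok");
}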

View file

@ -6,7 +6,6 @@ use std::process::Command;
use std::process::Stdio;
use std::rc::Rc;
use std::str::FromStr;
use std::sync::Arc;
use std::sync::Once;

use ariadne::Fmt;
@ -28,8 +27,7 @@ use crate::compiler::compiler::Target;
use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
@ -114,7 +112,10 @@ impl FormattedTex {
	}

	let mut result = String::new();
	if let Err(e) = process.stdout.unwrap().read_to_string(&mut result) {
		panic!("Unable to read `latex2svg` stdout: {}", e)
	}
	println!("Done!");

	Ok(result)
@ -150,18 +151,15 @@ impl Element for Tex {
	fn element_name(&self) -> &'static str { "LaTeX" }

	fn compile(
		&self,
		compiler: &Compiler,
		document: &dyn Document,
		_cursor: usize,
	) -> Result<String, String> {
		match compiler.target() {
			Target::HTML => {
				static CACHE_INIT: Once = Once::new();
				CACHE_INIT.call_once(|| {
					if let Some(con) = compiler.cache() {
						if let Err(e) = FormattedTex::init(con) {
							eprintln!("Unable to create cache table: {e}");
						}
					}
@ -191,8 +189,8 @@ impl Element for Tex {
					Tex::format_latex(&fontsize, &preamble, &format!("{prepend}{}", self.tex))
				};

				let result = if let Some(con) = compiler.cache() {
					match latex.cached(con, |s| s.latex_to_svg(&exec, &fontsize)) {
						Ok(s) => Ok(s),
						Err(e) => match e {
							CachedError::SqlErr(e) => {
@ -225,7 +223,6 @@ impl Element for Tex {
		}
	}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::tex")]
pub struct TexRule {
	re: [Regex; 2],
	properties: PropertyParser,
@ -256,7 +253,7 @@ impl TexRule {
				.unwrap(),
				Regex::new(r"\$(?:\[((?:\\.|[^\\\\])*?)\])?(?:((?:\\.|[^\\\\])*?)\$)?").unwrap(),
			],
			properties: PropertyParser { properties: props },
		}
	}
@ -303,14 +300,13 @@ impl TexRule {
impl RegexRule for TexRule {
	fn name(&self) -> &'static str { "Tex" }

	fn previous(&self) -> Option<&'static str> { Some("Code") }

	fn regexes(&self) -> &[regex::Regex] { &self.re }

	fn on_regex_match(
		&self,
		index: usize,
		state: &ParserState,
		document: &dyn Document,
		token: Token,
		matches: Captures,
@ -327,10 +323,10 @@ impl RegexRule for TexRule {
					Label::new((token.source().clone(), token.range.clone()))
						.with_message(format!(
							"Missing terminating `{}` after first `{}`",
							["|$", "$"][index].fg(state.parser.colors().info),
							["$|", "$"][index].fg(state.parser.colors().info)
						))
						.with_color(state.parser.colors().error),
				)
				.finish(),
			);
@ -350,7 +346,7 @@ impl RegexRule for TexRule {
				.with_label(
					Label::new((token.source().clone(), content.range()))
						.with_message("Tex code is empty")
						.with_color(state.parser.colors().warning),
				)
				.finish(),
			);
@ -360,8 +356,7 @@ impl RegexRule for TexRule {
		};

		// Properties
		let properties = match self.parse_properties(state.parser.colors(), &token, &matches.get(1))
		{
			Ok(pm) => pm,
			Err(report) => {
				reports.push(report);
@ -383,16 +378,16 @@ impl RegexRule for TexRule {
							Label::new((token.source().clone(), token.range.clone()))
								.with_message(format!(
									"Property `kind: {}` cannot be converted: {}",
									prop.fg(state.parser.colors().info),
									err.fg(state.parser.colors().error)
								))
								.with_color(state.parser.colors().warning),
						)
						.finish(),
					);
					return reports;
				}
				PropertyMapError::NotFoundError(_) => {
					if index == 1 {
						TexKind::Inline
					} else {
@ -407,17 +402,19 @@ impl RegexRule for TexRule {
.get("caption", |_, value| -> Result<String, ()> { .get("caption", |_, value| -> Result<String, ()> {
Ok(value.clone()) Ok(value.clone())
}) })
.ok().map(|(_, value)| value); .ok()
.and_then(|(_, value)| Some(value));
// Environ // Environ
let tex_env = properties let tex_env = properties
.get("env", |_, value| -> Result<String, ()> { .get("env", |_, value| -> Result<String, ()> {
Ok(value.clone()) Ok(value.clone())
}) })
.ok().map(|(_, value)| value) .ok()
.and_then(|(_, value)| Some(value))
.unwrap(); .unwrap();
state.push( parser.push(
document, document,
Box::new(Tex { Box::new(Tex {
mathmode: index == 1, mathmode: index == 1,
@ -432,103 +429,14 @@ impl RegexRule for TexRule {
		reports
	}

	fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
		let mut bindings = vec![];
bindings.push((
"push_math".to_string(),
lua.create_function(
|_, (kind, tex, env, caption): (String, String, Option<String>, Option<String>)| {
let mut result = Ok(());
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
let kind = match TexKind::from_str(kind.as_str()) {
Ok(kind) => kind,
Err(err) => {
result = Err(mlua::Error::BadArgument {
to: Some("push".to_string()),
pos: 2,
name: Some("kind".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Unable to get tex kind: {err}"
))),
});
return;
}
};
ctx.state.push(
ctx.document,
Box::new(Tex {
location: ctx.location.clone(),
mathmode: true,
kind,
env: env.unwrap_or("main".to_string()),
tex,
caption,
}),
);
})
});
result
},
)
.unwrap(),
));
bindings.push((
"push".to_string(),
lua.create_function(
|_, (kind, tex, env, caption): (String, String, Option<String>, Option<String>)| {
let mut result = Ok(());
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
let kind = match TexKind::from_str(kind.as_str()) {
Ok(kind) => kind,
Err(err) => {
result = Err(mlua::Error::BadArgument {
to: Some("push".to_string()),
pos: 2,
name: Some("kind".to_string()),
cause: Arc::new(mlua::Error::external(format!(
"Unable to get tex kind: {err}"
))),
});
return;
}
};
ctx.state.push(
ctx.document,
Box::new(Tex {
location: ctx.location.clone(),
mathmode: false,
kind,
env: env.unwrap_or("main".to_string()),
tex,
caption,
}),
);
})
});
result
},
)
.unwrap(),
));
bindings
}
}

#[cfg(test)]
mod tests {
use crate::elements::paragraph::Paragraph;
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;

use super::*;
@ -540,24 +448,26 @@ mod tests {
$[kind=block, caption=Some\, text\\] 1+1=2 $ $[kind=block, caption=Some\, text\\] 1+1=2 $
$|[env=another] Non Math \LaTeX|$ $|[env=another] Non Math \LaTeX|$
$[kind=block,env=another] e^{i\pi}=-1$ $[kind=block,env=another] e^{i\pi}=-1$
%<nml.tex.push_math("block", "1+1=2", nil, "Some, text\\")>%
%<nml.tex.push("block", "Non Math \\LaTeX", "another", nil)>%
%<nml.tex.push_math("block", "e^{i\\pi}=-1", "another", nil)>%
"# "#
.to_string(), .to_string(),
None, None,
)); ));
let parser = LangParser::default(); let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None); let doc = parser.parse(source, None);
validate_document!(doc.content().borrow(), 0, let borrow = doc.content().borrow();
Tex { mathmode == true, tex == "1+1=2", env == "main", caption == Some("Some, text\\".to_string()) }; let found = borrow
Tex { mathmode == false, tex == "Non Math \\LaTeX", env == "another" }; .iter()
Tex { mathmode == true, tex == "e^{i\\pi}=-1", env == "another" }; .filter_map(|e| e.downcast_ref::<Tex>())
Tex { mathmode == true, tex == "1+1=2", env == "main", caption == Some("Some, text\\".to_string()) }; .collect::<Vec<_>>();
Tex { mathmode == false, tex == "Non Math \\LaTeX", env == "another" };
Tex { mathmode == true, tex == "e^{i\\pi}=-1", env == "another" }; assert_eq!(found[0].tex, "1+1=2");
); assert_eq!(found[0].env, "main");
assert_eq!(found[0].caption, Some("Some, text\\".to_string()));
assert_eq!(found[1].tex, "Non Math \\LaTeX");
assert_eq!(found[1].env, "another");
assert_eq!(found[2].tex, "e^{i\\pi}=-1");
assert_eq!(found[2].env, "another");
} }
#[test] #[test]
@ -568,25 +478,31 @@ $[kind=block,env=another] e^{i\pi}=-1$
$[ caption=Some\, text\\] 1+1=2 $ $[ caption=Some\, text\\] 1+1=2 $
$|[env=another, kind=inline , caption = Enclosed \]. ] Non Math \LaTeX|$ $|[env=another, kind=inline , caption = Enclosed \]. ] Non Math \LaTeX|$
$[env=another] e^{i\pi}=-1$ $[env=another] e^{i\pi}=-1$
%<nml.tex.push_math("inline", "1+1=2", "main", "Some, text\\")>%
%<nml.tex.push("inline", "Non Math \\LaTeX", "another", "Enclosed ].")>%
%<nml.tex.push_math("inline", "e^{i\\pi}=-1", "another", nil)>%
"# "#
.to_string(), .to_string(),
None, None,
)); ));
let parser = LangParser::default(); let parser = LangParser::default();
let (doc, _) = parser.parse(ParserState::new(&parser, None), source, None); let doc = parser.parse(source, None);
validate_document!(doc.content().borrow(), 0, let borrow = doc.content().borrow();
Paragraph { let found = borrow
Tex { mathmode == true, tex == "1+1=2", env == "main", caption == Some("Some, text\\".to_string()) }; .first()
Tex { mathmode == false, tex == "Non Math \\LaTeX", env == "another", caption == Some("Enclosed ].".to_string()) }; .unwrap()
Tex { mathmode == true, tex == "e^{i\\pi}=-1", env == "another" }; .as_container()
Tex { mathmode == true, tex == "1+1=2", env == "main", caption == Some("Some, text\\".to_string()) }; .unwrap()
Tex { mathmode == false, tex == "Non Math \\LaTeX", env == "another", caption == Some("Enclosed ].".to_string()) }; .contained()
Tex { mathmode == true, tex == "e^{i\\pi}=-1", env == "another" }; .iter()
}; .filter_map(|e| e.downcast_ref::<Tex>())
); .collect::<Vec<_>>();
assert_eq!(found[0].tex, "1+1=2");
assert_eq!(found[0].env, "main");
assert_eq!(found[0].caption, Some("Some, text\\".to_string()));
assert_eq!(found[1].tex, "Non Math \\LaTeX");
assert_eq!(found[1].env, "another");
assert_eq!(found[1].caption, Some("Enclosed ].".to_string()));
assert_eq!(found[2].tex, "e^{i\\pi}=-1");
assert_eq!(found[2].env, "another");
	}
}
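The compile path above shells out to `latex2svg` and memoises the result through the sqlite-backed cache. As a rough illustration of that caching shape only, here is a self-contained sketch that uses an in-memory map in place of the database; `render_cached` and the hash key are assumptions for illustration, not the crate's actual `Cached` API.

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

// In-memory stand-in for the sqlite cache: the key is a hash of the formatted
// LaTeX source, the value is the rendered SVG. `render` stands in for latex2svg.
fn render_cached(
	cache: &mut HashMap<u64, String>,
	latex: &str,
	render: impl FnOnce(&str) -> String,
) -> String {
	let mut hasher = DefaultHasher::new();
	latex.hash(&mut hasher);
	let key = hasher.finish();

	cache
		.entry(key)
		.or_insert_with(|| render(latex)) // only invoke the renderer on a cache miss
		.clone()
}

fn main() {
	let mut cache = HashMap::new();
	let svg = render_cached(&mut cache, r"$1+1=2$", |src| format!("<svg><!-- {src} --></svg>"));
	// The second call with the same source hits the cache and skips rendering.
	let again = render_cached(&mut cache, r"$1+1=2$", |_| unreachable!());
	assert_eq!(svg, again);
}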

View file

@ -11,7 +11,7 @@ use crate::document::document::Document;
use crate::document::element::ElemKind; use crate::document::element::ElemKind;
use crate::document::element::Element; use crate::document::element::Element;
use crate::lua::kernel::CTX; use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState; use crate::parser::parser::Parser;
use crate::parser::rule::Rule; use crate::parser::rule::Rule;
use crate::parser::source::Cursor; use crate::parser::source::Cursor;
use crate::parser::source::Source; use crate::parser::source::Source;
@ -19,15 +19,15 @@ use crate::parser::source::Token;
#[derive(Debug)] #[derive(Debug)]
pub struct Text { pub struct Text {
pub location: Token, pub(self) location: Token,
pub content: String, pub(self) content: String,
} }
impl Text { impl Text {
pub fn new(location: Token, content: String) -> Text { pub fn new(location: Token, content: String) -> Text {
Text { Text {
location, location: location,
content, content: content,
} }
} }
} }
@ -36,45 +36,39 @@ impl Element for Text {
fn location(&self) -> &Token { &self.location } fn location(&self) -> &Token { &self.location }
fn kind(&self) -> ElemKind { ElemKind::Inline } fn kind(&self) -> ElemKind { ElemKind::Inline }
fn element_name(&self) -> &'static str { "Text" } fn element_name(&self) -> &'static str { "Text" }
fn to_string(&self) -> String { format!("{self:#?}") }
fn compile(&self, compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> { fn compile(&self, compiler: &Compiler, _document: &dyn Document) -> Result<String, String> {
Ok(Compiler::sanitize(compiler.target(), self.content.as_str())) Ok(Compiler::sanitize(compiler.target(), self.content.as_str()))
} }
} }
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::text")] #[derive(Default)]
pub struct TextRule; pub struct TextRule;
impl TextRule {
pub fn new() -> Self { Self {} }
}
impl Rule for TextRule { impl Rule for TextRule {
fn name(&self) -> &'static str { "Text" } fn name(&self) -> &'static str { "Text" }
fn previous(&self) -> Option<&'static str> { Some("Link") }
fn next_match(&self, _state: &ParserState, _cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> { fn next_match(&self, _cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> { None }
None
}
fn on_match( fn on_match(
&self, &self,
_state: &ParserState, _parser: &dyn Parser,
_document: &dyn Document, _document: &dyn Document,
_cursor: Cursor, _cursor: Cursor,
_match_data: Box<dyn Any>, _match_data: Option<Box<dyn Any>>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) { ) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
panic!("Text cannot match"); panic!("Text cannot match");
} }
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
let mut bindings = vec![]; let mut bindings = vec![];
bindings.push(( bindings.push((
"push".to_string(), "push".to_string(),
lua.create_function(|_, content: String| { lua.create_function(|_, content: String| {
CTX.with_borrow(|ctx| { CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| { ctx.as_ref().map(|ctx| {
ctx.state.push( ctx.parser.push(
ctx.document, ctx.document,
Box::new(Text { Box::new(Text {
location: ctx.location.clone(), location: ctx.location.clone(),

View file

@ -1,43 +1,10 @@
use crate::document::document::Document; use mlua::{Function, Lua};
use crate::document::variable::BaseVariable;
use crate::document::variable::PathVariable;
use crate::document::variable::Variable;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParserState;
use crate::parser::parser::ReportColors;
use crate::parser::rule::RegexRule;
use crate::parser::source::Source;
use crate::parser::source::Token;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
use mlua::Function;
use mlua::Lua;
use regex::Regex; use regex::Regex;
use std::ops::Range; use crate::{document::document::Document, parser::{parser::{Parser, ReportColors}, rule::RegexRule, source::{Source, Token}}};
use std::rc::Rc; use ariadne::{Report, Fmt, Label, ReportKind};
use std::str::FromStr; use crate::document::variable::{BaseVariable, PathVariable, Variable};
use std::{ops::Range, rc::Rc};
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum VariableKind {
Regular,
Path,
}
impl FromStr for VariableKind {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"regular" | "" => Ok(VariableKind::Regular),
"path" | "'" => Ok(VariableKind::Path),
_ => Err(format!("Unknown variable kind: `{s}`")),
}
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::variable")]
pub struct VariableRule {
	re: [Regex; 1],
	kinds: Vec<(String, String)>,
@ -47,56 +14,56 @@ impl VariableRule {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
re: [Regex::new(r"(?:^|\n)@([^[:alpha:]])?(.*?)=((?:\\\n|.)*)").unwrap()], re: [Regex::new(r"(?:^|\n)@([^[:alpha:]])?(.*?)=((?:\\\n|.)*)").unwrap()],
kinds: vec![("".into(), "Regular".into()), ("'".into(), "Path".into())], kinds: vec![
("".into(), "Regular".into()),
("'".into(), "Path".into())
]
} }
} }
pub fn make_variable( pub fn make_variable(&self, colors: &ReportColors, location: Token, kind: usize, name: String, value: String) -> Result<Rc<dyn Variable>, String>
&self, {
colors: &ReportColors, match self.kinds[kind].0.as_str()
location: Token, {
kind: usize, "" => {
name: String, Ok(Rc::new(BaseVariable::new(location, name, value)))
value: String, }
) -> Result<Rc<dyn Variable>, String> {
match self.kinds[kind].0.as_str() {
"" => Ok(Rc::new(BaseVariable::new(location, name, value))),
"'" => { "'" => {
match std::fs::canonicalize(value.as_str()) // TODO: not canonicalize match std::fs::canonicalize(value.as_str()) // TODO: not canonicalize
{ {
Ok(path) => Ok(Rc::new(PathVariable::new(location, name, path))), Ok(path) => Ok(Rc::new(PathVariable::new(location, name, path))),
Err(e) => Err(format!("Unable to canonicalize path `{}`: {}", Err(e) => Err(format!("Unable to canonicalize path `{}`: {}",
value.fg(colors.highlight), value.fg(colors.highlight),
e)) e.to_string()))
} }
} }
_ => panic!("Unhandled variable kind"), _ => panic!("Unhandled variable kind")
} }
} }
// Trim and check variable name for validity // Trim and check variable name for validity
pub fn validate_name<'a>( pub fn validate_name<'a>(colors: &ReportColors, original_name: &'a str) -> Result<&'a str, String>
colors: &ReportColors, {
original_name: &'a str,
) -> Result<&'a str, String> {
let name = original_name.trim_start().trim_end(); let name = original_name.trim_start().trim_end();
if name.contains("%") { if name.contains("%")
return Err(format!("Name cannot contain '{}'", "%".fg(colors.info))); {
return Err(format!("Name cannot contain '{}'",
"%".fg(colors.info)));
} }
Ok(name) return Ok(name);
} }
pub fn validate_value(original_value: &str) -> Result<String, String> { pub fn validate_value(_colors: &ReportColors, original_value: &str) -> Result<String, String>
{
let mut escaped = 0usize; let mut escaped = 0usize;
let mut result = String::new(); let mut result = String::new();
for c in original_value.trim_start().trim_end().chars() { for c in original_value.trim_start().trim_end().chars() {
if c == '\\' { if c == '\\' { escaped += 1 }
escaped += 1 else if c == '\n' {
} else if c == '\n' {
match escaped { match escaped {
0 => return Err("Unknown error while capturing value".to_string()),
// Remove '\n' // Remove '\n'
1 => {} 1 => {},
// Insert '\n' // Insert '\n'
_ => { _ => {
result.push(c); result.push(c);
@ -104,7 +71,8 @@ impl VariableRule {
} }
} }
escaped = 0; escaped = 0;
} else { }
else {
(0..escaped).for_each(|_| result.push('\\')); (0..escaped).for_each(|_| result.push('\\'));
escaped = 0; escaped = 0;
result.push(c); result.push(c);
@ -118,58 +86,38 @@ impl VariableRule {
impl RegexRule for VariableRule { impl RegexRule for VariableRule {
fn name(&self) -> &'static str { "Variable" } fn name(&self) -> &'static str { "Variable" }
fn previous(&self) -> Option<&'static str> { Some("Element Style") }
fn regexes(&self) -> &[Regex] { &self.re } fn regexes(&self) -> &[Regex] { &self.re }
fn on_regex_match( fn on_regex_match<'a>(&self, _: usize, parser: &dyn Parser, document: &'a dyn Document, token: Token, matches: regex::Captures) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>
&self, {
_: usize,
state: &ParserState,
document: &dyn Document,
token: Token,
matches: regex::Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
let mut result = vec![]; let mut result = vec![];
// [Optional] variable kind // [Optional] variable kind
let var_kind = match matches.get(1) { let var_kind = match matches.get(1)
{
Some(kind) => { Some(kind) => {
// Find kind // Find kind
let r = self let r = self.kinds.iter().enumerate().find(|(_i, (ref char, ref _name))| {
.kinds char == kind.as_str() });
.iter()
.enumerate()
.find(|(_i, (ref char, ref _name))| char == kind.as_str());
// Unknown kind specified // Unknown kind specified
if r.is_none() { if r.is_none()
{
result.push( result.push(
Report::build(ReportKind::Error, token.source(), kind.start()) Report::build(ReportKind::Error, token.source(), kind.start())
.with_message("Unknown variable kind") .with_message("Unknown variable kind")
.with_label( .with_label(
Label::new((token.source(), kind.range())) Label::new((token.source(), kind.range()))
.with_message(format!( .with_message(format!("Variable kind `{}` is unknown",
"Variable kind `{}` is unknown", kind.as_str().fg(parser.colors().highlight)))
kind.as_str().fg(state.parser.colors().highlight) .with_color(parser.colors().error))
)) .with_help(format!("Leave empty for regular variables. Available variable kinds:{}",
.with_color(state.parser.colors().error), self.kinds.iter().skip(1).fold("".to_string(), |acc, (char, name)| {
) acc + format!("\n - `{}` : {}",
.with_help(format!( char.fg(parser.colors().highlight),
"Leave empty for regular variables. Available variable kinds:{}", name.fg(parser.colors().info)).as_str()
self.kinds.iter().skip(1).fold( })))
"".to_string(), .finish());
|acc, (char, name)| {
acc + format!(
"\n - `{}` : {}",
char.fg(state.parser.colors().highlight),
name.fg(state.parser.colors().info)
)
.as_str()
}
)
))
.finish(),
);
return result; return result;
} }
@ -179,8 +127,11 @@ impl RegexRule for VariableRule {
None => 0, None => 0,
}; };
let var_name = match matches.get(2) { let var_name = match matches.get(2)
Some(name) => match VariableRule::validate_name(state.parser.colors(), name.as_str()) { {
Some(name) => {
match VariableRule::validate_name(&parser.colors(), name.as_str())
{
Ok(var_name) => var_name, Ok(var_name) => var_name,
Err(msg) => { Err(msg) => {
result.push( result.push(
@ -188,23 +139,23 @@ impl RegexRule for VariableRule {
.with_message("Invalid variable name") .with_message("Invalid variable name")
.with_label( .with_label(
Label::new((token.source(), name.range())) Label::new((token.source(), name.range()))
.with_message(format!( .with_message(format!("Variable name `{}` is not allowed. {msg}",
"Variable name `{}` is not allowed. {msg}", name.as_str().fg(parser.colors().highlight)))
name.as_str().fg(state.parser.colors().highlight) .with_color(parser.colors().error))
)) .finish());
.with_color(state.parser.colors().error),
)
.finish(),
);
return result; return result;
},
} }
}, },
_ => panic!("Unknown variable name"), _ => panic!("Unknown variable name")
}; };
let var_value = match matches.get(3) { let var_value = match matches.get(3)
Some(value) => match VariableRule::validate_value(value.as_str()) { {
Some(value) => {
match VariableRule::validate_value(&parser.colors(), value.as_str())
{
Ok(var_value) => var_value, Ok(var_value) => var_value,
Err(msg ) => { Err(msg ) => {
result.push( result.push(
@ -212,28 +163,20 @@ impl RegexRule for VariableRule {
.with_message("Invalid variable value") .with_message("Invalid variable value")
.with_label( .with_label(
Label::new((token.source(), value.range())) Label::new((token.source(), value.range()))
.with_message(format!( .with_message(format!("Variable value `{}` is not allowed. {msg}",
"Variable value `{}` is not allowed. {msg}", value.as_str().fg(parser.colors().highlight)))
value.as_str().fg(state.parser.colors().highlight) .with_color(parser.colors().error))
)) .finish());
.with_color(state.parser.colors().error),
)
.finish(),
);
return result; return result;
} }
}, }
_ => panic!("Invalid variable value"), }
_ => panic!("Invalid variable value")
}; };
match self.make_variable( match self.make_variable(&parser.colors(), token.clone(), var_kind, var_name.to_string(), var_value)
state.parser.colors(), {
token.clone(),
var_kind,
var_name.to_string(),
var_value,
) {
Ok(variable) => document.add_variable(variable), Ok(variable) => document.add_variable(variable),
Err(msg) => { Err(msg) => {
let m = matches.get(0).unwrap(); let m = matches.get(0).unwrap();
@ -242,63 +185,25 @@ impl RegexRule for VariableRule {
.with_message("Unable to create variable") .with_message("Unable to create variable")
.with_label( .with_label(
Label::new((token.source(), m.start()+1 .. m.end() )) Label::new((token.source(), m.start()+1 .. m.end() ))
.with_message(format!( .with_message(format!("Unable to create variable `{}`. {}",
"Unable to create variable `{}`. {}", var_name.fg(parser.colors().highlight),
var_name.fg(state.parser.colors().highlight), msg))
msg .with_color(parser.colors().error))
)) .finish());
.with_color(state.parser.colors().error),
)
.finish(),
);
return result; return result;
} }
} }
result return result;
} }
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { // TODO
let mut bindings = vec![]; fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
bindings.push(( }
"insert".to_string(),
lua.create_function(|_, (name, value): (String, String)| {
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
let var = Rc::new(BaseVariable::new(ctx.location.clone(), name, value));
ctx.document.add_variable(var);
})
});
Ok(()) pub struct VariableSubstitutionRule
})
.unwrap(),
));
bindings.push((
"get".to_string(),
lua.create_function(|_, name: String| {
let mut value: Option<String> = None;
CTX.with_borrow(|ctx| {
ctx.as_ref().map(|ctx| {
if let Some(var) = ctx.document.get_variable(name.as_str())
{ {
value = Some(var.to_string());
}
})
});
Ok(value)
})
.unwrap(),
));
bindings
}
}
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::variable")]
pub struct VariableSubstitutionRule {
re: [Regex; 1], re: [Regex; 1],
} }
@ -310,113 +215,107 @@ impl VariableSubstitutionRule {
} }
} }
impl RegexRule for VariableSubstitutionRule { impl RegexRule for VariableSubstitutionRule
{
fn name(&self) -> &'static str { "Variable Substitution" } fn name(&self) -> &'static str { "Variable Substitution" }
fn previous(&self) -> Option<&'static str> { Some("Variable") }
fn regexes(&self) -> &[regex::Regex] { &self.re } fn regexes(&self) -> &[regex::Regex] { &self.re }
fn on_regex_match<'a>( fn on_regex_match<'a>(&self, _index: usize, parser: &dyn Parser, document: &'a dyn Document<'a>, token: Token, matches: regex::Captures) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
&self,
_index: usize,
state: &ParserState,
document: &'a dyn Document<'a>,
token: Token,
matches: regex::Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
let mut result = vec![]; let mut result = vec![];
let variable = match matches.get(1) { let variable = match matches.get(1)
{
Some(name) => { Some(name) => {
// Empty name // Empty name
if name.as_str().is_empty() { if name.as_str().is_empty()
{
result.push( result.push(
Report::build(ReportKind::Error, token.source(), name.start()) Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Empty variable name") .with_message("Empty variable name")
.with_label( .with_label(
Label::new((token.source(), matches.get(0).unwrap().range())) Label::new((token.source(), matches.get(0).unwrap().range()))
.with_message("Missing variable name for substitution".to_string()) .with_message(format!("Missing variable name for substitution"))
.with_color(state.parser.colors().error), .with_color(parser.colors().error))
) .finish());
.finish(),
);
return result; return result;
} }
// Leading spaces // Leading spaces
else if name.as_str().trim_start() != name.as_str() { else if name.as_str().trim_start() != name.as_str()
{
result.push( result.push(
Report::build(ReportKind::Error, token.source(), name.start()) Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid variable name") .with_message("Invalid variable name")
.with_label( .with_label(
Label::new((token.source(), name.range())) Label::new((token.source(), name.range()))
.with_message("Variable names contains leading spaces".to_string()) .with_message(format!("Variable names contains leading spaces"))
.with_color(state.parser.colors().error), .with_color(parser.colors().error))
)
.with_help("Remove leading spaces") .with_help("Remove leading spaces")
.finish(), .finish());
);
return result; return result;
} }
// Trailing spaces // Trailing spaces
else if name.as_str().trim_end() != name.as_str() { else if name.as_str().trim_end() != name.as_str()
{
result.push( result.push(
Report::build(ReportKind::Error, token.source(), name.start()) Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid variable name") .with_message("Invalid variable name")
.with_label( .with_label(
Label::new((token.source(), name.range())) Label::new((token.source(), name.range()))
.with_message("Variable names contains trailing spaces".to_string()) .with_message(format!("Variable names contains trailing spaces"))
.with_color(state.parser.colors().error), .with_color(parser.colors().error))
)
.with_help("Remove trailing spaces") .with_help("Remove trailing spaces")
.finish(), .finish());
);
return result; return result;
} }
// Invalid name // Invalid name
if let Err(msg) = VariableRule::validate_name(state.parser.colors(), name.as_str()) { match VariableRule::validate_name(&parser.colors(), name.as_str())
{
Err(msg) =>
{
result.push( result.push(
Report::build(ReportKind::Error, token.source(), name.start()) Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Invalid variable name") .with_message("Invalid variable name")
.with_label( .with_label(
Label::new((token.source(), name.range())) Label::new((token.source(), name.range()))
.with_message(msg) .with_message(msg)
.with_color(state.parser.colors().error), .with_color(parser.colors().error))
) .finish());
.finish(),
);
return result; return result;
} }
_ => {},
}
// Get variable // Get variable
match document.get_variable(name.as_str()) { match document.get_variable(name.as_str())
{
None => { None => {
result.push( result.push(
Report::build(ReportKind::Error, token.source(), name.start()) Report::build(ReportKind::Error, token.source(), name.start())
.with_message("Unknown variable name") .with_message("Unknown variable name")
.with_label( .with_label(
Label::new((token.source(), name.range())) Label::new((token.source(), name.range()))
.with_message(format!( .with_message(format!("Unable to find variable with name: `{}`",
"Unable to find variable with name: `{}`", name.as_str().fg(parser.colors().highlight)))
name.as_str().fg(state.parser.colors().highlight) .with_color(parser.colors().error))
)) .finish());
.with_color(state.parser.colors().error),
)
.finish(),
);
return result; return result;
} }
Some(var) => var, Some(var) => var,
} }
} },
_ => panic!("Unknown error"), _ => panic!("Unknown error")
}; };
variable.parse(state, token, document); variable.parse(token, parser, document);
result return result;
} }
// TODO
fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
} }
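For reference, the definition regex from `VariableRule::new()` can be exercised on its own with the `regex` crate; the inputs below are illustrative and show how the optional kind marker, the name and the value fall out of the capture groups.

use regex::Regex;

fn main() {
	// Same pattern as in VariableRule::new(): group 1 is the optional kind marker,
	// group 2 the variable name, group 3 the value.
	let re = Regex::new(r"(?:^|\n)@([^[:alpha:]])?(.*?)=((?:\\\n|.)*)").unwrap();

	// Regular variable: `@name=value` (illustrative input)
	let caps = re.captures("@author=Jane").unwrap();
	assert!(caps.get(1).is_none());
	assert_eq!(&caps[2], "author");
	assert_eq!(&caps[3], "Jane");

	// Path variable: `@'name=path`, where `'` selects the Path kind
	let caps = re.captures("@'assets=images/logo.png").unwrap();
	assert_eq!(&caps[1], "'");
	assert_eq!(&caps[2], "assets");
	assert_eq!(&caps[3], "images/logo.png");

	println!("ok");
}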

View file

@ -1,6 +1,6 @@
use std::{cell::{Ref, RefCell, RefMut}, collections::HashMap, rc::Rc}; use std::{cell::{RefCell, RefMut}, collections::HashMap, rc::Rc};
use crate::{document::{customstyle::{CustomStyle, CustomStyleHolder}, document::Document, element::Element, layout::{LayoutHolder, LayoutType}, style::{ElementStyle, StyleHolder}}, lua::kernel::{Kernel, KernelHolder}, parser::{parser::{Parser, ReportColors}, rule::Rule, source::{Cursor, Source}, state::StateHolder}}; use crate::{document::{document::Document, element::Element}, lua::kernel::{Kernel, KernelHolder}, parser::{parser::{Parser, ReportColors}, rule::Rule, source::{Cursor, Source}, state::StateHolder}};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct LineCursor pub struct LineCursor
@ -94,3 +94,55 @@ impl From<&LineCursor> for Cursor
} }
} }
} }
#[derive(Debug)]
pub struct LsParser
{
rules: Vec<Box<dyn Rule>>,
colors: ReportColors,
// Parser state
pub state: RefCell<StateHolder>,
pub kernels: RefCell<HashMap<String, Kernel>>,
}
impl Parser for LsParser
{
fn colors(&self) -> &ReportColors { &self.colors }
fn rules(&self) -> &Vec<Box<dyn Rule>> { &self.rules }
fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>> { &mut self.rules }
fn state(&self) -> std::cell::Ref<'_, StateHolder> { self.state.borrow() }
fn state_mut(&self) -> std::cell::RefMut<'_, StateHolder> { self.state.borrow_mut() }
fn has_error(&self) -> bool { true }
fn push<'a>(&self, doc: &dyn Document, elem: Box<dyn Element>) {
todo!()
}
fn parse<'a>(&self, source: Rc<dyn Source>, parent: Option<&'a dyn Document<'a>>) -> Box<dyn Document<'a>+'a> {
todo!()
}
fn parse_into<'a>(&self, source: Rc<dyn Source>, document: &'a dyn Document<'a>) {
todo!()
}
}
impl KernelHolder for LsParser
{
fn get_kernel(&self, name: &str)
-> Option<RefMut<'_, Kernel>> {
RefMut::filter_map(self.kernels.borrow_mut(),
|map| map.get_mut(name)).ok()
}
fn insert_kernel(&self, name: String, kernel: Kernel)
-> RefMut<'_, Kernel> {
//TODO do not get
self.kernels.borrow_mut()
.insert(name.clone(), kernel);
self.get_kernel(name.as_str()).unwrap()
}
}

View file

@ -1,42 +1,47 @@
use std::cell::RefCell; use std::cell::{RefCell, RefMut};
use std::collections::HashMap;
use mlua::Lua; use mlua::Lua;
use crate::document::document::Document; use crate::{document::document::Document, parser::{parser::Parser, source::Token}};
use crate::parser::parser::Parser;
use crate::parser::parser::ParserState;
use crate::parser::source::Token;
pub struct KernelContext<'a, 'b, 'c> { pub struct KernelContext<'a, 'b>
{
pub location: Token, pub location: Token,
pub state: &'a ParserState<'a, 'b>, pub parser: &'a dyn Parser,
pub document: &'c dyn Document<'c>, pub document: &'b dyn Document<'b>,
//pub parser: &'a dyn Parser,
} }
thread_local! { thread_local! {
pub static CTX: RefCell<Option<KernelContext<'static, 'static, 'static>>> = const { RefCell::new(None) }; pub static CTX: RefCell<Option<KernelContext<'static, 'static>>> = RefCell::new(None);
} }
#[derive(Debug)] #[derive(Debug)]
pub struct Kernel { pub struct Kernel
{
lua: Lua, lua: Lua,
} }
impl Kernel { impl Kernel {
// TODO: Take parser as arg and
// iterate over the rules
// to find export the bindings (if some)
pub fn new(parser: &dyn Parser) -> Self { pub fn new(parser: &dyn Parser) -> Self {
let lua = Lua::new(); let lua = Lua::new();
{ {
let nml_table = lua.create_table().unwrap(); let nml_table = lua.create_table().unwrap();
for rule in parser.rules() { for rule in parser.rules()
{
let table = lua.create_table().unwrap(); let table = lua.create_table().unwrap();
// TODO: Export this so we can check for duplicate rules based on this name let name = rule.name().to_lowercase();
let name = rule.name().to_lowercase().replace(' ', "_");
for (fun_name, fun) in rule.register_bindings(&lua) { for (fun_name, fun) in rule.lua_bindings(&lua)
{
table.set(fun_name, fun).unwrap(); table.set(fun_name, fun).unwrap();
} }
nml_table.set(name, table).unwrap(); nml_table.set(name, table).unwrap();
} }
lua.globals().set("nml", nml_table).unwrap(); lua.globals().set("nml", nml_table).unwrap();
@ -49,9 +54,10 @@ impl Kernel {
///
/// This is the only way Lua code should be run, because exported
/// functions may require the context in order to operate
pub fn run_with_context<T, F>(&self, context: KernelContext, f: F) -> T pub fn run_with_context<T, F>(&self, context: KernelContext, f: F)
-> T
where where
F: FnOnce(&Lua) -> T, F: FnOnce(&Lua) -> T
{ {
CTX.set(Some(unsafe { std::mem::transmute(context) })); CTX.set(Some(unsafe { std::mem::transmute(context) }));
let ret = f(&self.lua); let ret = f(&self.lua);
@ -61,15 +67,9 @@ impl Kernel {
} }
} }
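Exported bindings read the active `KernelContext` from the `CTX` thread-local, which is why `run_with_context` is the only sanctioned way to evaluate Lua. A minimal self-contained sketch of that pattern, with a toy `Ctx` type standing in for the real context, looks like this:

use std::cell::RefCell;

// Toy stand-in for KernelContext; the real struct carries a Token, the
// ParserState and the current Document.
struct Ctx {
	name: String,
}

thread_local! {
	static CTX: RefCell<Option<Ctx>> = const { RefCell::new(None) };
}

// Same shape as Kernel::run_with_context: install the context, run the closure
// (which may read CTX through exported bindings), then clear it again.
fn run_with_context<T, F: FnOnce() -> T>(context: Ctx, f: F) -> T {
	CTX.set(Some(context));
	let ret = f();
	CTX.set(None);
	ret
}

fn main() {
	let greeting = run_with_context(Ctx { name: "main".into() }, || {
		CTX.with_borrow(|ctx| format!("kernel = {}", ctx.as_ref().unwrap().name))
	});
	println!("{greeting}");
}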
#[derive(Default)] pub trait KernelHolder
pub struct KernelHolder { {
kernels: HashMap<String, Kernel>, fn get_kernel(&self, name: &str) -> Option<RefMut<'_, Kernel>>;
}
impl KernelHolder { fn insert_kernel(&self, name: String, kernel: Kernel) -> RefMut<'_, Kernel>;
pub fn get(&self, kernel_name: &str) -> Option<&Kernel> { self.kernels.get(kernel_name) }
pub fn insert(&mut self, kernel_name: String, kernel: Kernel) {
self.kernels.insert(kernel_name, kernel);
}
} }

View file

@ -1,3 +1,4 @@
#![feature(char_indices_offset)]
mod cache;
mod compiler;
mod document;
@ -8,13 +9,23 @@ mod parser;
use std::env;
use std::io::BufWriter;
use std::io::Write;
use std::path::PathBuf;
use std::process::ExitCode; use std::process::ExitCode;
use std::rc::Rc;
use std::time::UNIX_EPOCH;
use compiler::compiler::CompiledDocument;
use compiler::compiler::Compiler;
use compiler::compiler::Target; use compiler::compiler::Target;
use compiler::navigation::create_navigation; use compiler::navigation::create_navigation;
use document::document::Document;
use getopts::Options; use getopts::Options;
use parser::langparser::LangParser;
use parser::parser::Parser;
use rusqlite::Connection;
use walkdir::WalkDir; use walkdir::WalkDir;
use crate::parser::source::SourceFile;
extern crate getopts; extern crate getopts;
fn print_usage(program: &str, opts: Options) { fn print_usage(program: &str, opts: Options) {
@ -36,6 +47,122 @@ NML version: 0.4\n"
); );
} }
fn parse(input: &str, debug_opts: &Vec<String>) -> Result<Box<dyn Document<'static>>, String> {
println!("Parsing {input}...");
let parser = LangParser::default();
// Parse
let source = SourceFile::new(input.to_string(), None).unwrap();
let doc = parser.parse(Rc::new(source), None);
if debug_opts.contains(&"ast".to_string()) {
println!("-- BEGIN AST DEBUGGING --");
doc.content()
.borrow()
.iter()
.for_each(|elem| println!("{}", (elem).to_string()));
println!("-- END AST DEBUGGING --");
}
if debug_opts.contains(&"ref".to_string()) {
println!("-- BEGIN REFERENCES DEBUGGING --");
let sc = doc.scope().borrow();
sc.referenceable.iter().for_each(|(name, reference)| {
println!(" - {name}: `{:#?}`", doc.get_from_reference(reference));
});
println!("-- END REFERENCES DEBUGGING --");
}
if debug_opts.contains(&"var".to_string()) {
println!("-- BEGIN VARIABLES DEBUGGING --");
let sc = doc.scope().borrow();
sc.variables.iter().for_each(|(_name, var)| {
println!(" - `{:#?}`", var);
});
println!("-- END VARIABLES DEBUGGING --");
}
if parser.has_error() {
return Err("Parsing failed aborted due to errors while parsing".to_string());
}
Ok(doc)
}
fn process(
target: Target,
files: Vec<PathBuf>,
db_path: &Option<String>,
force_rebuild: bool,
debug_opts: &Vec<String>,
) -> Result<Vec<CompiledDocument>, String> {
let mut compiled = vec![];
let current_dir = std::env::current_dir()
.map_err(|err| format!("Unable to get the current working directory: {err}"))?;
let con = db_path
.as_ref()
.map_or(Connection::open_in_memory(), |path| Connection::open(path))
.map_err(|err| format!("Unable to open connection to the database: {err}"))?;
CompiledDocument::init_cache(&con)
.map_err(|err| format!("Failed to initialize cached document table: {err}"))?;
for file in files {
let meta = std::fs::metadata(&file)
.map_err(|err| format!("Failed to get metadata for `{file:#?}`: {err}"))?;
let modified = meta
.modified()
.map_err(|err| format!("Unable to query modification time for `{file:#?}`: {err}"))?;
// Move to file's directory
let file_parent_path = file
.parent()
.ok_or(format!("Failed to get parent path for `{file:#?}`"))?;
std::env::set_current_dir(file_parent_path)
.map_err(|err| format!("Failed to move to path `{file_parent_path:#?}`: {err}"))?;
let parse_and_compile = || -> Result<CompiledDocument, String> {
// Parse
let doc = parse(file.to_str().unwrap(), debug_opts)?;
// Compile
let compiler = Compiler::new(target, db_path.clone());
let mut compiled = compiler.compile(&*doc);
// Insert into cache
compiled.mtime = modified.duration_since(UNIX_EPOCH).unwrap().as_secs();
compiled.insert_cache(&con).map_err(|err| {
format!("Failed to insert compiled document from `{file:#?}` into cache: {err}")
})?;
Ok(compiled)
};
let cdoc = if force_rebuild {
parse_and_compile()?
} else {
match CompiledDocument::from_cache(&con, file.to_str().unwrap()) {
Some(compiled) => {
if compiled.mtime < modified.duration_since(UNIX_EPOCH).unwrap().as_secs() {
parse_and_compile()?
} else {
compiled
}
}
None => parse_and_compile()?,
}
};
compiled.push(cdoc);
}
std::env::set_current_dir(current_dir)
.map_err(|err| format!("Failed to set current directory: {err}"))?;
Ok(compiled)
}
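The cache check in `process` comes down to comparing the stored mtime against the file's current modification time. A hedged, self-contained sketch of that decision follows; the helper name and the `Cargo.toml` path are illustrative only.

use std::path::Path;
use std::time::UNIX_EPOCH;

// Mirrors the decision in `process` above: rebuild when a rebuild is forced,
// when there is no cached entry, or when the file on disk is newer than the cache.
fn needs_rebuild(file: &Path, cached_mtime: Option<u64>, force: bool) -> std::io::Result<bool> {
	let modified = std::fs::metadata(file)?
		.modified()?
		.duration_since(UNIX_EPOCH)
		.unwrap()
		.as_secs();

	Ok(match cached_mtime {
		_ if force => true,
		None => true,
		Some(mtime) => mtime < modified,
	})
}

fn main() -> std::io::Result<()> {
	// `Cargo.toml` is only an illustrative path; any existing file works here.
	let stale = needs_rebuild(Path::new("Cargo.toml"), Some(0), false)?;
	println!("rebuild needed: {stale}");
	Ok(())
}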
fn main() -> ExitCode { fn main() -> ExitCode {
let args: Vec<String> = env::args().collect(); let args: Vec<String> = env::args().collect();
let program = args[0].clone(); let program = args[0].clone();
@ -72,7 +199,7 @@ fn main() -> ExitCode {
let input_meta = match std::fs::metadata(&input) { let input_meta = match std::fs::metadata(&input) {
Ok(meta) => meta, Ok(meta) => meta,
Err(e) => { Err(e) => {
eprintln!("Unable to get metadata for input `{input}`: {e}"); eprintln!("Unable to get metadata for input: `{input}`");
return ExitCode::FAILURE; return ExitCode::FAILURE;
} }
}; };
@ -89,14 +216,9 @@ fn main() -> ExitCode {
} }
} }
match std::fs::metadata(&output) { match std::fs::metadata(&output) {
Ok(output_meta) => { Ok(_) => {}
if !output_meta.is_dir() {
eprintln!("Input is a directory, but ouput is not a directory, halting");
return ExitCode::FAILURE;
}
}
Err(e) => { Err(e) => {
eprintln!("Unable to get metadata for output `{output}`: {e}"); eprintln!("Unable to get metadata for output: `{output}`");
return ExitCode::FAILURE; return ExitCode::FAILURE;
} }
} }
@ -104,7 +226,7 @@ fn main() -> ExitCode {
let output_meta = match std::fs::metadata(&output) { let output_meta = match std::fs::metadata(&output) {
Ok(meta) => meta, Ok(meta) => meta,
Err(e) => { Err(e) => {
eprintln!("Unable to get metadata for output `{output}`: {e}"); eprintln!("Unable to get metadata for output: `{output}`");
return ExitCode::FAILURE; return ExitCode::FAILURE;
} }
}; };
@ -180,7 +302,7 @@ fn main() -> ExitCode {
} }
} }
Err(e) => { Err(e) => {
eprintln!("Faield to get metadata for `{entry:#?}`: {e}"); eprintln!("Faield to get metadata for `{entry:#?}`");
return ExitCode::FAILURE; return ExitCode::FAILURE;
} }
} }
@ -213,10 +335,8 @@ fn main() -> ExitCode {
} }
// Parse, compile using the cache // Parse, compile using the cache
let processed = let compiled = match process(Target::HTML, files, &db_path, force_rebuild, &debug_opts) {
match compiler::process::process(Target::HTML, files, &db_path, force_rebuild, &debug_opts) Ok(compiled) => compiled,
{
Ok(processed) => processed,
Err(e) => { Err(e) => {
eprintln!("{e}"); eprintln!("{e}");
return ExitCode::FAILURE; return ExitCode::FAILURE;
@ -227,7 +347,7 @@ fn main() -> ExitCode {
// Batch mode // Batch mode
{ {
// Build navigation // Build navigation
let navigation = match create_navigation(&processed) { let navigation = match create_navigation(&compiled) {
Ok(nav) => nav, Ok(nav) => nav,
Err(e) => { Err(e) => {
eprintln!("{e}"); eprintln!("{e}");
@ -236,54 +356,38 @@ fn main() -> ExitCode {
}; };
// Output // Output
for (doc, _) in &processed { for doc in compiled {
let out_path = match doc let out_path = match doc
.borrow()
.get_variable("compiler.output") .get_variable("compiler.output")
.or(input_meta.is_file().then_some(&output)) .or(input_meta.is_file().then_some(&output))
{ {
Some(path) => path.clone(), Some(path) => path.clone(),
None => { None => {
eprintln!("Unable to get output file for `{}`", doc.borrow().input); eprintln!("Unable to get output file for `{}`", doc.input);
continue; continue;
} }
}; };
let nav = navigation.compile(Target::HTML, doc); let nav = navigation.compile(Target::HTML, &doc);
let file = std::fs::File::create(output.clone() + "/" + out_path.as_str()).unwrap(); let file = std::fs::File::create(output.clone() + "/" + out_path.as_str()).unwrap();
let mut writer = BufWriter::new(file); let mut writer = BufWriter::new(file);
write!( write!(writer, "{}{}{}{}", doc.header, nav, doc.body, doc.footer).unwrap();
writer,
"{}{}{}{}",
doc.borrow().header,
nav,
doc.borrow().body,
doc.borrow().footer
)
.unwrap();
writer.flush().unwrap(); writer.flush().unwrap();
} }
} else } else
// Single file // Single file
{ {
for (doc, _) in &processed { for doc in compiled {
let file = std::fs::File::create(output.clone()).unwrap(); let file = std::fs::File::create(output.clone()).unwrap();
let mut writer = BufWriter::new(file); let mut writer = BufWriter::new(file);
write!( write!(writer, "{}{}{}", doc.header, doc.body, doc.footer).unwrap();
writer,
"{}{}{}",
doc.borrow().header,
doc.borrow().body,
doc.borrow().footer
)
.unwrap();
writer.flush().unwrap(); writer.flush().unwrap();
} }
} }
ExitCode::SUCCESS return ExitCode::SUCCESS;
} }

View file

@ -1,62 +0,0 @@
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use std::ops::Deref;
use ariadne::Report;
use crate::document::document::Document;
use crate::parser::source::Source;
use crate::parser::source::Token;
use super::parser::ParserState;
#[derive(Debug, PartialEq, Eq)]
pub enum CustomStyleToken {
Toggle(String),
Pair(String, String),
}
pub trait CustomStyle: core::fmt::Debug {
/// Name for the custom style
fn name(&self) -> &str;
/// Gets the begin and end token for a custom style
fn tokens(&self) -> &CustomStyleToken;
fn on_start<'a>(
&self,
location: Token,
state: &ParserState,
document: &'a (dyn Document<'a> + 'a),
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>>;
fn on_end<'a>(
&self,
location: Token,
state: &ParserState,
document: &'a (dyn Document<'a> + 'a),
) -> Vec<Report<(Rc<dyn Source>, Range<usize>)>>;
}
#[derive(Default)]
pub struct CustomStyleHolder {
custom_styles: HashMap<String, Rc<dyn CustomStyle>>,
}
impl CustomStyleHolder {
pub fn get(&self, style_name: &str) -> Option<Rc<dyn CustomStyle>> {
self.custom_styles
.get(style_name).cloned()
}
pub fn insert(&mut self, style: Rc<dyn CustomStyle>) {
self.custom_styles.insert(style.name().into(), style);
}
}
impl Deref for CustomStyleHolder {
type Target = HashMap<String, Rc<dyn CustomStyle>>;
fn deref(&self) -> &Self::Target {
&self.custom_styles
}
}

View file

@ -1,18 +1,35 @@
use std::cell::RefCell; use std::cell::RefCell;
use std::cell::RefMut;
use std::collections::HashMap;
use std::collections::HashSet;
use std::ops::Range;
use std::rc::Rc; use std::rc::Rc;
use ariadne::Label;
use ariadne::Report;
use crate::document::document::Document; use crate::document::document::Document;
use crate::document::document::DocumentAccessors;
use crate::document::element::ContainerElement;
use crate::document::element::DocumentEnd; use crate::document::element::DocumentEnd;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::document::langdocument::LangDocument; use crate::document::langdocument::LangDocument;
use crate::elements::paragraph::Paragraph;
use crate::elements::registrar::register;
use crate::elements::text::Text; use crate::elements::text::Text;
use crate::lua::kernel::Kernel;
use crate::lua::kernel::KernelHolder;
use crate::parser::source::SourceFile;
use crate::parser::source::VirtualSource;
use super::parser::Parser; use super::parser::Parser;
use super::parser::ParserState;
use super::parser::ReportColors; use super::parser::ReportColors;
use super::rule::Rule; use super::rule::Rule;
use super::source::Cursor; use super::source::Cursor;
use super::source::Source; use super::source::Source;
use super::source::Token; use super::source::Token;
use super::state::StateHolder;
use super::util; use super::util;
/// Parser for the language /// Parser for the language
@ -23,6 +40,8 @@ pub struct LangParser {
// Parser state // Parser state
pub err_flag: RefCell<bool>, pub err_flag: RefCell<bool>,
pub state: RefCell<StateHolder>,
pub kernels: RefCell<HashMap<String, Kernel>>,
} }
impl LangParser { impl LangParser {
@ -31,15 +50,72 @@ impl LangParser {
rules: vec![], rules: vec![],
colors: ReportColors::with_colors(), colors: ReportColors::with_colors(),
err_flag: RefCell::new(false), err_flag: RefCell::new(false),
state: RefCell::new(StateHolder::new()),
kernels: RefCell::new(HashMap::new()),
}; };
register(&mut s);
// Register rules s.kernels
for rule in super::rule::get_rule_registry() .borrow_mut()
{ .insert("main".to_string(), Kernel::new(&s));
s.add_rule(rule).unwrap(); s
} }
s fn handle_reports<'a>(
&self,
_source: Rc<dyn Source>,
reports: Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>>,
) {
for mut report in reports {
let mut sources: HashSet<Rc<dyn Source>> = HashSet::new();
fn recurse_source(sources: &mut HashSet<Rc<dyn Source>>, source: Rc<dyn Source>) {
sources.insert(source.clone());
match source.location() {
Some(parent) => {
let parent_source = parent.source();
if sources.get(&parent_source).is_none() {
recurse_source(sources, parent_source);
}
}
None => {}
}
}
report.labels.iter().for_each(|label| {
recurse_source(&mut sources, label.span.0.clone());
});
let cache = sources
.iter()
.map(|source| (source.clone(), source.content().clone()))
.collect::<Vec<(Rc<dyn Source>, String)>>();
cache.iter().for_each(|(source, _)| {
if let Some(location) = source.location() {
if let Some(_s) = source.downcast_ref::<SourceFile>() {
report.labels.push(
Label::new((location.source(), location.start() + 1..location.end()))
.with_message("In file included from here")
.with_order(-1),
);
};
if let Some(_s) = source.downcast_ref::<VirtualSource>() {
let start = location.start()
+ (location.source().content().as_bytes()[location.start()]
== '\n' as u8)
.then_some(1)
.unwrap_or(0);
report.labels.push(
Label::new((location.source(), start..location.end()))
.with_message("In evaluation of")
.with_order(-1),
);
};
}
});
report.eprint(ariadne::sources(cache)).unwrap()
}
} }
} }
@ -49,15 +125,50 @@ impl Parser for LangParser {
fn rules(&self) -> &Vec<Box<dyn Rule>> { &self.rules } fn rules(&self) -> &Vec<Box<dyn Rule>> { &self.rules }
fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>> { &mut self.rules } fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>> { &mut self.rules }
fn state(&self) -> std::cell::Ref<'_, StateHolder> { self.state.borrow() }
fn state_mut(&self) -> std::cell::RefMut<'_, StateHolder> { self.state.borrow_mut() }
fn has_error(&self) -> bool { *self.err_flag.borrow() } fn has_error(&self) -> bool { *self.err_flag.borrow() }
fn parse<'p, 'a, 'doc>( /// Add an [`Element`] to the [`Document`]
&'p self, fn push<'a>(&self, doc: &dyn Document, elem: Box<dyn Element>) {
state: ParserState<'p, 'a>, if elem.kind() == ElemKind::Inline || elem.kind() == ElemKind::Invisible {
let mut paragraph = doc
.last_element_mut::<Paragraph>()
.or_else(|| {
doc.push(Box::new(Paragraph {
location: elem.location().clone(),
content: Vec::new(),
}));
doc.last_element_mut::<Paragraph>()
})
.unwrap();
paragraph.push(elem).unwrap();
} else {
// Process paragraph events
if doc.last_element::<Paragraph>().is_some_and(|_| true) {
self.handle_reports(
doc.source(),
self.state_mut()
.on_scope_end(self, doc, super::state::Scope::PARAGRAPH),
);
}
doc.push(elem);
}
}
fn parse<'a>(
&self,
source: Rc<dyn Source>, source: Rc<dyn Source>,
parent: Option<&'doc dyn Document<'doc>>, parent: Option<&'a dyn Document<'a>>,
) -> (Box<dyn Document<'doc> + 'doc>, ParserState<'p, 'a>) { ) -> Box<dyn Document<'a> + 'a> {
let doc = LangDocument::new(source.clone(), parent); let doc = LangDocument::new(source.clone(), parent);
let mut matches = Vec::new();
for _ in 0..self.rules.len() {
matches.push((0usize, None));
}
let content = source.content(); let content = source.content();
let mut cursor = Cursor::new(0usize, doc.source()); // Cursor in file let mut cursor = Cursor::new(0usize, doc.source()); // Cursor in file
@ -65,21 +176,21 @@ impl Parser for LangParser {
if let Some(parent) = parent if let Some(parent) = parent
// Terminate parent's paragraph state // Terminate parent's paragraph state
{ {
self.handle_reports(state.shared.rule_state.borrow_mut().on_scope_end( self.handle_reports(
&state, parent.source(),
parent, self.state_mut()
super::state::Scope::PARAGRAPH, .on_scope_end(self, parent, super::state::Scope::PARAGRAPH),
)); );
} }
loop { loop {
let (rule_pos, mut result) = state.update_matches(&cursor); let (rule_pos, rule, match_data) = self.update_matches(&cursor, &mut matches);
// Unmatched content // Unmatched content
let text_content = let text_content =
util::process_text(&doc, &content.as_str()[cursor.pos..rule_pos.pos]); util::process_text(&doc, &content.as_str()[cursor.pos..rule_pos.pos]);
if !text_content.is_empty() { if !text_content.is_empty() {
state.push( self.push(
&doc, &doc,
Box::new(Text::new( Box::new(Text::new(
Token::new(cursor.pos..rule_pos.pos, source.clone()), Token::new(cursor.pos..rule_pos.pos, source.clone()),
@ -88,13 +199,12 @@ impl Parser for LangParser {
); );
} }
if let Some((rule_index, match_data)) = result.take() { if let Some(rule) = rule {
// Rule callback // Rule callback
let dd: &'a dyn Document = unsafe { std::mem::transmute(&doc as &dyn Document) }; let dd: &'a dyn Document = unsafe { std::mem::transmute(&doc as &dyn Document) };
let (new_cursor, reports) = let (new_cursor, reports) = rule.on_match(self, dd, rule_pos, match_data);
self.rules[rule_index].on_match(&state, dd, rule_pos, match_data);
self.handle_reports(reports); self.handle_reports(doc.source(), reports);
// Advance // Advance
cursor = new_cursor; cursor = new_cursor;
@ -105,14 +215,14 @@ impl Parser for LangParser {
} }
} }
// Rule States // State
self.handle_reports(state.shared.rule_state.borrow_mut().on_scope_end( self.handle_reports(
&state, doc.source(),
&doc, self.state_mut()
super::state::Scope::DOCUMENT, .on_scope_end(self, &doc, super::state::Scope::DOCUMENT),
)); );
state.push( self.push(
&doc, &doc,
Box::new(DocumentEnd(Token::new( Box::new(DocumentEnd(Token::new(
doc.source().content().len()..doc.source().content().len(), doc.source().content().len()..doc.source().content().len(),
@ -120,26 +230,26 @@ impl Parser for LangParser {
))), ))),
); );
(Box::new(doc), state) return Box::new(doc);
}
fn parse_into<'a>(&self, source: Rc<dyn Source>, document: &'a dyn Document<'a>) {
let mut matches = Vec::new();
for _ in 0..self.rules.len() {
matches.push((0usize, None));
} }
fn parse_into<'p, 'a, 'doc>(
&'p self,
state: ParserState<'p, 'a>,
source: Rc<dyn Source>,
document: &'doc dyn Document<'doc>,
) -> ParserState<'p, 'a> {
let content = source.content(); let content = source.content();
let mut cursor = Cursor::new(0usize, source.clone()); let mut cursor = Cursor::new(0usize, source.clone());
loop { loop {
let (rule_pos, mut result) = state.update_matches(&cursor); let (rule_pos, rule, match_data) = self.update_matches(&cursor, &mut matches);
// Unmatched content // Unmatched content
let text_content = let text_content =
util::process_text(document, &content.as_str()[cursor.pos..rule_pos.pos]); util::process_text(document, &content.as_str()[cursor.pos..rule_pos.pos]);
if !text_content.is_empty() { if !text_content.is_empty() {
state.push( self.push(
document, document,
Box::new(Text::new( Box::new(Text::new(
Token::new(cursor.pos..rule_pos.pos, source.clone()), Token::new(cursor.pos..rule_pos.pos, source.clone()),
@ -148,12 +258,11 @@ impl Parser for LangParser {
); );
} }
if let Some((rule_index, match_data)) = result.take() { if let Some(rule) = rule {
// Rule callback // Rule callback
let (new_cursor, reports) = let (new_cursor, reports) = (*rule).on_match(self, document, rule_pos, match_data);
self.rules[rule_index].on_match(&state, document, rule_pos, match_data);
self.handle_reports(reports); self.handle_reports(document.source(), reports);
// Advance // Advance
cursor = new_cursor; cursor = new_cursor;
@ -164,7 +273,6 @@ impl Parser for LangParser {
} }
} }
state
// State // State
//self.handle_reports(source.clone(), //self.handle_reports(source.clone(),
// self.state_mut().on_scope_end(&self, &document, super::state::Scope::DOCUMENT)); // self.state_mut().on_scope_end(&self, &document, super::state::Scope::DOCUMENT));
@ -172,3 +280,15 @@ impl Parser for LangParser {
//return doc; //return doc;
} }
} }
impl KernelHolder for LangParser {
fn get_kernel(&self, name: &str) -> Option<RefMut<'_, Kernel>> {
RefMut::filter_map(self.kernels.borrow_mut(), |map| map.get_mut(name)).ok()
}
fn insert_kernel(&self, name: String, kernel: Kernel) -> RefMut<'_, Kernel> {
//TODO do not get
self.kernels.borrow_mut().insert(name.clone(), kernel);
self.get_kernel(name.as_str()).unwrap()
}
}
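Both `parse` and `parse_into` in the hunks above drive the same loop: find the rule whose next match is nearest, turn the unmatched text before it into a `Text` element, dispatch the winning rule, and continue from the cursor it returns. A reduced, self-contained sketch of that loop (a hypothetical `ToyRule` type and plain strings instead of `Source`/`Document`; not the crate's API):

// Minimal sketch of the match-dispatch loop; not the crate's API.
// A "rule" is reduced to a name plus a matcher returning (start, end).
struct ToyRule {
    name: &'static str,
    find: fn(&str, usize) -> Option<(usize, usize)>,
}

fn parse_toy(content: &str, rules: &[ToyRule]) {
    let mut pos = 0usize;
    while pos < content.len() {
        // Query every rule and keep the one matching closest to `pos`.
        let winner = rules
            .iter()
            .filter_map(|r| (r.find)(content, pos).map(|m| (r, m)))
            .min_by_key(|(_, (start, _))| *start);

        match winner {
            None => {
                // No rule matches: the rest of the input is plain text.
                println!("text: {:?}", &content[pos..]);
                break;
            }
            Some((rule, (start, end))) => {
                if start > pos {
                    // Unmatched content before the rule becomes a text element.
                    println!("text: {:?}", &content[pos..start]);
                }
                println!("rule {:?}: {:?}", rule.name, &content[start..end]);
                pos = end; // advance the cursor past the match
            }
        }
    }
}

fn main() {
    let rules = [ToyRule {
        name: "comment",
        find: |s, from| s[from..].find("::").map(|i| (from + i, from + i + 2)),
    }];
    parse_toy("hello :: world", &rules);
}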

View file

@ -1,45 +0,0 @@
use std::any::Any;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use crate::compiler::compiler::Compiler;
use crate::document::document::Document;
use crate::elements::layout::LayoutToken;
/// Represents the type of a layout
pub trait LayoutType: core::fmt::Debug {
/// Name of the layout
fn name(&self) -> &'static str;
/// Parses layout properties
fn parse_properties(&self, properties: &str) -> Result<Option<Box<dyn Any>>, String>;
/// Expected number of blocks
fn expects(&self) -> Range<usize>;
/// Compile layout
fn compile(
&self,
token: LayoutToken,
id: usize,
properties: &Option<Box<dyn Any>>,
compiler: &Compiler,
document: &dyn Document,
) -> Result<String, String>;
}
#[derive(Default)]
pub struct LayoutHolder {
layouts: HashMap<String, Rc<dyn LayoutType>>,
}
impl LayoutHolder {
pub fn get(&self, layout_name: &str) -> Option<Rc<dyn LayoutType>> {
self.layouts.get(layout_name).cloned()
}
pub fn insert(&mut self, layout: Rc<dyn LayoutType>) {
self.layouts.insert(layout.name().into(), layout);
}
}
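For orientation, here is a reduced, self-contained sketch of defining and registering a layout. The trait is deliberately trimmed (no `Compiler`, `Document` or `LayoutToken` parameters) so the example stands alone; the real trait is the one shown above, and `Centered` here is only illustrative.

use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;

// Trimmed-down stand-in for the `LayoutType` trait shown above.
trait ToyLayout: std::fmt::Debug {
    fn name(&self) -> &'static str;
    /// How many blocks this layout accepts.
    fn expects(&self) -> Range<usize>;
    /// Emit the opening markup for block `id`.
    fn compile(&self, id: usize) -> String;
}

#[derive(Debug)]
struct Centered;

impl ToyLayout for Centered {
    fn name(&self) -> &'static str { "Centered" }
    fn expects(&self) -> Range<usize> { 1..2 } // a single block (upper bound exclusive)
    fn compile(&self, _id: usize) -> String {
        r#"<div class="centered">"#.to_string()
    }
}

#[derive(Default)]
struct ToyLayoutHolder {
    layouts: HashMap<String, Rc<dyn ToyLayout>>,
}

impl ToyLayoutHolder {
    fn insert(&mut self, layout: Rc<dyn ToyLayout>) {
        self.layouts.insert(layout.name().into(), layout);
    }
    fn get(&self, name: &str) -> Option<Rc<dyn ToyLayout>> {
        self.layouts.get(name).cloned()
    }
}

fn main() {
    let mut holder = ToyLayoutHolder::default();
    holder.insert(Rc::new(Centered));
    let layout = holder.get("Centered").unwrap();
    println!("{} expects {:?} block(s)", layout.name(), layout.expects());
    println!("{}", layout.compile(0));
}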

View file

@ -4,6 +4,3 @@ pub mod rule;
pub mod source; pub mod source;
pub mod state; pub mod state;
pub mod util; pub mod util;
pub mod style;
pub mod layout;
pub mod customstyle;

View file

@ -1,29 +1,16 @@
use ariadne::Label;
use ariadne::Report;
use std::any::Any; use std::any::Any;
use std::cell::RefCell; use std::cell::Ref;
use std::collections::HashSet; use std::cell::RefMut;
use std::ops::Range;
use std::rc::Rc; use std::rc::Rc;
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
use super::customstyle::CustomStyleHolder;
use super::layout::LayoutHolder;
use super::rule::Rule; use super::rule::Rule;
use super::source::Cursor; use super::source::Cursor;
use super::source::Source; use super::source::Source;
use super::state::RuleStateHolder; use super::state::StateHolder;
use super::style::StyleHolder;
use crate::document::document::Document; use crate::document::document::Document;
use crate::document::document::DocumentAccessors;
use crate::document::element::ContainerElement;
use crate::document::element::ElemKind;
use crate::document::element::Element; use crate::document::element::Element;
use crate::elements::paragraph::Paragraph;
use crate::lua::kernel::Kernel;
use crate::lua::kernel::KernelHolder; use crate::lua::kernel::KernelHolder;
use crate::parser::source::SourceFile;
use crate::parser::source::VirtualSource;
use ariadne::Color; use ariadne::Color;
#[derive(Debug)] #[derive(Debug)]
@ -54,141 +41,79 @@ impl ReportColors {
} }
} }
/// The state that is shared with the state's children pub trait Parser: KernelHolder {
pub struct SharedState { /// Gets the colors for formatting errors
pub rule_state: RefCell<RuleStateHolder>, ///
/// When colors are disabled, all colors should resolve to empty string
fn colors(&self) -> &ReportColors;
/// The lua [`Kernel`]s fn rules(&self) -> &Vec<Box<dyn Rule>>;
pub kernels: RefCell<KernelHolder>, fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>>;
/// The styles fn add_rule(&mut self, rule: Box<dyn Rule>, after: Option<&'static str>) -> Result<(), String> {
pub styles: RefCell<StyleHolder>, // Error on duplicate rule
let rule_name = (*rule).name();
/// The layouts if let Err(e) = self.rules().iter().try_for_each(|rule| {
pub layouts: RefCell<LayoutHolder>, if (*rule).name() != rule_name {
return Ok(());
/// The custom styles
pub custom_styles: RefCell<CustomStyleHolder>,
} }
impl SharedState { return Err(format!(
/// Construct a new empty shared state "Attempted to introduce duplicate rule: `{rule_name}`"
pub(self) fn new(parser: &dyn Parser) -> Self { ));
let s = Self { }) {
rule_state: RefCell::new(RuleStateHolder::default()), return Err(e);
kernels: RefCell::new(KernelHolder::default()),
styles: RefCell::new(StyleHolder::default()),
layouts: RefCell::new(LayoutHolder::default()),
custom_styles: RefCell::new(CustomStyleHolder::default()),
};
// Register default kernel
s.kernels
.borrow_mut()
.insert("main".to_string(), Kernel::new(parser));
// Default styles & layouts
parser.rules().iter().for_each(|rule| {
rule.register_styles(&mut s.styles.borrow_mut());
rule.register_layouts(&mut s.layouts.borrow_mut());
});
s
}
} }
/// The state of the parser match after {
pub struct ParserState<'a, 'b> { Some(name) => {
/// The parser for which this state exists let before = self
pub parser: &'a dyn Parser,
/// The (optional) parent state
parent: Option<&'b ParserState<'a, 'b>>,
/// The position of the matches in the current state
matches: RefCell<Vec<(usize, Option<Box<dyn Any>>)>>,
/// State shared among all states
pub shared: Rc<SharedState>,
}
/// Represents the state of the parser
///
/// This state has some shared data from [`SharedState`] which gets shared
/// with the children of that state, see [`ParserState::with_state`]
impl<'a, 'b> ParserState<'a, 'b> {
/// Constructs a new state for a given parser with an optional parent
///
/// Parent should be None when parsing a brand new document. If you have to
/// set the parent to Some(..) (e.g. for imports or sub-documents), be sure
/// to use the [`ParserState::with_state`] method instead; this creates a
/// RAII-lived state for use within a bounded lifetime.
pub fn new(parser: &'a dyn Parser, parent: Option<&'a ParserState<'a, 'b>>) -> Self {
let matches = parser.rules().iter().map(|_| (0, None)).collect::<Vec<_>>();
let shared = if let Some(parent) = &parent {
parent.shared.clone()
} else {
Rc::new(SharedState::new(parser))
};
Self {
parser,
parent,
matches: RefCell::new(matches),
shared,
}
}
/// Runs a procedure with a new state that inherits the [`SharedState`] state from [`self`]
///
/// Note: When parsing a new document, create a new state, then the parsing process
/// creates states using this method
pub fn with_state<F, R>(&self, f: F) -> R
where
F: FnOnce(ParserState) -> R,
{
let new_state = ParserState::new(self.parser, Some(self));
f(new_state)
}
/// Updates matches from a given start position, e.g. a [`Cursor`]
///
/// # Return
///
/// 1. The cursor position after updating the matches
/// 2. (Optional) The winning match with its match data.
/// If the winning match is None, it means that the document has no more
/// rules to match, i.e. the rest of the content should be added as a
/// [`Text`] element.
/// The match data should be passed to the [`Rule::on_match`] method.
///
/// # Strategy
///
/// This function calls [`Rule::next_match`] on the rules defined for the
/// parser. It then takes the rule that has the closest `next_match` and
/// returns it. If next_match starts on an escaped character, i.e. `\\`,
/// then it starts over to find another match for that rule.
/// In case multiple rules have the same `next_match`, the rules that are
/// defined first in the parser are prioritized. See [Parser::add_rule] for
/// information on how to prioritize rules.
///
/// Note that the result of every call to [`Rule::next_match`] gets stored
/// in a table: [`ParserState::matches`]. Until the cursor steps over a
/// position in the table, `next_match` won't be called.
pub fn update_matches(&self, cursor: &Cursor) -> (Cursor, Option<(usize, Box<dyn Any>)>) {
let mut matches_borrow = self.matches.borrow_mut();
self.parser
.rules() .rules()
.iter() .iter()
.zip(matches_borrow.iter_mut()) .enumerate()
.find(|(_pos, r)| (r).name() == name);
match before {
Some((pos, _)) => self.rules_mut().insert(pos + 1, rule),
_ => {
return Err(format!(
"Unable to find rule named `{name}`, to insert rule `{}` after it",
rule.name()
))
}
}
}
_ => self.rules_mut().push(rule),
}
Ok(())
}
fn state(&self) -> Ref<'_, StateHolder>;
fn state_mut(&self) -> RefMut<'_, StateHolder>;
fn has_error(&self) -> bool;
// Updates [`matches`] and returns the position of the next matched rule.
// If the returned rule is None, it means that there are no rules left to parse
// (i.e. end of document).
fn update_matches(
&self,
cursor: &Cursor,
matches: &mut Vec<(usize, Option<Box<dyn Any>>)>,
) -> (Cursor, Option<&Box<dyn Rule>>, Option<Box<dyn Any>>) {
// Update matches
// TODO: Trivially parallelizable
self.rules()
.iter()
.zip(matches.iter_mut())
.for_each(|(rule, (matched_at, match_data))| { .for_each(|(rule, (matched_at, match_data))| {
// Don't update if not stepped over yet // Don't update if not stepped over yet
if *matched_at > cursor.pos { if *matched_at > cursor.pos {
return; return;
} }
(*matched_at, *match_data) = match rule.next_match(self, cursor) { (*matched_at, *match_data) = match rule.next_match(cursor) {
None => (usize::MAX, None), None => (usize::MAX, None),
Some((mut pos, mut data)) => { Some((mut pos, mut data)) => {
// Check if escaped // Check if escaped
@ -198,7 +123,7 @@ impl<'a, 'b> ParserState<'a, 'b> {
let mut escaped = false; let mut escaped = false;
'inner: loop { 'inner: loop {
let g = graphemes.next_back(); let g = graphemes.next_back();
if g.is_none() || g.unwrap() != "\\" { if !g.is_some() || g.unwrap() != "\\" {
break 'inner; break 'inner;
} }
@ -209,7 +134,7 @@ impl<'a, 'b> ParserState<'a, 'b> {
} }
// Find next potential match // Find next potential match
(pos, data) = match rule.next_match(self, &cursor.at(pos + 1)) { (pos, data) = match rule.next_match(&cursor.at(pos + 1)) {
Some((new_pos, new_data)) => (new_pos, new_data), Some((new_pos, new_data)) => (new_pos, new_data),
None => (usize::MAX, data), // Stop iterating None => (usize::MAX, data), // Stop iterating
} }
@ -221,216 +146,36 @@ impl<'a, 'b> ParserState<'a, 'b> {
}); });
// Get winning match // Get winning match
let (winner, next_pos) = matches_borrow let (winner, (next_pos, _match_data)) = matches
.iter() .iter()
.enumerate() .enumerate()
.min_by_key(|(_, (pos, _))| pos) .min_by_key(|(_, (pos, _match_data))| pos)
.map(|(winner, (pos, _))| (winner, *pos))
.unwrap(); .unwrap();
if *next_pos == usize::MAX
if next_pos == usize::MAX
// No rule has matched // No rule has matched
{ {
let content = cursor.source.content(); let content = cursor.source.content();
// No winners, i.e no matches left // No winners, i.e no matches left
return (cursor.at(content.len()), None); return (cursor.at(content.len()), None, None);
} }
( (
cursor.at(next_pos), cursor.at(*next_pos),
Some((winner, matches_borrow[winner].1.take().unwrap())), Some(&self.rules()[winner]),
std::mem::replace(&mut matches[winner].1, None),
) )
} }
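The escape handling inside `update_matches` walks backwards from the candidate match and counts consecutive backslashes: an odd count means the match itself is escaped, so the rule is asked for its next occurrence instead. A standalone sketch of that check, using `char`s where the original iterates graphemes:

/// Returns true when the character at `pos` is preceded by an odd number
/// of consecutive backslashes, i.e. the match at `pos` is escaped.
fn is_escaped(content: &str, pos: usize) -> bool {
    let backslashes = content[..pos]
        .chars()
        .rev()
        .take_while(|c| *c == '\\')
        .count();
    backslashes % 2 == 1
}

fn main() {
    assert!(is_escaped(r"a \*bold", 3));     // `\*`: the `*` is escaped
    assert!(!is_escaped(r"a \\*bold", 4));   // `\\*`: the backslash is escaped, `*` is not
    assert!(!is_escaped("plain *bold", 6));  // no escape at all
}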
/// Add an [`Element`] to the [`Document`] /// Add an [`Element`] to the [`Document`]
pub fn push(&self, doc: &dyn Document, elem: Box<dyn Element>) { fn push<'a>(&self, doc: &dyn Document, elem: Box<dyn Element>);
if elem.kind() == ElemKind::Inline || elem.kind() == ElemKind::Invisible {
let mut paragraph = doc
.last_element_mut::<Paragraph>()
.or_else(|| {
doc.push(Box::new(Paragraph {
location: elem.location().clone(),
content: Vec::new(),
}));
doc.last_element_mut::<Paragraph>()
})
.unwrap();
paragraph.push(elem).unwrap();
} else {
// Process paragraph events
if doc.last_element::<Paragraph>().is_some_and(|_| true) {
self.parser
.handle_reports(self.shared.rule_state.borrow_mut().on_scope_end(
self,
doc,
super::state::Scope::PARAGRAPH,
));
}
doc.push(elem);
}
}
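Both versions of `push` encode the same paragraphing rule: inline and invisible elements are appended to the last open `Paragraph` (creating one if needed), while a block element first ends the paragraph scope (the real code also fires the `PARAGRAPH` scope-end handlers at that point). Reduced to plain data, with hypothetical `Kind`/`Node` types, the behaviour is roughly:

#[derive(Debug, PartialEq)]
enum Kind { Inline, Invisible, Block }

#[derive(Debug)]
enum Node {
    Paragraph(Vec<&'static str>),
    Block(&'static str),
}

fn push(doc: &mut Vec<Node>, kind: Kind, name: &'static str) {
    if kind == Kind::Inline || kind == Kind::Invisible {
        // Append to the last paragraph, opening one if none is active.
        if !matches!(doc.last(), Some(Node::Paragraph(_))) {
            doc.push(Node::Paragraph(Vec::new()));
        }
        if let Some(Node::Paragraph(content)) = doc.last_mut() {
            content.push(name);
        }
    } else {
        // A block element ends the current paragraph scope.
        doc.push(Node::Block(name));
    }
}

fn main() {
    let mut doc = Vec::new();
    push(&mut doc, Kind::Inline, "text");
    push(&mut doc, Kind::Inline, "style");
    push(&mut doc, Kind::Block, "code");
    push(&mut doc, Kind::Inline, "text");
    println!("{doc:?}"); // [Paragraph(["text", "style"]), Block("code"), Paragraph(["text"])]
}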
/// Resets the position and the match_data for a given rule. This is used
/// to support 'dynamic' rules that may not match at first, but whose
/// matching condition is modified during the parsing process.
///
/// This function also recursively calls itself on its `parent`, in order
/// to fully reset the match.
///
/// See [`CustomStyleRule`] for an example of how this is used.
///
/// # Error
///
/// Returns an error if `rule_name` was not found in the parser's ruleset.
pub fn reset_match(&self, rule_name: &str) -> Result<(), String>
{
if self.parser.rules().iter()
.zip(self.matches.borrow_mut().iter_mut())
.try_for_each(|(rule, (match_pos, match_data))| {
if rule.name() != rule_name { return Ok(()) }
*match_pos = 0;
match_data.take();
Err(())
}).is_ok()
{
return Err(format!("Could not find rule: {rule_name}"));
}
// Recursively reset
if let Some(parent) = self.parent
{
return parent.reset_match(rule_name);
}
Ok(())
}
}
pub trait Parser {
/// Gets the colors for formatting errors
///
/// When colors are disabled, all colors should resolve to empty string
fn colors(&self) -> &ReportColors;
/// Gets a reference to all the [`Rule`]s defined for the parser
fn rules(&self) -> &Vec<Box<dyn Rule>>;
/// Gets a mutable reference to all the [`Rule`]s defined for the parser
fn rules_mut(&mut self) -> &mut Vec<Box<dyn Rule>>;
/// Whether the parser emitted an error during its parsing process
fn has_error(&self) -> bool;
/// Parse [`Source`] into a new [`Document`] /// Parse [`Source`] into a new [`Document`]
/// fn parse<'a>(
/// # Errors &self,
///
/// This method will not fail because we try to optimistically recover from
/// parsing errors. However, the resulting document should not get compiled
/// if an error has happened; see [`Parser::has_error()`] for reference
///
/// # Returns
///
/// This method returns the resulting [`Document`] after parsing `source`;
/// note that the returned [`ParserState`] is only meant for testing and not
/// meant to be reused.
fn parse<'p, 'a, 'doc>(
&'p self,
state: ParserState<'p, 'a>,
source: Rc<dyn Source>, source: Rc<dyn Source>,
parent: Option<&'doc dyn Document<'doc>>, parent: Option<&'a dyn Document<'a>>,
) -> (Box<dyn Document<'doc> + 'doc>, ParserState<'p, 'a>); ) -> Box<dyn Document<'a> + 'a>;
/// Parse [`Source`] into an already existing [`Document`] /// Parse [`Source`] into an already existing [`Document`]
/// fn parse_into<'a>(&self, source: Rc<dyn Source>, document: &'a dyn Document<'a>);
/// # Errors
///
/// This method will not fail because we try to optimistically recover from
/// parsing errors. However, the resulting document should not get compiled
/// if an error has happened; see [`Parser::has_error()`] for reference
///
/// # Returns
///
/// The returned [`ParserState`] is not meant to be reused, it's meant for
/// testing.
fn parse_into<'p, 'a, 'doc>(
&'p self,
state: ParserState<'p, 'a>,
source: Rc<dyn Source>,
document: &'doc dyn Document<'doc>,
) -> ParserState<'p, 'a>;
/// Adds a rule to the parser.
///
/// # Warning
///
/// This method must not be called if a [`ParserState`] for this parser exists.
fn add_rule(&mut self, rule: Box<dyn Rule>) -> Result<(), String> {
if self
.rules()
.iter()
.any(|other_rule| other_rule.name() == rule.name())
{
return Err(format!(
"Attempted to introduce duplicate rule: `{}`",
rule.name()
));
}
self.rules_mut().push(rule);
Ok(())
}
/// Handles the reports produced by parsing. The default is to output them
/// to stderr, but you are free to modify it.
fn handle_reports(&self, reports: Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
for mut report in reports {
let mut sources: HashSet<Rc<dyn Source>> = HashSet::new();
fn recurse_source(sources: &mut HashSet<Rc<dyn Source>>, source: Rc<dyn Source>) {
sources.insert(source.clone());
if let Some(parent) = source.location() {
let parent_source = parent.source();
if sources.get(&parent_source).is_none() {
recurse_source(sources, parent_source);
}
}
}
report.labels.iter().for_each(|label| {
recurse_source(&mut sources, label.span.0.clone());
});
let cache = sources
.iter()
.map(|source| (source.clone(), source.content().clone()))
.collect::<Vec<(Rc<dyn Source>, String)>>();
cache.iter().for_each(|(source, _)| {
if let Some(location) = source.location() {
if let Some(_s) = source.downcast_ref::<SourceFile>() {
report.labels.push(
Label::new((location.source(), location.start() + 1..location.end()))
.with_message("In file included from here")
.with_order(-1),
);
};
if let Some(_s) = source.downcast_ref::<VirtualSource>() {
let start = location.start()
+ if location.source().content().as_bytes()[location.start()]
== b'\n' { 1 } else { 0 };
report.labels.push(
Label::new((location.source(), start..location.end()))
.with_message("In evaluation of")
.with_order(-1),
);
};
}
});
report.eprint(ariadne::sources(cache)).unwrap()
}
}
} }

View file

@ -1,98 +1,32 @@
use super::layout::LayoutHolder; use super::parser::Parser;
use super::parser::ParserState;
use super::source::Cursor; use super::source::Cursor;
use super::source::Source; use super::source::Source;
use super::source::Token; use super::source::Token;
use super::style::StyleHolder;
use crate::document::document::Document; use crate::document::document::Document;
use ariadne::Report; use ariadne::Report;
use downcast_rs::impl_downcast;
use downcast_rs::Downcast;
use mlua::Function; use mlua::Function;
use mlua::Lua; use mlua::Lua;
use std::any::Any; use std::any::Any;
use std::collections::HashMap;
use std::ops::Range; use std::ops::Range;
use std::rc::Rc; use std::rc::Rc;
macro_rules! create_registry { pub trait Rule {
( $($construct:expr),+ $(,)? ) => {{ /// Returns rule's name
let mut map = HashMap::new();
$(
let boxed = Box::new($construct) as Box<dyn Rule>;
map.insert(boxed.name(), boxed);
)+
map
}};
}
/// Gets the list of all rules exported with the [`auto_registry`] proc macro.
/// Rules are sorted according to topological order using the [`Rule::previous`] method.
#[auto_registry::generate_registry(registry = "rules", target = make_rules, return_type = HashMap<&'static str, Box<dyn Rule>>, maker = create_registry)]
pub fn get_rule_registry() -> Vec<Box<dyn Rule>> {
fn cmp(
map: &HashMap<&'static str, Box<dyn Rule>>,
lname: &'static str,
rname: &'static str,
) -> std::cmp::Ordering {
let l = map.get(lname).unwrap();
let r = map.get(rname).unwrap();
if l.previous() == Some(r.name()) {
std::cmp::Ordering::Greater
} else if r.previous() == Some(l.name()) {
std::cmp::Ordering::Less
} else if l.previous().is_some() && r.previous().is_none() {
std::cmp::Ordering::Greater
} else if r.previous().is_some() && l.previous().is_none() {
std::cmp::Ordering::Less
} else if let (Some(pl), Some(pr)) = (l.previous(), r.previous()) {
cmp(map, pl, pr)
} else {
std::cmp::Ordering::Equal
}
}
let mut map = make_rules();
let mut sorted_keys = map.keys().copied().collect::<Vec<_>>();
sorted_keys.sort_by(|l, r| cmp(&map, l, r));
let mut owned = Vec::with_capacity(sorted_keys.len());
for key in sorted_keys {
let rule = map.remove(key).unwrap();
owned.push(rule);
}
owned
}
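`get_rule_registry` orders the auto-registered rules using their `previous()` hints: a rule sorts after the rule it names, and rules without a hint come first. One simplified way to obtain the same chain order is to sort by the length of the `previous` chain (this sketch assumes the chains are acyclic and is not the crate's comparator):

use std::collections::HashMap;

/// (rule name, name of the rule it wants to come after)
type RuleInfo = (&'static str, Option<&'static str>);

/// Length of the `previous` chain, used as a sort key:
/// rules with no predecessor come first, then their successors, and so on.
fn depth(map: &HashMap<&'static str, RuleInfo>, name: &'static str) -> usize {
    match map.get(name).and_then(|(_, prev)| *prev) {
        Some(prev) => depth(map, prev) + 1,
        None => 0,
    }
}

fn main() {
    let rules: Vec<RuleInfo> = vec![
        ("Text", Some("Link")),
        ("Comment", None),
        ("Link", Some("Section")),
        ("Section", Some("Comment")),
    ];
    let map: HashMap<_, _> = rules.iter().map(|r| (r.0, *r)).collect();

    let mut names: Vec<_> = rules.iter().map(|r| r.0).collect();
    names.sort_by_key(|&n| depth(&map, n));
    println!("{names:?}"); // ["Comment", "Section", "Link", "Text"]
}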
pub trait Rule: Downcast {
/// The rule name
fn name(&self) -> &'static str; fn name(&self) -> &'static str;
/// The name of the rule that should come before this one
fn previous(&self) -> Option<&'static str>;
/// Finds the next match starting from [`cursor`] /// Finds the next match starting from [`cursor`]
fn next_match(&self, state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)>; fn next_match(&self, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)>;
/// Callback when rule matches /// Callback when rule matches
fn on_match<'a>( fn on_match<'a>(
&self, &self,
state: &ParserState, parser: &dyn Parser,
document: &'a (dyn Document<'a> + 'a), document: &'a (dyn Document<'a> + 'a),
cursor: Cursor, cursor: Cursor,
match_data: Box<dyn Any>, match_data: Option<Box<dyn Any>>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>); ) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>);
/// Export bindings to lua
/// Registers lua bindings fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)>;
fn register_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
/// Registers default styles
fn register_styles(&self, _holder: &mut StyleHolder) {}
/// Registers default layouts
fn register_layouts(&self, _holder: &mut LayoutHolder) {}
} }
impl_downcast!(Rule);
impl core::fmt::Debug for dyn Rule { impl core::fmt::Debug for dyn Rule {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@ -100,12 +34,47 @@ impl core::fmt::Debug for dyn Rule {
} }
} }
pub trait RegexRule { /*
/// The rule name pub trait RegexRule: Rule
{
fn name(&self) -> &'static str; fn name(&self) -> &'static str;
/// The name of the rule that should come before this one /// Returns the rule's regex
fn previous(&self) -> Option<&'static str>; fn regex(&self) -> &regex::Regex;
/// Callback on regex rule match
fn on_regex_match<'a>(&self, parser: &Parser, document: &Document, token: Token<'a>, matches: regex::Captures) -> Vec<Report<'a, (String, Range<usize>)>>;
}
impl<T: RegexRule> Rule for T {
fn name(&self) -> &'static str { RegexRule::name(self) }
/// Finds the next match starting from [`cursor`]
fn next_match<'a>(&self, cursor: &'a Cursor) -> Option<usize>
{
let re = self.regex();
let content = cursor.file.content.as_ref().unwrap();
match re.find_at(content.as_str(), cursor.pos)
{
Some(m) => Some(m.start()),
None => None,
}
}
fn on_match<'a>(&self, parser: &Parser, document: &Document, cursor: Cursor<'a>) -> (Cursor<'a>, Vec<Report<'a, (String, Range<usize>)>>)
{
let content = cursor.file.content.as_ref().unwrap();
let matches = self.regex().captures_at(content.as_str(), cursor.pos).unwrap();
let token = Token::new(cursor.pos, matches.get(0).unwrap().len(), cursor.file);
let token_end = token.end();
(cursor.at(token_end), self.on_regex_match(parser, document, token, matches))
}
}
*/
pub trait RegexRule {
fn name(&self) -> &'static str;
/// Returns the rule's regexes /// Returns the rule's regexes
fn regexes(&self) -> &[regex::Regex]; fn regexes(&self) -> &[regex::Regex];
@ -114,51 +83,55 @@ pub trait RegexRule {
fn on_regex_match<'a>( fn on_regex_match<'a>(
&self, &self,
index: usize, index: usize,
state: &ParserState, parser: &dyn Parser,
document: &'a (dyn Document<'a> + 'a), document: &'a (dyn Document<'a> + 'a),
token: Token, token: Token,
matches: regex::Captures, matches: regex::Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>; ) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>;
fn register_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] } fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)>;
fn register_styles(&self, _holder: &mut StyleHolder) {}
fn register_layouts(&self, _holder: &mut LayoutHolder) {}
} }
impl<T: RegexRule + 'static> Rule for T { impl<T: RegexRule> Rule for T {
fn name(&self) -> &'static str { RegexRule::name(self) } fn name(&self) -> &'static str {
fn previous(&self) -> Option<&'static str> { RegexRule::previous(self) } RegexRule::name(self)
}
/// Finds the next match starting from [`cursor`] /// Finds the next match starting from [`cursor`]
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> { fn next_match(&self, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
let content = cursor.source.content(); let content = cursor.source.content();
let mut found: Option<(usize, usize)> = None; let mut found: Option<(usize, usize)> = None;
self.regexes().iter().enumerate().for_each(|(id, re)| { self.regexes().iter().enumerate().for_each(|(id, re)| {
if let Some(m) = re.find_at(content.as_str(), cursor.pos) { if let Some(m) = re.find_at(content.as_str(), cursor.pos) {
found = found found = found
.map(|(f_pos, f_id)| { .and_then(|(f_pos, f_id)| {
if f_pos > m.start() { if f_pos > m.start() {
(m.start(), id) Some((m.start(), id))
} else { } else {
(f_pos, f_id) Some((f_pos, f_id))
} }
}) })
.or(Some((m.start(), id))); .or(Some((m.start(), id)));
} }
}); });
found.map(|(pos, id)| (pos, Box::new(id) as Box<dyn Any>)) return found.map(|(pos, id)| (pos, Box::new(id) as Box<dyn Any>));
} }
fn on_match<'a>( fn on_match<'a>(
&self, &self,
state: &ParserState, parser: &dyn Parser,
document: &'a (dyn Document<'a> + 'a), document: &'a (dyn Document<'a> + 'a),
cursor: Cursor, cursor: Cursor,
match_data: Box<dyn Any>, match_data: Option<Box<dyn Any>>,
) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) { ) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>) {
let content = cursor.source.content(); let content = cursor.source.content();
let index = match_data.downcast::<usize>().unwrap(); let index = unsafe {
match_data
.unwrap_unchecked()
.downcast::<usize>()
.unwrap_unchecked()
};
let re = &self.regexes()[*index]; let re = &self.regexes()[*index];
let captures = re.captures_at(content.as_str(), cursor.pos).unwrap(); let captures = re.captures_at(content.as_str(), cursor.pos).unwrap();
@ -167,54 +140,11 @@ impl<T: RegexRule + 'static> Rule for T {
let token_end = token.end(); let token_end = token.end();
return ( return (
cursor.at(token_end), cursor.at(token_end),
self.on_regex_match(*index, state, document, token, captures), self.on_regex_match(*index, parser, document, token, captures),
); );
} }
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
self.register_bindings(lua) self.lua_bindings(lua)
}
fn register_styles(&self, holder: &mut StyleHolder) { self.register_styles(holder); }
fn register_layouts(&self, holder: &mut LayoutHolder) { self.register_layouts(holder); }
}
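The blanket `Rule` impl for `RegexRule` tries each of the rule's regexes at the cursor and keeps the earliest match, storing the regex index as match data so `on_match` knows which pattern fired. With the `regex` crate (already a dependency of nml), the selection step alone looks roughly like this:

use regex::Regex;

/// Returns the start position of the earliest match among `regexes`,
/// together with the index of the regex that produced it.
fn next_match(regexes: &[Regex], content: &str, pos: usize) -> Option<(usize, usize)> {
    regexes
        .iter()
        .enumerate()
        .filter_map(|(id, re)| re.find_at(content, pos).map(|m| (m.start(), id)))
        .min_by_key(|(start, _)| *start)
}

fn main() {
    let regexes = [
        Regex::new(r"\*\*").unwrap(), // bold marker
        Regex::new(r"__").unwrap(),   // underline marker
    ];
    let found = next_match(&regexes, "plain __text** end", 0);
    assert_eq!(found, Some((6, 1))); // `__` at byte 6, produced by regex #1
}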
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn registry() {
let rules = get_rule_registry();
let names: Vec<&'static str> = rules.iter().map(|rule| rule.name()).collect();
assert_eq!(
names,
vec![
"Comment",
"Paragraph",
"Import",
"Script",
"Element Style",
"Variable",
"Variable Substitution",
"Raw",
"List",
"Blockquote",
"Code",
"Tex",
"Graphviz",
"Media",
"Layout",
"Style",
"Custom Style",
"Section",
"Link",
"Text",
"Reference",
]
);
} }
} }

View file

@ -24,6 +24,7 @@ impl core::fmt::Display for dyn Source {
} }
impl core::fmt::Debug for dyn Source { impl core::fmt::Debug for dyn Source {
// TODO
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Source{{{}}}", self.name()) write!(f, "Source{{{}}}", self.name())
} }
@ -54,10 +55,10 @@ impl SourceFile {
pub fn new(path: String, location: Option<Token>) -> Result<Self, String> { pub fn new(path: String, location: Option<Token>) -> Result<Self, String> {
match fs::read_to_string(&path) { match fs::read_to_string(&path) {
Err(_) => { Err(_) => {
Err(format!( return Err(String::from(format!(
"Unable to read file content: `{}`", "Unable to read file content: `{}`",
path path
)) )))
} }
Ok(content) => Ok(Self { Ok(content) => Ok(Self {
location, location,
@ -69,9 +70,9 @@ impl SourceFile {
pub fn with_content(path: String, content: String, location: Option<Token>) -> Self { pub fn with_content(path: String, content: String, location: Option<Token>) -> Self {
Self { Self {
location, location: location,
path, path: path,
content, content: content,
} }
} }
} }
@ -161,7 +162,7 @@ impl Token {
} }
pub fn source(&self) -> Rc<dyn Source> { pub fn source(&self) -> Rc<dyn Source> {
self.source.clone() return self.source.clone();
} }
/// Construct Token from a range /// Construct Token from a range
@ -175,10 +176,10 @@ impl Token {
} }
pub fn start(&self) -> usize { pub fn start(&self) -> usize {
self.range.start return self.range.start;
} }
pub fn end(&self) -> usize { pub fn end(&self) -> usize {
self.range.end return self.range.end;
} }
} }

View file

@ -9,7 +9,7 @@ use downcast_rs::Downcast;
use crate::document::document::Document; use crate::document::document::Document;
use super::parser::ParserState; use super::parser::Parser;
use super::source::Source; use super::source::Source;
/// Scope for state objects /// Scope for state objects
@ -25,69 +25,75 @@ pub enum Scope {
PARAGRAPH = 2, PARAGRAPH = 2,
} }
pub trait RuleState: Downcast { pub trait State: Downcast {
/// Returns the state's [`Scope`] /// Returns the state's [`Scope`]
fn scope(&self) -> Scope; fn scope(&self) -> Scope;
/// Callback called when state goes out of scope /// Callback called when state goes out of scope
fn on_remove<'a>( fn on_remove<'a>(
&self, &self,
state: &ParserState, parser: &dyn Parser,
document: &dyn Document, document: &dyn Document,
) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>>; ) -> Vec<Report<'a, (Rc<dyn Source>, Range<usize>)>>;
} }
impl_downcast!(RuleState); impl_downcast!(State);
impl core::fmt::Debug for dyn RuleState { impl core::fmt::Debug for dyn State {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "State{{Scope: {:#?}}}", self.scope()) write!(f, "State{{Scope: {:#?}}}", self.scope())
} }
} }
/// Object owning all the states /// Object owning all the states
#[derive(Default)] #[derive(Debug)]
pub struct RuleStateHolder { pub struct StateHolder {
states: HashMap<String, Rc<RefCell<dyn RuleState>>>, data: HashMap<String, Rc<RefCell<dyn State>>>,
} }
impl RuleStateHolder { impl StateHolder {
pub fn new() -> Self {
Self {
data: HashMap::new(),
}
}
// Attempts to push [`state`]. On collision, returns an error with the already present state
pub fn insert( pub fn insert(
&mut self, &mut self,
name: String, name: String,
state: Rc<RefCell<dyn RuleState>>, state: Rc<RefCell<dyn State>>,
) -> Result<Rc<RefCell<dyn RuleState>>, String> { ) -> Result<Rc<RefCell<dyn State>>, Rc<RefCell<dyn State>>> {
if self.states.contains_key(name.as_str()) { match self.data.insert(name, state.clone()) {
return Err(format!("Attempted to insert duplicate RuleState: {name}")); Some(state) => Err(state),
_ => Ok(state),
} }
self.states.insert(name, state.clone());
Ok(state)
} }
pub fn get(&self, state_name: &str) -> Option<Rc<RefCell<dyn RuleState>>> { pub fn query(&self, name: &String) -> Option<Rc<RefCell<dyn State>>> {
self.states.get(state_name).cloned() self.data.get(name).map_or(None, |st| Some(st.clone()))
} }
pub fn on_scope_end( pub fn on_scope_end(
&mut self, &mut self,
state: &ParserState, parser: &dyn Parser,
document: &dyn Document, document: &dyn Document,
scope: Scope, scope: Scope,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> { ) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
let mut reports = vec![]; let mut result = vec![];
self.states.retain(|_name, rule_state| { self.data.retain(|_name, state| {
if rule_state.borrow().scope() >= scope { if state.borrow().scope() >= scope {
rule_state state
.borrow_mut() .borrow()
.on_remove(state, document) .on_remove(parser, document)
.drain(..) .drain(..)
.for_each(|report| reports.push(report)); .for_each(|report| result.push(report));
false false
} else { } else {
true true
} }
}); });
reports return result;
} }
} }
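`on_scope_end` removes every state whose scope is at least the scope being closed and collects the reports produced by the `on_remove` callbacks. The same retain-and-collect pattern, stripped of the parser and report types (hypothetical `ToyState`, strings instead of `Report`s):

use std::collections::HashMap;

#[derive(PartialEq, PartialOrd)]
enum Scope {
    Document = 0,
    Paragraph = 2,
}

struct ToyState {
    scope: Scope,
    /// Message emitted when the state is dropped at the end of its scope.
    on_remove: &'static str,
}

fn on_scope_end(states: &mut HashMap<String, ToyState>, ending: Scope) -> Vec<String> {
    let mut reports = Vec::new();
    states.retain(|name, state| {
        if state.scope >= ending {
            reports.push(format!("{name}: {}", state.on_remove));
            false // drop the state
        } else {
            true // outlives this scope
        }
    });
    reports
}

fn main() {
    let mut states = HashMap::new();
    states.insert("list".to_string(), ToyState { scope: Scope::Paragraph, on_remove: "close list" });
    states.insert("toc".to_string(), ToyState { scope: Scope::Document, on_remove: "emit toc" });

    // Ending a paragraph only removes paragraph-scoped states.
    assert_eq!(on_scope_end(&mut states, Scope::Paragraph), vec!["list: close list"]);
    assert_eq!(states.len(), 1);
}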

View file

@ -1,78 +0,0 @@
use std::collections::HashMap;
use std::rc::Rc;
use downcast_rs::impl_downcast;
use downcast_rs::Downcast;
/// Styling for an element
pub trait ElementStyle: Downcast + core::fmt::Debug {
/// The style key
fn key(&self) -> &'static str;
/// Attempts to create a new style from a [`json`] string
///
/// # Errors
///
/// Will fail if deserialization fails
fn from_json(&self, json: &str) -> Result<Rc<dyn ElementStyle>, String>;
/// Attempts to deserialize lua table into a new style
fn from_lua(
&self,
lua: &mlua::Lua,
value: mlua::Value,
) -> Result<Rc<dyn ElementStyle>, mlua::Error>;
}
impl_downcast!(ElementStyle);
#[derive(Default)]
pub struct StyleHolder {
styles: HashMap<String, Rc<dyn ElementStyle>>,
}
impl StyleHolder {
/// Checks if a given style key is registered
pub fn is_registered(&self, style_key: &str) -> bool { self.styles.contains_key(style_key) }
/// Gets the current active style for an element
/// NOTE: Will panic if a style is not defined for a given element
/// If you need to process user input, use [`is_registered`]
pub fn current(&self, style_key: &str) -> Rc<dyn ElementStyle> {
self.styles.get(style_key).cloned().unwrap()
}
/// Sets the [`style`]
pub fn set_current(&mut self, style: Rc<dyn ElementStyle>) {
self.styles.insert(style.key().to_string(), style);
}
}
#[macro_export]
macro_rules! impl_elementstyle {
($t:ty, $key:expr) => {
impl $crate::parser::style::ElementStyle for $t {
fn key(&self) -> &'static str { $key }
fn from_json(
&self,
json: &str,
) -> Result<std::rc::Rc<dyn $crate::parser::style::ElementStyle>, String> {
serde_json::from_str::<$t>(json)
.map_err(|e| e.to_string())
.map(|obj| {
std::rc::Rc::new(obj) as std::rc::Rc<dyn $crate::parser::style::ElementStyle>
})
}
fn from_lua(
&self,
lua: &mlua::Lua,
value: mlua::Value,
) -> Result<std::rc::Rc<dyn $crate::parser::style::ElementStyle>, mlua::Error> {
mlua::LuaSerdeExt::from_value::<$t>(lua, value).map(|obj| {
std::rc::Rc::new(obj) as std::rc::Rc<dyn $crate::parser::style::ElementStyle>
})
}
}
};
}
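A rule that wants a configurable style would typically pair a serde-derived struct with this macro. The struct and key below are hypothetical, purely to illustrate the macro's intended use from within the crate:

// Hypothetical style struct used only to illustrate the macro above.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct CodeBlockStyle {
    /// Whether to render a line-number gutter.
    pub line_numbers: bool,
    /// Tab width used when rendering the block.
    pub tab_size: usize,
}

impl_elementstyle!(CodeBlockStyle, "style.code_block");

The style key string is then what `StyleHolder::is_registered` and `StyleHolder::current` are queried with.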

View file

@ -8,7 +8,7 @@ use crate::document::document::DocumentAccessors;
use crate::document::element::ElemKind; use crate::document::element::ElemKind;
use crate::elements::paragraph::Paragraph; use crate::elements::paragraph::Paragraph;
use super::parser::ParserState; use super::parser::Parser;
use super::source::Source; use super::source::Source;
/// Processes text for escape characters and paragraphing /// Processes text for escape characters and paragraphing
@ -36,7 +36,7 @@ pub fn process_text(document: &dyn Document, content: &str) -> String {
.last_element::<Paragraph>() .last_element::<Paragraph>()
.and_then(|par| { .and_then(|par| {
par.find_back(|e| e.kind() != ElemKind::Invisible) par.find_back(|e| e.kind() != ElemKind::Invisible)
.map(|e| e.kind() == ElemKind::Inline) .and_then(|e| Some(e.kind() == ElemKind::Inline))
}) })
.unwrap_or(false) .unwrap_or(false)
{ {
@ -79,12 +79,12 @@ pub fn process_text(document: &dyn Document, content: &str) -> String {
} }
} }
(out + g, Some(g)) return (out + g, Some(g));
}) })
.0 .0
.to_string(); .to_string();
processed return processed;
} }
/// Processes a string and escapes a single token out of it /// Processes a string and escapes a single token out of it
@ -111,7 +111,7 @@ pub fn process_escaped<S: AsRef<str>>(escape: char, token: &'static str, content
escaped += 1; escaped += 1;
} else if escaped % 2 == 1 && token_it.peek().map_or(false, |p| *p == c) { } else if escaped % 2 == 1 && token_it.peek().map_or(false, |p| *p == c) {
let _ = token_it.next(); let _ = token_it.next();
if token_it.peek().is_none() { if token_it.peek() == None {
(0..(escaped / 2)).for_each(|_| processed.push(escape)); (0..(escaped / 2)).for_each(|_| processed.push(escape));
escaped = 0; escaped = 0;
token_it = token.chars().peekable(); token_it = token.chars().peekable();
@ -136,25 +136,17 @@ pub fn process_escaped<S: AsRef<str>>(escape: char, token: &'static str, content
/// Parses source into a single paragraph /// Parses source into a single paragraph
/// If source contains anything but a single paragraph, an error is returned /// If source contains anything but a single paragraph, an error is returned
pub fn parse_paragraph<'a>( pub fn parse_paragraph<'a>(
state: &ParserState, parser: &dyn Parser,
source: Rc<dyn Source>, source: Rc<dyn Source>,
document: &'a dyn Document<'a>, document: &'a dyn Document<'a>,
) -> Result<Box<Paragraph>, &'static str> { ) -> Result<Box<Paragraph>, &'static str> {
let parsed = state.with_state(|new_state| -> Box<dyn Document> { let parsed = parser.parse(source.clone(), Some(document));
new_state
.parser
.parse(new_state, source.clone(), Some(document))
.0
});
if parsed.content().borrow().len() > 1 { if parsed.content().borrow().len() > 1 {
return Err("Parsed document contains more than a single paragraph"); return Err("Parsed document contains more than a single paragraph");
} else if parsed.content().borrow().len() == 0 { } else if parsed.content().borrow().len() == 0 {
return Err("Parsed document is empty"); return Err("Parsed document is empty");
} else if parsed.last_element::<Paragraph>().is_none() { } else if parsed.last_element::<Paragraph>().is_none() {
return Err("Parsed element is not a paragraph"); return Err("Parsed element is not a paragraph");
} else if state.parser.has_error() {
// FIXME: If the parser had an error before, this would trigger
return Err("Parser error");
} }
let paragraph = parsed.content().borrow_mut().pop().unwrap(); let paragraph = parsed.content().borrow_mut().pop().unwrap();
@ -237,12 +229,13 @@ impl<'a> PropertyMap<'a> {
} }
} }
#[derive(Debug)]
pub struct PropertyParser { pub struct PropertyParser {
pub properties: HashMap<String, Property>, properties: HashMap<String, Property>,
} }
impl PropertyParser { impl PropertyParser {
pub fn new(properties: HashMap<String, Property>) -> Self { Self { properties } }
/// Attempts to build a default propertymap /// Attempts to build a default propertymap
/// ///
/// Returns an error if at least one [`Property`] is required and doesn't provide a default /// Returns an error if at least one [`Property`] is required and doesn't provide a default
@ -278,7 +271,7 @@ impl PropertyParser {
/// properties.insert("width".to_string(), /// properties.insert("width".to_string(),
/// Property::new(true, "Width of the element in em".to_string(), None)); /// Property::new(true, "Width of the element in em".to_string(), None));
/// ///
/// let parser = PropertyParser { properties }; /// let parser = PropertyParser::new(properties);
/// let pm = parser.parse("width=15").unwrap(); /// let pm = parser.parse("width=15").unwrap();
/// ///
/// assert_eq!(pm.get("width", |_, s| s.parse::<i32>()).unwrap().1, 15); /// assert_eq!(pm.get("width", |_, s| s.parse::<i32>()).unwrap().1, 15);
@ -333,8 +326,9 @@ impl PropertyParser {
escaped = 0; escaped = 0;
in_name = true; in_name = true;
try_insert(&name, &value)?; if let Err(e) = try_insert(&name, &value) {
return Err(e);
}
name.clear(); name.clear();
value.clear(); value.clear();
} else { } else {
@ -360,7 +354,9 @@ impl PropertyParser {
return Err("Expected non empty property list.".to_string()); return Err("Expected non empty property list.".to_string());
} }
try_insert(&name, &value)?; if let Err(e) = try_insert(&name, &value) {
return Err(e);
}
if let Err(e) = self.properties.iter().try_for_each(|(key, prop)| { if let Err(e) = self.properties.iter().try_for_each(|(key, prop)| {
if !properties.properties.contains_key(key) { if !properties.properties.contains_key(key) {
@ -420,26 +416,20 @@ mod tests {
(&doc as &dyn Document) (&doc as &dyn Document)
.last_element_mut::<Paragraph>() .last_element_mut::<Paragraph>()
.unwrap() .unwrap()
.push(Box::new(Comment { .push(Box::new(Comment::new(tok.clone(), "COMMENT".to_string())));
location: tok.clone(),
content: "COMMENT".into(),
}))
.unwrap();
assert_eq!(process_text(&doc, "\na"), "a"); assert_eq!(process_text(&doc, "\na"), "a");
// A space is appended as previous element is inline // A space is appended as previous element is inline
(&doc as &dyn Document) (&doc as &dyn Document)
.last_element_mut::<Paragraph>() .last_element_mut::<Paragraph>()
.unwrap() .unwrap()
.push(Box::new(Text::new(tok.clone(), "TEXT".to_string()))) .push(Box::new(Text::new(tok.clone(), "TEXT".to_string())));
.unwrap();
assert_eq!(process_text(&doc, "\na"), " a"); assert_eq!(process_text(&doc, "\na"), " a");
(&doc as &dyn Document) (&doc as &dyn Document)
.last_element_mut::<Paragraph>() .last_element_mut::<Paragraph>()
.unwrap() .unwrap()
.push(Box::new(Style::new(tok.clone(), 0, false))) .push(Box::new(Style::new(tok.clone(), 0, false)));
.unwrap();
assert_eq!(process_text(&doc, "\na"), " a"); assert_eq!(process_text(&doc, "\na"), " a");
} }
@ -501,7 +491,7 @@ mod tests {
Property::new(false, "Weight in %".to_string(), Some("0.42".to_string())), Property::new(false, "Weight in %".to_string(), Some("0.42".to_string())),
); );
let parser = PropertyParser { properties }; let parser = PropertyParser::new(properties);
let pm = parser.parse("width=15,length=-10").unwrap(); let pm = parser.parse("width=15,length=-10").unwrap();
// Ok // Ok

127
style.css
View file

@ -2,37 +2,9 @@ body {
background-color: #1b1b1d; background-color: #1b1b1d;
color: #c5c5c5; color: #c5c5c5;
font-family: sans-serif; font-family: sans-serif;
margin: 0;
padding: 0;
}
.layout { max-width: 90ch;
display: flex;
}
.content {
max-width: 99ch;
margin: 0 auto; margin: 0 auto;
padding: 0;
width: 100%;
}
/* Layouts */
div.centered {
text-align: center;
}
div.split-container {
display: flex;
width: 100%;
}
div.split-container > div.split {
flex: 1;
flex-shrink: 0;
overflow-x: auto;
margin: 0.5em;
} }
/* Styles */ /* Styles */
@ -57,18 +29,13 @@ a.inline-code {
} }
/* Navbar */ /* Navbar */
.navbar { #navbar {
display: none;
left: 0; left: 0;
top: 0; top: 0;
bottom: 0; bottom: 0;
width: max(calc((100vw - 99ch) / 2 - 15vw), 24ch); width: max(16vw, 20ch);
height: 100vh;
position: fixed;
margin-right: 1em;
overflow-y: auto; overflow-y: auto;
position: absolute;
box-sizing: border-box; box-sizing: border-box;
overscroll-behavior-y: contain; overscroll-behavior-y: contain;
@ -79,53 +46,44 @@ a.inline-code {
font-weight: bold; font-weight: bold;
} }
@media (min-width: 130ch) { #navbar a {
.navbar {
display: block;
}
.container {
flex-direction: row;
}
}
.navbar a {
color: #ffb454; color: #ffb454;
text-decoration: none; text-decoration: none;
font-weight: normal; font-weight: normal;
} }
.navbar li { #navbar li {
display: block; display: block;
position: relative; position: relative;
padding-left: 1em; padding-left: 1em;
margin-left: 0em; margin-left: 0em;
} }
.navbar ul { #navbar ul {
margin-left: 0em; margin-left: 0em;
padding-left: 0; padding-left: 0;
} }
.navbar summary{ #navbar summary{
display: block; display: block;
cursor: pointer; cursor: pointer;
} }
.navbar summary::marker, #navbar summary::marker,
.navbar summary::-webkit-details-marker{ #navbar summary::-webkit-details-marker{
display: none; display: none;
} }
.navbar summary:focus{ #navbar summary:focus{
outline: none; outline: none;
} }
.navbar summary:focus-visible{ #navbar summary:focus-visible{
outline: 1px dotted #000; outline: 1px dotted #000;
} }
.navbar summary:before { #navbar summary:before {
content: "+"; content: "+";
color: #ffb454; color: #ffb454;
float: left; float: left;
@ -133,15 +91,10 @@ a.inline-code {
width: 1em; width: 1em;
} }
.navbar details[open] > summary:before { #navbar details[open] > summary:before {
content: ""; content: "";
} }
/* Sections */
a.section-link {
text-decoration: none;
}
/* Code blocks */ /* Code blocks */
div.code-block-title { div.code-block-title {
background-color: #20202a; background-color: #20202a;
@ -149,10 +102,11 @@ div.code-block-title {
} }
div.code-block-content { div.code-block-content {
max-height: 38em; max-height: 20em;
margin-bottom: 0.2em; margin-bottom: 0.2em;
width: auto;
overflow: scroll; overflow: auto;
background-color: #0f141a; background-color: #0f141a;
} }
@ -165,7 +119,6 @@ div.code-block-content td {
div.code-block-content pre { div.code-block-content pre {
border: 0; border: 0;
margin: 0; margin: 0;
tab-size: 4;
} }
div.code-block-content .code-block-gutter { div.code-block-content .code-block-gutter {
@ -201,7 +154,7 @@ div.code-block-content .code-block-line {
margin-right: .5em; margin-right: .5em;
} }
.medium img, video, audio { .medium img {
max-width: 100%; max-width: 100%;
} }
@ -227,7 +180,6 @@ a.medium-ref {
font-weight: bold; font-weight: bold;
color: #d367c1; color: #d367c1;
text-decoration: none;
} }
a.medium-ref:hover { a.medium-ref:hover {
@ -239,50 +191,11 @@ a.medium-ref img {
margin: 1.3em 0 0 0; margin: 1.3em 0 0 0;
} }
a.medium-ref video {
display: none;
margin: 1.3em 0 0 0;
}
a:hover.medium-ref img { a:hover.medium-ref img {
max-width: 25%; max-width: 50%;
left: 37.5%; margin: auto;
display: inline-block; display: inline-block;
position: absolute; position: absolute;
box-shadow: 0px 0px 6px 2px rgba(0,0,0,0.75); box-shadow: 0px 0px 6px 2px rgba(0,0,0,0.75);
} }
a:hover.medium-ref video {
max-width: 25%;
left: 37.5%;
display: inline-block;
position: absolute;
box-shadow: 0px 0px 6px 2px rgba(0, 0, 0, 0.75);
}
/* Blockquote */
blockquote {
margin-left: 0.2em;
padding-left: 0.6em;
border-left: 4px solid #0ff08b;
}
blockquote p::before {
content: '\201C';
}
blockquote p::after {
content: '\201D';
}
.blockquote-author:before {
content: '—';
}
.blockquote-author {
margin-left: 0.2em;
}

View file

@ -1,7 +1,5 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
"""latex2svg """latex2svg
-- This version of latex2svg comes with NML and has been modified to work with it only --
-- The original version can be found here : https://github.com/Moonbase59/latex2svg --
Read LaTeX code from stdin and render a SVG using LaTeX, dvisvgm and svgo. Read LaTeX code from stdin and render a SVG using LaTeX, dvisvgm and svgo.
@ -25,6 +23,38 @@ import re
from tempfile import TemporaryDirectory from tempfile import TemporaryDirectory
from ctypes.util import find_library from ctypes.util import find_library
default_template = r"""
\documentclass[{{ fontsize }}pt,preview]{standalone}
{{ preamble }}
\begin{document}
\begin{preview}
{{ code }}
\end{preview}
\end{document}
"""
default_preamble = r"""
\usepackage[utf8x]{inputenc}
\usepackage{amsmath}
\usepackage{amsfonts}
\usepackage{amssymb}
\usepackage{amstext}
\usepackage{newtxtext}
\usepackage[libertine]{newtxmath}
% prevent errors from old font commands
\DeclareOldFontCommand{\rm}{\normalfont\rmfamily}{\mathrm}
\DeclareOldFontCommand{\sf}{\normalfont\sffamily}{\mathsf}
\DeclareOldFontCommand{\tt}{\normalfont\ttfamily}{\mathtt}
\DeclareOldFontCommand{\bf}{\normalfont\bfseries}{\mathbf}
\DeclareOldFontCommand{\it}{\normalfont\itshape}{\mathit}
\DeclareOldFontCommand{\sl}{\normalfont\slshape}{\@nomath\sl}
\DeclareOldFontCommand{\sc}{\normalfont\scshape}{\@nomath\sc}
% prevent errors from undefined shortcuts
\newcommand{\N}{\mathbb{N}}
\newcommand{\R}{\mathbb{R}}
\newcommand{\Z}{\mathbb{Z}}
"""
default_svgo_config = r""" default_svgo_config = r"""
module.exports = { module.exports = {
plugins: [ plugins: [
@ -54,6 +84,8 @@ svgo_cmd = 'svgo'
default_params = { default_params = {
'fontsize': 12, # TeX pt 'fontsize': 12, # TeX pt
'template': default_template,
'preamble': default_preamble,
'latex_cmd': latex_cmd, 'latex_cmd': latex_cmd,
'dvisvgm_cmd': dvisvgm_cmd, 'dvisvgm_cmd': dvisvgm_cmd,
'svgo_cmd': svgo_cmd, 'svgo_cmd': svgo_cmd,
@ -205,15 +237,22 @@ def main():
""") """)
parser.add_argument('--version', action='version', parser.add_argument('--version', action='version',
version='%(prog)s {version}'.format(version=__version__)) version='%(prog)s {version}'.format(version=__version__))
parser.add_argument('--preamble',
help="LaTeX preamble code to read from file")
parser.add_argument('--fontsize', parser.add_argument('--fontsize',
help="LaTeX fontsize in pt") help="LaTeX fontsize in pt")
args = parser.parse_args() args = parser.parse_args()
preamble = default_preamble
if args.preamble is not None:
with open(args.preamble) as f:
preamble = f.read()
fontsize = 12 fontsize = 12
if args.fontsize is not None: if args.fontsize is not None:
fontsize = int(args.fontsize) fontsize = int(args.fontsize)
latex = sys.stdin.read() latex = sys.stdin.read()
try: try:
params = default_params.copy() params = default_params.copy()
params['preamble'] = preamble
params['fontsize'] = fontsize params['fontsize'] = fontsize
out = latex2svg(latex, params) out = latex2svg(latex, params)
sys.stdout.write(out['svg']) sys.stdout.write(out['svg'])