Lua & first bindings

parent f89259eef5
commit 461738dab9
28 changed files with 601 additions and 239 deletions
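This commit wires mlua into the parser: every Rule can now export Lua functions, and Kernel::new collects them into one table per rule under a global `nml` table. A minimal sketch of that layout, written against plain mlua outside the project (the printed body of `push` is a stand-in; the real binding, added in src/elements/text.rs below, pushes a Text element through the parser):

    use mlua::Lua;

    fn main() -> mlua::Result<()> {
        let lua = Lua::new();
        let nml = lua.create_table()?;

        // e.g. the text rule exports `push` under nml.text
        let text = lua.create_table()?;
        text.set("push", lua.create_function(|_, content: String| {
            println!("would push Text {{ content: {content:?} }}"); // stand-in body
            Ok(())
        })?)?;
        nml.set("text", text)?;
        lua.globals().set("nml", nml)?;

        // document scripts can then call the binding
        lua.load(r#"nml.text.push("Hello, world")"#).exec()
    }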
src/cache/cache.rs (vendored, 9 changes)

@@ -45,6 +45,15 @@ pub trait Cached
 			.map(|_| ())
 	}
 
+	/// Attempts to retrieve a cached element from the compilation database
+	/// or create it (and insert it), if it doesn't exist
+	///
+	/// # Error
+	///
+	/// Will return an error if the database connection(s) fail,
+	/// or if not cached, an error from the generator [`f`]
+	///
+	/// Note that on error, [`f`] may still have been called
 	fn cached<E, F>(&self, con: &mut Connection, f: F)
 		-> Result<<Self as Cached>::Value, CachedError<E>>
 	where
src/document/element.rs

@@ -63,33 +63,3 @@ pub trait ReferenceableElement : Element {
 	/// Reference name
 	fn reference_name(&self) -> Option<&String>;
 }
-
-#[derive(Debug)]
-pub struct Text
-{
-	location: Token,
-	content: String,
-}
-
-impl Text
-{
-	pub fn new(location: Token, content: String) -> Text
-	{
-		Text {
-			location: location,
-			content: content
-		}
-	}
-}
-
-impl Element for Text
-{
-	fn location(&self) -> &Token { &self.location }
-	fn kind(&self) -> ElemKind { ElemKind::Inline }
-	fn element_name(&self) -> &'static str { "Text" }
-	fn to_string(&self) -> String { format!("{self:#?}") }
-
-	fn compile(&self, compiler: &Compiler, _document: &Document) -> Result<String, String> {
-		Ok(compiler.sanitize(self.content.as_str()))
-	}
-}
src/document/variable.rs

@@ -1,6 +1,6 @@
 use std::{path::PathBuf, rc::Rc};
-use crate::parser::{parser::Parser, source::{Source, Token, VirtualSource}};
-use super::{document::Document, element::Text};
+use crate::{elements::text::Text, parser::{parser::Parser, source::{Source, Token, VirtualSource}}};
+use super::{document::Document};
 
 
 // TODO enforce to_string(from_string(to_string())) == to_string()
src/elements/code.rs

@@ -2,6 +2,7 @@ use std::{collections::HashMap, ops::Range, rc::Rc, sync::Once};
 
 use ariadne::{Fmt, Label, Report, ReportKind};
 use crypto::{digest::Digest, sha2::Sha512};
+use mlua::{Function, Lua};
 use regex::{Captures, Regex};
 use syntect::{easy::HighlightLines, highlighting::ThemeSet, parsing::SyntaxSet};
 
@@ -387,4 +388,7 @@ impl RegexRule for CodeRule
 
 		reports
 	}
+
+	// TODO
+	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
 }
src/elements/comment.rs

@@ -1,3 +1,4 @@
+use mlua::{Function, Lua};
 use regex::{Captures, Regex};
 use crate::parser::{parser::Parser, rule::RegexRule, source::{Source, Token}};
 use ariadne::{Report, Label, ReportKind};
@@ -78,4 +79,6 @@ impl RegexRule for CommentRule {
 
 		return reports;
 	}
+
+	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
 }
src/elements/import.rs

@@ -1,3 +1,4 @@
+use mlua::{Function, Lua};
 use regex::Regex;
 use crate::parser::{parser::{Parser, ReportColors}, rule::RegexRule, source::{Source, SourceFile, Token}};
 use ariadne::{Report, Fmt, Label, ReportKind};
@@ -152,4 +153,6 @@ impl RegexRule for ImportRule {
 
 		return result;
 	}
+
+	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
 }
src/elements/link.rs

@@ -1,4 +1,6 @@
+use mlua::{Function, Lua};
 use regex::Regex;
+use serde::{Deserialize, Serialize};
 use crate::parser::{parser::Parser, rule::RegexRule, source::{Source, Token}, util};
 use ariadne::{Report, Fmt, Label, ReportKind};
 use crate::{compiler::compiler::{Compiler, Target}, document::{document::Document, element::{ElemKind, Element}}};
@@ -146,4 +148,7 @@ impl RegexRule for LinkRule {
 
 		return result;
 	}
+
+	// TODO
+	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
 }
src/elements/list.rs

@@ -2,6 +2,7 @@ use std::{any::Any, cell::Ref, ops::Range, rc::Rc};
 
 use crate::{compiler::compiler::{Compiler, Target}, document::{document::Document, element::{ElemKind, Element}}, parser::{parser::Parser, rule::Rule, source::{Cursor, Source, Token, VirtualSource}}};
 use ariadne::{Label, Report, ReportKind};
+use mlua::{Function, Lua};
 use regex::Regex;
 
 use super::paragraph::Paragraph;
@@ -332,4 +333,7 @@ impl Rule for ListRule
 
 		(end_cursor, reports)
 	}
+
+	// TODO
+	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
 }
src/elements/mod.rs

@@ -1,4 +1,5 @@
 pub mod registrar;
+pub mod text;
 pub mod comment;
 pub mod paragraph;
 pub mod variable;
src/elements/paragraph.rs

@@ -1,6 +1,7 @@
 use std::{any::Any, ops::Range, rc::Rc};
 
 use ariadne::Report;
+use mlua::{Function, Lua};
 use regex::Regex;
 
 use crate::{compiler::compiler::{Compiler, Target}, document::{document::Document, element::{ElemKind, Element}}, parser::{parser::Parser, rule::Rule, source::{Cursor, Source, Token}}};
@@ -124,4 +125,7 @@ impl Rule for ParagraphRule
 
 		(end_cursor, Vec::new())
 	}
+
+	// TODO
+	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
 }
src/elements/raw.rs

@@ -1,3 +1,4 @@
+use mlua::{Function, Lua};
 use regex::{Captures, Regex};
 use crate::{compiler::compiler::Compiler, document::element::{ElemKind, Element}, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}, util::{self, Property, PropertyParser}}};
 use ariadne::{Fmt, Label, Report, ReportKind};
@@ -161,4 +162,7 @@ impl RegexRule for RawRule
 
 		reports
 	}
+
+	// TODO
+	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
 }
src/elements/registrar.rs

@@ -1,6 +1,6 @@
 use crate::parser::parser::Parser;
 
-use super::{code::CodeRule, comment::CommentRule, import::ImportRule, link::LinkRule, list::ListRule, paragraph::ParagraphRule, raw::RawRule, script::ScriptRule, section::SectionRule, style::StyleRule, tex::TexRule, variable::{VariableRule, VariableSubstitutionRule}};
+use super::{code::CodeRule, comment::CommentRule, import::ImportRule, link::LinkRule, list::ListRule, paragraph::ParagraphRule, raw::RawRule, script::ScriptRule, section::SectionRule, style::StyleRule, tex::TexRule, text::TextRule, variable::{VariableRule, VariableSubstitutionRule}};
 
 
 pub fn register<P: Parser>(parser: &mut P)
@@ -19,4 +19,5 @@ pub fn register<P: Parser>(parser: &mut P)
 	parser.add_rule(Box::new(StyleRule::new()), None);
 	parser.add_rule(Box::new(SectionRule::new()), None);
 	parser.add_rule(Box::new(LinkRule::new()), None);
+	parser.add_rule(Box::new(TextRule::default()), None);
 }
src/elements/script.rs

@@ -1,24 +1,28 @@
+use mlua::{Function, Lua};
 use regex::{Captures, Regex};
-use crate::{document::element::Text, lua::kernel::{Kernel, KernelHolder}, parser::{parser::{Parser, ReportColors}, rule::RegexRule, source::{Source, Token, VirtualSource}, util}};
+use crate::{lua::kernel::{Kernel, KernelContext, KernelHolder}, parser::{parser::{Parser, ReportColors}, rule::RegexRule, source::{Source, Token, VirtualSource}, util}};
 use ariadne::{Fmt, Label, Report, ReportKind};
 use crate::document::document::Document;
 use std::{ops::Range, rc::Rc};
 
+use super::text::Text;
+
 pub struct ScriptRule
 {
 	re: [Regex; 2],
-	eval_kinds: [(&'static str, &'static str); 2]
+	eval_kinds: [(&'static str, &'static str); 3]
 }
 
 impl ScriptRule {
 	pub fn new() -> Self {
 		Self {
 			re: [
-				Regex::new(r"(?:^|\n)@<(?:(.*)\n?)((?:\\.|[^\[\]\\])*?)(?:\n?)>@").unwrap(),
-				Regex::new(r"%<([^\s[:alpha:]])?(?:\[(.*?)\])?((?:\\.|[^\[\]\\])*?)(?:\n?)>%").unwrap()
+				Regex::new(r"(?:^|\n)@<(?:(.*)\n?)((?:\\.|[^\\\\])*?)(?:\n?)>@").unwrap(),
+				Regex::new(r"%<([^\s[:alpha:]])?(?:\[(.*?)\])?((?:\\.|[^\\\\])*?)(?:\n?)>%").unwrap()
 			],
 			eval_kinds: [
-				("", "Eval to text"),
+				("", "Eval"),
+				("\"", "Eval to text"),
 				("!", "Eval and parse"),
 			]
 		}
@@ -87,7 +91,7 @@ impl RegexRule for ScriptRule
 			})
 			.unwrap_or("main");
 		let kernel = parser.get_kernel(kernel_name).unwrap_or_else(|| {
-			parser.insert_kernel(kernel_name.to_string(), Kernel::new())
+			parser.insert_kernel(kernel_name.to_string(), Kernel::new(parser))
 		});
 
 		let kernel_data = matches.get(if index == 0 {2} else {3})
@@ -116,14 +120,15 @@ impl RegexRule for ScriptRule
 			util::process_escaped('\\', ">@", kernel_content)
 		)) as Rc<dyn Source>;
 
-		let chunk = kernel.lua.load(source.content())
-			.set_name(kernel_name);
-		if index == 0 // @< ... >@ -> Exec
+		let execute = |lua: &Lua|
 		{
-			match chunk.exec()
+			let chunk = lua.load(source.content())
+				.set_name(kernel_name);
+
+			if index == 0 // Exec
 			{
-				Ok(_) => {},
-				Err(e) => {
+				if let Err(e) = chunk.exec()
+				{
 					reports.push(
 						Report::build(ReportKind::Error, source.clone(), 0)
 							.with_message("Invalid kernel code")
@@ -132,11 +137,12 @@ impl RegexRule for ScriptRule
 								.with_message(format!("Kernel execution failed:\n{}", e.to_string()))
 								.with_color(parser.colors().error))
 							.finish());
+					return reports;
 				}
 			}
-		}
-		else if index == 1 // %< ... >% -> Eval
+			else // Eval
 			{
+				// Validate kind
 				let kind = match matches.get(1) {
 					None => 0,
 					Some(kind) => {
@@ -158,10 +164,26 @@ impl RegexRule for ScriptRule
 					}
 				};
 
+				if kind == 0 // Eval
+				{
+					if let Err(e) = chunk.eval::<()>()
+					{
+						reports.push(
+							Report::build(ReportKind::Error, source.clone(), 0)
+								.with_message("Invalid kernel code")
+								.with_label(
+									Label::new((source.clone(), 0..source.content().len()))
+										.with_message(format!("Kernel evaluation failed:\n{}", e.to_string()))
+										.with_color(parser.colors().error))
+								.finish());
+					}
+				}
+				else // Eval to string
+				{
 					match chunk.eval::<String>()
 					{
 						Ok(result) => {
-							if kind == 0 // Eval to text
+							if kind == 1 // Eval to text
 							{
 								if !result.is_empty()
 								{
@@ -171,14 +193,13 @@ impl RegexRule for ScriptRule
 								)));
 							}
 						}
-						else if kind == 1 // Eval and Parse
+						else if kind == 2 // Eval and Parse
 						{
 							let parse_source = Rc::new(VirtualSource::new(
 								Token::new(0..source.content().len(), source.clone()),
 								format!("parse({})", source.name()),
 								result
 							)) as Rc<dyn Source>;
-							//println!("SRC={parse_source:#?}, {}", parse_source.content());
 
 							parser.parse_into(parse_source, document);
 						}
@@ -195,7 +216,20 @@ impl RegexRule for ScriptRule
 					}
 				}
 			}
+			}
 
 			reports
+		};
+
+		let ctx = KernelContext {
+			location: Token::new(0..source.content().len(), source.clone()),
+			parser,
+			document
+		};
+
+		kernel.run_with_context(ctx, execute)
 	}
+
+	// TODO
+	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
 }
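The eval_kinds table now distinguishes three modes: plain `%< ... >%` evaluates for side effects only (chunk.eval::<()>), `%<" ... >%` evaluates to text that gets pushed as a Text element, and `%<! ... >%` evaluates and parses the resulting string back into the document. A rough sketch of the mlua calls behind the first modes, with the kernel and report plumbing omitted and hypothetical example values:

    use mlua::Lua;

    fn main() -> mlua::Result<()> {
        let lua = Lua::new();

        // @< ... >@ — exec: run the chunk for its side effects
        lua.load("x = 21").exec()?;

        // %< ... >%  (kind "")   — eval::<()>: evaluate, discard the value
        lua.load("x * 2").eval::<()>().ok();

        // %<" ... >% (kind "\"") — eval::<String>: evaluate to text
        let text: String = lua.load("tostring(x * 2)").eval()?;
        assert_eq!(text, "42");

        // kind "!" additionally feeds `text` back into the parser
        // (parser.parse_into in the diff); no equivalent shown here.
        Ok(())
    }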
src/elements/section.rs

@@ -1,23 +1,17 @@
+use mlua::{Error::BadArgument, Function, Lua};
 use regex::Regex;
-use crate::{compiler::compiler::Target, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}}};
+use crate::{compiler::compiler::Target, lua::kernel::CTX, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}}};
 use ariadne::{Report, Fmt, Label, ReportKind};
 use crate::{compiler::compiler::Compiler, document::{document::Document, element::{ElemKind, Element, ReferenceableElement}}};
-use std::{ops::Range, rc::Rc};
+use std::{ops::Range, rc::Rc, sync::Arc};
 
 #[derive(Debug)]
 pub struct Section {
-	location: Token,
-	title: String, // Section title
-	depth: usize, // Section depth
-	kind: u8, // Section kind, e.g numbered, unnumbred, ...
-	reference: Option<String>, // Section reference name
-}
-
-impl Section
-{
-	pub fn new(location: Token, title: String, depth: usize, kind: u8, reference: Option<String>) -> Self {
-		Self { location: location, title, depth, kind, reference }
-	}
+	pub(self) location: Token,
+	pub(self) title: String, // Section title
+	pub(self) depth: usize, // Section depth
+	pub(self) kind: u8, // Section kind, e.g numbered, unnumbred, ...
+	pub(self) reference: Option<String>, // Section reference name
 }
 
 impl Element for Section
@@ -194,15 +188,50 @@ impl RegexRule for SectionRule {
 		};
 
 		parser.push(document, Box::new(
-			Section::new(
-				token.clone(),
-				section_name,
-				section_depth,
-				section_kind,
-				section_refname
-			)
+			Section {
+				location: token.clone(),
+				title: section_name,
+				depth: section_depth,
+				kind: section_kind,
+				reference: section_refname
+			}
 		));
 
 		return result;
 	}
+
+	fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)>
+	{
+		let mut bindings = vec![];
+
+		bindings.push(("push".to_string(), lua.create_function(
+			|_, (title, depth, kind, reference) : (String, usize, String, Option<String>)| {
+				let kind = match kind.as_str() {
+					"*+" | "+*" => section_kind::NO_NUMBER | section_kind::NO_TOC,
+					"*" => section_kind::NO_NUMBER,
+					"+" => section_kind::NO_TOC,
+					"" => section_kind::NONE,
+					_ => return Err(BadArgument {
+						to: Some("push".to_string()),
+						pos: 3,
+						name: Some("kind".to_string()),
+						cause: Arc::new(mlua::Error::external(
+							format!("Unknown section kind specified")))})
+				};
+
+				CTX.with_borrow(|ctx| ctx.as_ref().map(|ctx| {
+					ctx.parser.push(ctx.document, Box::new(Section {
+						location: ctx.location.clone(),
+						title,
+						depth,
+						kind,
+						reference
+					}));
+				}));
+
+				Ok(())
+			}).unwrap()));
+
+		bindings
+	}
 }
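This adds the first real binding: nml.section.push(title, depth, kind, reference), where kind is "" (plain), "*" (unnumbered), "+" (hidden from the TOC), or "*+"/"+*" (both), and anything else raises BadArgument. A hypothetical usage sketch, assuming a Lua state prepared by Kernel::new and a context installed by run_with_context:

    // hypothetical helper; `lua` must come from a Kernel with rule bindings
    fn push_intro(lua: &mlua::Lua) -> mlua::Result<()> {
        // title, depth, kind, optional reference name
        lua.load(r#"nml.section.push("Introduction", 1, "", "intro")"#).exec()
    }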
src/elements/style.rs

@@ -1,3 +1,4 @@
+use mlua::{Function, Lua};
 use regex::{Captures, Regex};
 use crate::{compiler::compiler::{Compiler, Target}, document::element::{ElemKind, Element}, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}, state::State}};
 use ariadne::{Fmt, Label, Report, ReportKind};
@@ -182,4 +183,6 @@ impl RegexRule for StyleRule
 		return result;
 	}
 
+	// TODO
+	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
 }
src/elements/tex.rs

@@ -2,6 +2,7 @@ use std::{io::{Read, Write}, ops::Range, process::{Command, Stdio}, rc::Rc, sync
 
 use ariadne::{Fmt, Label, Report, ReportKind};
 use crypto::{digest::Digest, sha2::Sha512};
+use mlua::{Function, Lua};
 use regex::{Captures, Regex};
 
 use crate::{cache::cache::{Cached, CachedError}, compiler::compiler::{Compiler, Target}, document::{document::Document, element::{ElemKind, Element}}, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}, util}};
@@ -260,4 +261,7 @@ impl RegexRule for TexRule
 
 		reports
 	}
+
+	// TODO
+	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
 }
src/elements/text.rs (new file, 63 lines)

@@ -0,0 +1,63 @@
+use mlua::{Function, Lua};
+
+use crate::{compiler::compiler::Compiler, document::{document::Document, element::{ElemKind, Element}}, lua::kernel::CTX, parser::{rule::Rule, source::Token}};
+
+#[derive(Debug)]
+pub struct Text
+{
+	pub(self) location: Token,
+	pub(self) content: String,
+}
+
+impl Text
+{
+	pub fn new(location: Token, content: String) -> Text
+	{
+		Text {
+			location: location,
+			content: content
+		}
+	}
+}
+
+impl Element for Text
+{
+	fn location(&self) -> &Token { &self.location }
+	fn kind(&self) -> ElemKind { ElemKind::Inline }
+	fn element_name(&self) -> &'static str { "Text" }
+	fn to_string(&self) -> String { format!("{self:#?}") }
+
+	fn compile(&self, compiler: &Compiler, _document: &Document) -> Result<String, String> {
+		Ok(compiler.sanitize(self.content.as_str()))
+	}
+}
+
+#[derive(Default)]
+pub struct TextRule;
+
+impl Rule for TextRule
+{
+	fn name(&self) -> &'static str { "Text" }
+
+	fn next_match(&self, cursor: &crate::parser::source::Cursor) -> Option<(usize, Box<dyn std::any::Any>)> { None }
+
+	fn on_match(&self, parser: &dyn crate::parser::parser::Parser, document: &crate::document::document::Document, cursor: crate::parser::source::Cursor, match_data: Option<Box<dyn std::any::Any>>) -> (crate::parser::source::Cursor, Vec<ariadne::Report<'_, (std::rc::Rc<dyn crate::parser::source::Source>, std::ops::Range<usize>)>>) { panic!("Text canno match"); }
+
+	fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
+		let mut bindings = vec![];
+
+		bindings.push(("push".to_string(), lua.create_function(
+			|_, content: String| {
+				CTX.with_borrow(|ctx| ctx.as_ref().map(|ctx| {
+					ctx.parser.push(ctx.document, Box::new(Text {
+						location: ctx.location.clone(),
+						content,
+					}));
+				}));
+
+				Ok(())
+			}).unwrap()));
+
+		bindings
+	}
+}
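TextRule never matches source text (next_match returns None); it exists only to host the `push` binding so scripts can emit raw text. Outside run_with_context the CTX thread-local is None and the binding silently does nothing. A hypothetical call, under the same assumptions as the section example above:

    // hypothetical helper; must run inside Kernel::run_with_context for the
    // pushed Text to actually reach the parser
    fn push_hello(lua: &mlua::Lua) -> mlua::Result<()> {
        lua.load(r#"nml.text.push("Hello from Lua")"#).exec()
    }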
src/elements/variable.rs

@@ -1,3 +1,4 @@
+use mlua::{Function, Lua};
 use regex::Regex;
 use crate::parser::{parser::{Parser, ReportColors}, rule::RegexRule, source::{Source, Token}};
 use ariadne::{Report, Fmt, Label, ReportKind};
@@ -20,7 +21,6 @@ impl VariableRule {
 		}
 	}
 
-
 	pub fn make_variable(&self, colors: &ReportColors, location: Token, kind: usize, name: String, value: String) -> Result<Rc<dyn Variable>, String>
 	{
 		match self.kinds[kind].0.as_str()
@@ -89,6 +89,8 @@ impl RegexRule for VariableRule {
 
 	fn regexes(&self) -> &[Regex] { &self.re }
 
+
+
 	fn on_regex_match(&self, _: usize, parser: &dyn Parser, document: &Document, token: Token, matches: regex::Captures) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>
 	{
 		let mut result = vec![];
@@ -197,6 +199,9 @@ impl RegexRule for VariableRule {
 
 		return result;
 	}
+
+	// TODO
+	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
 }
 
 pub struct VariableSubstitutionRule
@@ -326,4 +331,7 @@ impl RegexRule for VariableSubstitutionRule
 
 		return result;
 	}
+
+	// TODO
+	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { vec![] }
 }
src/lsp/mod.rs

@@ -1 +1,2 @@
+pub mod semantic;
 pub mod parser;
src/lsp/parser.rs

@@ -1,30 +1,116 @@
-use std::{cell::RefCell, collections::HashMap};
+use std::rc::Rc;
 
-use crate::{elements::registrar::register, lua::kernel::Kernel, parser::{rule::Rule, state::StateHolder}};
+use crate::parser::source::{Cursor, Source};
 
-struct LSParser
+#[derive(Debug, Clone)]
+pub struct LineCursor
 {
-	rules: Vec<Box<dyn Rule>>,
-
-	// Parser state
-	pub state: RefCell<StateHolder>,
-	//pub kernels: RefCell<HashMap<String, Kernel>>,
+	pub pos: usize,
+	pub line: usize,
+	pub line_pos: usize,
+	pub source: Rc<dyn Source>,
 }
 
-impl LSParser {
-	pub fn default() -> Self
+impl LineCursor
 {
-		let mut parser = LSParser {
-			rules: vec![],
-			state: RefCell::new(StateHolder::new()),
-			//kernels: RefCell::new(HashMap::new()),
-		};
-
-		// TODO: Main kernel
-		//register(&mut parser);
-
-		parser
+	/// Creates [`LineCursor`] at position
+	///
+	/// # Error
+	/// This function will panic if [`pos`] is not utf8 aligned
+	///
+	/// Note: this is a convenience function, it should be used
+	/// with parsimony as it is expensive
+	pub fn at(&mut self, pos: usize)
+	{
+		if pos > self.pos
+		{
+			let start = self.pos;
+			//eprintln!("slice{{{}}}, want={pos}", &self.source.content().as_str()[start..pos]);
+			let mut it = self.source.content()
+				.as_str()[start..] // pos+1
+				.chars()
+				.peekable();
+
+			let mut prev = self.source.content()
+				.as_str()[..start+1]
+				.chars()
+				.rev()
+				.next();
+			//eprintln!("prev={prev:#?}");
+			while self.pos < pos
+			{
+				let c = it.next().unwrap();
+				let len = c.len_utf8();
+
+				self.pos += len;
+				if prev == Some('\n')
+				{
+					self.line += 1;
+					self.line_pos = 0;
+				}
+				else
+				{
+					self.line_pos += len;
+				}
+
+				//eprintln!("({}, {c:#?}) ({} {})", self.pos, self.line, self.line_pos);
+				prev = Some(c);
+			}
+
+			/*
+			self.source.content()
+				.as_str()[start..pos+1]
+				.char_indices()
+				.for_each(|(at, c)| {
+					self.pos = at+start;
+
+					if c == '\n'
+					{
+						self.line += 1;
+						self.line_pos = 0;
+					}
+					else
+					{
+						self.line_pos += c.len_utf8();
+					}
+
+				});
+			*/
+		}
+		else if pos < self.pos
+		{
+			todo!("");
+			self.source.content()
+				.as_str()[pos..self.pos]
+				.char_indices()
+				.rev()
+				.for_each(|(len, c)| {
+					self.pos -= len;
+					if c == '\n'
+					{
+						self.line -= 1;
+					}
+				});
+			self.line_pos = self.source.content()
+				.as_str()[..self.pos]
+				.char_indices()
+				.rev()
+				.find(|(_, c)| *c == '\n')
+				.map(|(line_start, _)| self.pos-line_start)
+				.unwrap_or(0);
+		}
+
+		// May fail if pos is not utf8-aligned
+		assert_eq!(pos, self.pos);
 	}
 }
+
+impl From<&LineCursor> for Cursor
+{
+	fn from(value: &LineCursor) -> Self {
+		Self {
+			pos: value.pos,
+			source: value.source.clone()
+		}
+	}
+}
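LineCursor::at turns a byte offset into a (line, line_pos) pair by walking the UTF-8 source forward, resuming from the previous position so repeated calls stay cheap (the backward branch is still todo!). The same computation in standalone form, as a sketch that rescans from the start instead of resuming incrementally:

    /// Sketch: line and byte-column of `pos` in `content`
    fn line_of(content: &str, pos: usize) -> (usize, usize) {
        assert!(content.is_char_boundary(pos)); // `at` asserts likewise
        let (mut line, mut line_pos) = (0, 0);
        for c in content[..pos].chars() {
            if c == '\n' { line += 1; line_pos = 0; }
            else { line_pos += c.len_utf8(); }
        }
        (line, line_pos)
    }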
src/lsp/semantic.rs (new file, 90 lines)

@@ -0,0 +1,90 @@
+use std::any::Any;
+
+use tower_lsp::lsp_types::{SemanticToken, SemanticTokenType};
+
+use crate::{document::{document::Document, element::Element}, elements::{comment::Comment, paragraph::Paragraph, section::Section}, parser::rule::Rule};
+
+use super::parser::LineCursor;
+
+pub trait SemanticProvider: Rule
+{
+	fn get_semantic_tokens(&self, cursor: &LineCursor, match_data: Box<dyn Any>) -> Vec<SemanticToken>;
+}
+
+pub const LEGEND_TYPE : &[SemanticTokenType] = &[
+	SemanticTokenType::COMMENT,
+	SemanticTokenType::VARIABLE,
+	SemanticTokenType::STRING,
+	SemanticTokenType::PARAMETER,
+];
+
+// TODO...
+pub fn provide(semantic_tokens: &mut Vec<SemanticToken>, cursor: &mut LineCursor, elem: &Box<dyn Element>) {
+	if cursor.source != elem.location().source() { return }
+
+	let prev = cursor.clone();
+
+	if let Some(comm) = elem.downcast_ref::<Comment>()
+	{
+		cursor.at(elem.location().start());
+		let delta_start = if cursor.line == prev.line
+		{
+			cursor.line_pos - prev.line_pos
+		} else if cursor.line == 0 { cursor.line_pos }
+		else { cursor.line_pos+1 };
+		semantic_tokens.push(SemanticToken {
+			delta_line: (cursor.line-prev.line) as u32,
+			delta_start: delta_start as u32,
+			length: (elem.location().end() - elem.location().start()) as u32,
+			token_type: 0,
+			token_modifiers_bitset: 0,
+		});
+	}
+	else if let Some(sect) = elem.downcast_ref::<Section>()
+	{
+		eprintln!("section");
+		cursor.at(elem.location().start());
+		let delta_start = if cursor.line == prev.line
+		{
+			cursor.line_pos - prev.line_pos
+		} else if cursor.line == 0 { cursor.line_pos }
+		else { cursor.line_pos+1 };
+		semantic_tokens.push(SemanticToken {
+			delta_line: (cursor.line-prev.line) as u32,
+			delta_start: delta_start as u32,
+			length: (elem.location().end() - elem.location().start()) as u32,
+			token_type: 0,
+			token_modifiers_bitset: 0,
+		});
+	}
+}
+
+pub fn semantic_token_from_document(document: &Document) -> Vec<SemanticToken>
+{
+	let mut semantic_tokens = vec![];
+
+	let source = document.source();
+	let mut cursor = LineCursor {
+		pos: 0,
+		line: 0,
+		line_pos: 0,
+		source: source.clone()
+	};
+
+	document.content.borrow()
+		.iter()
+		.for_each(|elem| {
+			if let Some(paragraph) = elem.downcast_ref::<Paragraph>()
+			{
+				paragraph.content
+					.iter()
+					.for_each(|elem| provide(&mut semantic_tokens, &mut cursor, elem));
+			}
+			else
+			{
+				provide(&mut semantic_tokens, &mut cursor, elem);
+			}
+		});
+
+	semantic_tokens
+}
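The prev/cursor pair in provide() implements the LSP delta encoding for semantic tokens: each token stores its line relative to the previous token, and its start column relative to the previous token's start when both share a line, or from the line start otherwise. A compact statement of the standard encoding (the code above approximates it, with a `line_pos+1` in its new-line branch):

    struct Pos { line: usize, character: usize }

    /// (delta_line, delta_start) for `cur` relative to the previous token
    fn delta(prev: &Pos, cur: &Pos) -> (u32, u32) {
        let delta_line = (cur.line - prev.line) as u32;
        let delta_start = if cur.line == prev.line {
            (cur.character - prev.character) as u32
        } else {
            cur.character as u32
        };
        (delta_line, delta_start)
    }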
src/lua/kernel.rs

@@ -1,15 +1,70 @@
-use std::cell::RefMut;
+use std::{cell::{RefCell, RefMut}, rc::Rc};
 
-use mlua::Lua;
+use mlua::{Error, FromLua, Lua, UserData, UserDataMethods};
+
+use crate::{document::document::Document, parser::{parser::Parser, source::Token}};
+
+pub struct KernelContext<'a>
+{
+	pub location: Token,
+	pub parser: &'a dyn Parser,
+	pub document: &'a Document<'a>,
+	//pub parser: &'a dyn Parser,
+}
+
+thread_local! {
+	pub static CTX: RefCell<Option<KernelContext<'static>>> = RefCell::new(None);
+}
 
+#[derive(Debug)]
 pub struct Kernel
 {
-	pub lua: Lua,
+	lua: Lua,
 }
 
 impl Kernel {
-	pub fn new() -> Self {
-		Self { lua: Lua::new() }
+	// TODO: Take parser as arg and
+	// iterate over the rules
+	// to find export the bindings (if some)
+	pub fn new(parser: &dyn Parser) -> Self {
+		let lua = Lua::new();
+
+		{
+			let nml_table = lua.create_table().unwrap();
+
+			for rule in parser.rules()
+			{
+				let table = lua.create_table().unwrap();
+				let name = rule.name().to_lowercase();
+
+				for (fun_name, fun) in rule.lua_bindings(&lua)
+				{
+					table.set(fun_name, fun).unwrap();
+				}
+
+				nml_table.set(name, table).unwrap();
+			}
+			lua.globals().set("nml", nml_table).unwrap();
+		}
+
+		Self { lua }
+	}
+
+	/// Runs a procedure with a context
+	///
+	/// This is the only way lua code shoule be ran, because exported
+	/// functions may require the context in order to operate
+	pub fn run_with_context<T, F>(&self, context: KernelContext, f: F)
+		-> T
+	where
+		F: FnOnce(&Lua) -> T
+	{
+		CTX.set(Some(unsafe { std::mem::transmute(context) }));
+		let ret = f(&self.lua);
+		CTX.set(None);
+
+		ret
 	}
 }
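run_with_context smuggles the borrowed KernelContext into the 'static CTX thread-local through an unsafe transmute; this is sound only because the slot is reset to None before the function returns, so the erased lifetime never outlives the actual borrow. A minimal reproduction of the pattern with simplified, hypothetical types:

    use std::cell::RefCell;

    struct Ctx<'a> { name: &'a str } // stand-in for KernelContext

    thread_local! {
        static CTX: RefCell<Option<Ctx<'static>>> = RefCell::new(None);
    }

    fn run_with_context<T>(ctx: Ctx, f: impl FnOnce() -> T) -> T {
        // erase the lifetime for storage; cleared again before `ctx` expires
        CTX.set(Some(unsafe { std::mem::transmute::<Ctx<'_>, Ctx<'static>>(ctx) }));
        let ret = f();
        CTX.set(None);
        ret
    }

    fn main() {
        let name = String::from("main");
        let out = run_with_context(Ctx { name: &name }, || {
            CTX.with_borrow(|c| c.as_ref().map(|c| c.name.to_string()).unwrap())
        });
        assert_eq!(out, "main");
    }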
src/parser/langparser.rs

@@ -2,11 +2,12 @@ use std::{cell::{RefCell, RefMut}, collections::{HashMap, HashSet}, ops::Range,
 
 use ariadne::{Label, Report};
 
-use crate::{document::{document::Document, element::{ElemKind, Element, Text}}, elements::{paragraph::Paragraph, registrar::register}, lua::kernel::{Kernel, KernelHolder}, parser::source::{SourceFile, VirtualSource}};
+use crate::{document::{document::Document, element::{ElemKind, Element}}, elements::{paragraph::Paragraph, registrar::register, text::Text}, lua::kernel::{Kernel, KernelHolder}, parser::source::{SourceFile, VirtualSource}};
 
 use super::{parser::{Parser, ReportColors}, rule::Rule, source::{Cursor, Source, Token}, state::StateHolder, util};
 
 /// Parser for the language
+#[derive(Debug)]
 pub struct LangParser
 {
 	rules: Vec<Box<dyn Rule>>,
@@ -30,8 +31,9 @@ impl LangParser
 			kernels: RefCell::new(HashMap::new()),
 		};
 		register(&mut s);
+
 		s.kernels.borrow_mut()
-			.insert("main".to_string(), Kernel::new());
+			.insert("main".to_string(), Kernel::new(&s));
 		s
 	}
src/parser/parser.rs

@@ -1,7 +1,5 @@
 use std::any::Any;
-use std::cell::{Ref, RefCell, RefMut};
-use std::collections::{HashMap, HashSet};
-use std::ops::Range;
+use std::cell::{Ref, RefMut};
 use std::rc::Rc;
 use unicode_segmentation::UnicodeSegmentation;
 
@@ -11,8 +9,9 @@ use super::state::StateHolder;
 use crate::document::document::Document;
 use crate::document::element::Element;
 use ariadne::Color;
-use crate::lua::kernel::{Kernel, KernelHolder};
+use crate::lua::kernel::KernelHolder;
 
+#[derive(Debug)]
 pub struct ReportColors
 {
 	pub error: Color,
src/parser/rule.rs

@@ -1,6 +1,7 @@
 use super::parser::Parser;
 use super::source::{Cursor, Source, Token};
 use ariadne::Report;
+use mlua::{Function, Lua};
 use crate::document::document::Document;
 
 use std::any::Any;
@@ -14,6 +15,15 @@ pub trait Rule {
 	fn next_match(&self, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)>;
 	/// Callback when rule matches
 	fn on_match(&self, parser: &dyn Parser, document: &Document, cursor: Cursor, match_data: Option<Box<dyn Any>>) -> (Cursor, Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>);
+	/// Export bindings to lua
+	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)>;
+}
+
+impl core::fmt::Debug for dyn Rule
+{
+	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+		write!(f, "Rule{{{}}}", self.name())
+	}
 }
 
 /*
@@ -64,6 +74,8 @@ pub trait RegexRule
 
 	/// Callback on regex rule match
 	fn on_regex_match(&self, index: usize, parser: &dyn Parser, document: &Document, token: Token, matches: regex::Captures) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>>;
+
+	fn lua_bindings<'lua>(&self, _lua: &'lua Lua) -> Vec<(String, Function<'lua>)>;
 }
 
 impl<T: RegexRule> Rule for T {
@@ -100,4 +112,6 @@ impl<T: RegexRule> Rule for T {
 		let token_end = token.end();
 		return (cursor.at(token_end), self.on_regex_match(*index, parser, document, token, captures));
 	}
+
+	fn lua_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> { self.lua_bindings(lua) }
 }
src/parser/source.rs

@@ -2,6 +2,7 @@ use std::{fs, ops::Range, rc::Rc};
 use core::fmt::Debug;
 
 use downcast_rs::{impl_downcast, Downcast};
+use serde::{Deserialize, Serialize};
 
 /// Trait for source content
 pub trait Source: Downcast
@@ -69,6 +70,15 @@ impl SourceFile
 			}),
 		}
 	}
+
+	pub fn with_content(path: String, content: String, location: Option<Token>) -> Self
+	{
+		Self {
+			location: location,
+			path: path,
+			content: content,
+		}
+	}
 }
 
 impl Source for SourceFile
src/parser/state.rs

@@ -8,7 +8,7 @@ use crate::document::document::Document;
 use super::{parser::Parser, source::Source};
 
 /// Scope for state objects
-#[derive(PartialEq, PartialOrd)]
+#[derive(PartialEq, PartialOrd, Debug)]
 pub enum Scope
 {
 	/// Global state
@@ -31,7 +31,15 @@ pub trait State: Downcast
 }
 impl_downcast!(State);
 
+impl core::fmt::Debug for dyn State
+{
+	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+		write!(f, "State{{Scope: {:#?}}}", self.scope())
+	}
+}
+
 /// Object owning all the states
+#[derive(Debug)]
 pub struct StateHolder
 {
 	data: HashMap<String, Rc<RefCell<dyn State>>>
src/server.rs (112 changes)

@@ -12,7 +12,12 @@ use std::rc::Rc;
 use std::sync::Arc;
 
 use dashmap::DashMap;
-use document::variable::Variable;
+use document::document::Document;
+use document::element::Element;
+use lsp::semantic::{semantic_token_from_document, LEGEND_TYPE};
+use parser::langparser::LangParser;
+use parser::parser::Parser;
+use parser::source::SourceFile;
 use tower_lsp::jsonrpc::Result;
 use tower_lsp::lsp_types::*;
 use tower_lsp::{Client, LanguageServer, LspService, Server};
@@ -21,84 +26,32 @@ use tower_lsp::{Client, LanguageServer, LspService, Server};
 struct Backend {
 	client: Client,
 	document_map: DashMap<String, String>,
+	//ast_map: DashMap<String, Vec<Box<dyn Element>>>,
 	//variables: DashMap<String, HashMap<String, Arc<dyn Variable + Send + Sync + 'static>>>,
+	semantic_token_map: DashMap<String, Vec<SemanticToken>>,
 }
 
 #[derive(Debug)]
 struct TextDocumentItem {
 	uri: Url,
 	text: String,
-	version: i32,
 }
 
 impl Backend {
 	async fn on_change(&self, params: TextDocumentItem) {
 		self.document_map
 			.insert(params.uri.to_string(), params.text.clone());
-		let ParserResult {
-			ast,
-			parse_errors,
-			semantic_tokens,
-		} = parse(&params.text);
-		let diagnostics = parse_errors
-			.into_iter()
-			.filter_map(|item| {
-				let (message, span) = match item.reason() {
-					chumsky::error::SimpleReason::Unclosed { span, delimiter } => {
-						(format!("Unclosed delimiter {}", delimiter), span.clone())
-					}
-					chumsky::error::SimpleReason::Unexpected => (
-						format!(
-							"{}, expected {}",
-							if item.found().is_some() {
-								"Unexpected token in input"
-							} else {
-								"Unexpected end of input"
-							},
-							if item.expected().len() == 0 {
-								"something else".to_string()
-							} else {
-								item.expected()
-									.map(|expected| match expected {
-										Some(expected) => expected.to_string(),
-										None => "end of input".to_string(),
-									})
-									.collect::<Vec<_>>()
-									.join(", ")
-							}
-						),
-						item.span(),
-					),
-					chumsky::error::SimpleReason::Custom(msg) => (msg.to_string(), item.span()),
-				};
-
-				|| -> Option<Diagnostic> {
-					// let start_line = rope.try_char_to_line(span.start)?;
-					// let first_char = rope.try_line_to_char(start_line)?;
-					// let start_column = span.start - first_char;
-					let start_position = offset_to_position(span.start, &rope)?;
-					let end_position = offset_to_position(span.end, &rope)?;
-					// let end_line = rope.try_char_to_line(span.end)?;
-					// let first_char = rope.try_line_to_char(end_line)?;
-					// let end_column = span.end - first_char;
-					Some(Diagnostic::new_simple(
-						Range::new(start_position, end_position),
-						message,
-					))
-				}()
-			})
-			.collect::<Vec<_>>();
-
-		self.client
-			.publish_diagnostics(params.uri.clone(), diagnostics, Some(params.version))
-			.await;
-
-		if let Some(ast) = ast {
-			self.ast_map.insert(params.uri.to_string(), ast);
-		}
-		// self.client
-		//     .log_message(MessageType::INFO, &format!("{:?}", semantic_tokens))
-		//     .await;
+		// TODO: Create a custom parser for the lsp
+		// Which will require a dyn Document to work
+		let source = SourceFile::with_content(
+			params.uri.to_string(),
+			params.text.clone(),
+			None);
+		let parser = LangParser::default();
+		let doc = parser.parse(Rc::new(source), None);
+
+		let semantic_tokens = semantic_token_from_document(&doc);
 		self.semantic_token_map
 			.insert(params.uri.to_string(), semantic_tokens);
 	}
@@ -135,7 +88,7 @@ impl LanguageServer for Backend {
 				semantic_tokens_options: SemanticTokensOptions {
 					work_done_progress_options: WorkDoneProgressOptions::default(),
 					legend: SemanticTokensLegend {
-						token_types: vec![SemanticTokenType::COMMENT, SemanticTokenType::MACRO],
+						token_types: LEGEND_TYPE.into(),
 						token_modifiers: vec![],
 					},
 					range: None, //Some(true),
@@ -167,7 +120,6 @@ impl LanguageServer for Backend {
 		self.on_change(TextDocumentItem {
 			uri: params.text_document.uri,
 			text: params.text_document.text,
-			version: params.text_document.version,
 		})
 		.await
 	}
@@ -176,7 +128,6 @@ impl LanguageServer for Backend {
 		self.on_change(TextDocumentItem {
 			uri: params.text_document.uri,
 			text: std::mem::take(&mut params.content_changes[0].text),
-			version: params.text_document.version,
 		})
 		.await
 	}
@@ -200,22 +151,17 @@ impl LanguageServer for Backend {
 		self.client
 			.log_message(MessageType::LOG, "semantic_token_full")
 			.await;
-		let semantic_tokens = || -> Option<Vec<SemanticToken>> {
-			let semantic_tokens = vec![
-				SemanticToken {
-					delta_line: 1,
-					delta_start: 2,
-					length: 5,
-					token_type: 1,
-					token_modifiers_bitset: 0,
-				}
-			];
-			Some(semantic_tokens)
-		}();
-		if let Some(semantic_token) = semantic_tokens {
+		if let Some(semantic_tokens) = self.semantic_token_map.get(&uri) {
+			let data = semantic_tokens.iter()
+				.filter_map(|token| {
+					Some(token.clone())
+				})
+				.collect::<Vec<_>>();
+
 			return Ok(Some(SemanticTokensResult::Tokens(SemanticTokens {
 				result_id: None,
-				data: semantic_token,
+				data: data,
 			})));
 		}
 		Ok(None)
@@ -230,7 +176,9 @@ async fn main() {
 	let (service, socket) = LspService::new(
 		|client|
 		Backend {
-			client
+			client,
+			document_map: DashMap::new(),
+			semantic_token_map: DashMap::new(),
 		});
 	Server::new(stdin, stdout, socket).serve(service).await;
 }