From 5ccd8048c2a6f99b5d00b0c576c6791704bdd336 Mon Sep 17 00:00:00 2001 From: ef3d0c3e Date: Wed, 24 Jul 2024 09:05:57 +0200 Subject: [PATCH] Add some tests --- src/elements/code.rs | 40 ++- src/elements/raw.rs | 40 ++- src/lsp/parser.rs | 74 ++-- src/lsp/semantic.rs | 2 +- src/parser/langparser.rs | 27 +- src/parser/parser.rs | 34 +- src/parser/util.rs | 710 ++++++++++++++++++++++++--------------- src/server.rs | 6 +- 8 files changed, 579 insertions(+), 354 deletions(-) diff --git a/src/elements/code.rs b/src/elements/code.rs index 5921a53..164f109 100644 --- a/src/elements/code.rs +++ b/src/elements/code.rs @@ -6,7 +6,7 @@ use mlua::{Function, Lua}; use regex::{Captures, Regex}; use syntect::{easy::HighlightLines, highlighting::ThemeSet, parsing::SyntaxSet}; -use crate::{cache::cache::{Cached, CachedError}, compiler::compiler::{Compiler, Target}, document::{document::Document, element::{ElemKind, Element}}, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}, util::{self, Property, PropertyParser}}}; +use crate::{cache::cache::{Cached, CachedError}, compiler::compiler::{Compiler, Target}, document::{document::Document, element::{ElemKind, Element}}, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}, util::{self, Property, PropertyMapError, PropertyParser}}}; use lazy_static::lazy_static; #[derive(Clone, Copy, Debug, PartialEq, Eq)] @@ -341,18 +341,32 @@ impl RegexRule for CodeRule |prop, value| value.parse::().map_err(|e| (prop, e))) { Ok((_prop, offset)) => offset, - Err((prop, e)) => { - reports.push( - Report::build(ReportKind::Error, token.source(), token.start()) - .with_message("Invalid Code Property") - .with_label( - Label::new((token.source().clone(), token.start()+1..token.end())) - .with_message(format!("Property `line_offset: {}` cannot be converted: {}", - prop.fg(parser.colors().info), - e.fg(parser.colors().error))) - .with_color(parser.colors().warning)) - .finish()); - return reports; + Err(e) => match e { + PropertyMapError::ParseError((prop, err)) => { + reports.push( + Report::build(ReportKind::Error, token.source(), token.start()) + .with_message("Invalid Code Property") + .with_label( + Label::new((token.source().clone(), token.start()+1..token.end())) + .with_message(format!("Property `line_offset: {}` cannot be converted: {}", + prop.fg(parser.colors().info), + err.fg(parser.colors().error))) + .with_color(parser.colors().warning)) + .finish()); + return reports; + }, + PropertyMapError::NotFoundError(err) => { + reports.push( + Report::build(ReportKind::Error, token.source(), token.start()) + .with_message("Invalid Code Property") + .with_label( + Label::new((token.source().clone(), token.start()+1..token.end())) + .with_message(format!("Property `{}` doesn't exist", + err.fg(parser.colors().info))) + .with_color(parser.colors().warning)) + .finish()); + return reports; + } } }; diff --git a/src/elements/raw.rs b/src/elements/raw.rs index 3f51085..ccaab1f 100644 --- a/src/elements/raw.rs +++ b/src/elements/raw.rs @@ -1,6 +1,6 @@ use mlua::{Error::BadArgument, Function, Lua}; use regex::{Captures, Regex}; -use crate::{compiler::compiler::Compiler, document::{document::Document, element::{ElemKind, Element}}, lua::kernel::CTX, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}, util::{self, Property, PropertyParser}}}; +use crate::{compiler::compiler::Compiler, document::{document::Document, element::{ElemKind, Element}}, lua::kernel::CTX, parser::{parser::Parser, rule::RegexRule, source::{Source, Token}, util::{self, 
Property, PropertyMapError, PropertyParser}}}; use ariadne::{Fmt, Label, Report, ReportKind}; use std::{collections::HashMap, ops::Range, rc::Rc, str::FromStr, sync::Arc}; @@ -138,18 +138,32 @@ impl RegexRule for RawRule |prop, value| ElemKind::from_str(value.as_str()).map_err(|e| (prop, e))) { Ok((_prop, kind)) => kind, - Err((prop, e)) => { - reports.push( - Report::build(ReportKind::Error, token.source(), token.start()) - .with_message("Invalid Raw Code Property") - .with_label( - Label::new((token.source().clone(), token.range.clone())) - .with_message(format!("Property `kind: {}` cannot be converted: {}", - prop.fg(parser.colors().info), - e.fg(parser.colors().error))) - .with_color(parser.colors().warning)) - .finish()); - return reports; + Err(e) => match e { + PropertyMapError::ParseError((prop, err)) => { + reports.push( + Report::build(ReportKind::Error, token.source(), token.start()) + .with_message("Invalid Raw Code Property") + .with_label( + Label::new((token.source().clone(), token.range.clone())) + .with_message(format!("Property `kind: {}` cannot be converted: {}", + prop.fg(parser.colors().info), + err.fg(parser.colors().error))) + .with_color(parser.colors().warning)) + .finish()); + return reports; + }, + PropertyMapError::NotFoundError(err) => { + reports.push( + Report::build(ReportKind::Error, token.source(), token.start()) + .with_message("Invalid Code Property") + .with_label( + Label::new((token.source().clone(), token.start()+1..token.end())) + .with_message(format!("Property `{}` doesn't exist", + err.fg(parser.colors().info))) + .with_color(parser.colors().warning)) + .finish()); + return reports; + } } }; diff --git a/src/lsp/parser.rs b/src/lsp/parser.rs index 320fd9b..9f517b3 100644 --- a/src/lsp/parser.rs +++ b/src/lsp/parser.rs @@ -1,6 +1,6 @@ -use std::rc::Rc; +use std::{cell::{RefCell, RefMut}, collections::HashMap, rc::Rc}; -use crate::parser::source::{Cursor, Source}; +use crate::{document::{document::Document, element::Element}, lua::kernel::{Kernel, KernelHolder}, parser::{parser::{Parser, ReportColors}, rule::Rule, source::{Cursor, Source}, state::StateHolder}}; #[derive(Debug, Clone)] pub struct LineCursor @@ -56,26 +56,6 @@ impl LineCursor //eprintln!("({}, {c:#?}) ({} {})", self.pos, self.line, self.line_pos); prev = Some(c); } - - /* - self.source.content() - .as_str()[start..pos+1] - .char_indices() - .for_each(|(at, c)| { - self.pos = at+start; - - if c == '\n' - { - self.line += 1; - self.line_pos = 0; - } - else - { - self.line_pos += c.len_utf8(); - } - - }); - */ } else if pos < self.pos { @@ -114,3 +94,53 @@ impl From<&LineCursor> for Cursor } } } + +#[derive(Debug)] +pub struct LsParser +{ + rules: Vec>, + colors: ReportColors, + + // Parser state + pub state: RefCell, + pub kernels: RefCell>, +} + +impl Parser for LsParser +{ + fn colors(&self) -> &ReportColors { &self.colors } + fn rules(&self) -> &Vec> { &self.rules } + fn rules_mut(&mut self) -> &mut Vec> { &mut self.rules } + + fn state(&self) -> std::cell::Ref<'_, StateHolder> { self.state.borrow() } + fn state_mut(&self) -> std::cell::RefMut<'_, StateHolder> { self.state.borrow_mut() } + + fn push<'a>(&self, doc: &dyn Document, elem: Box) { + todo!() + } + + fn parse<'a>(&self, source: Rc, parent: Option<&'a dyn Document<'a>>) -> Box+'a> { + todo!() + } + + fn parse_into<'a>(&self, source: Rc, document: &'a dyn Document<'a>) { + todo!() + } +} + +impl KernelHolder for LsParser +{ + fn get_kernel(&self, name: &str) + -> Option> { + RefMut::filter_map(self.kernels.borrow_mut(), + 
|map| map.get_mut(name)).ok() + } + + fn insert_kernel(&self, name: String, kernel: Kernel) + -> RefMut<'_, Kernel> { + //TODO do not get + self.kernels.borrow_mut() + .insert(name.clone(), kernel); + self.get_kernel(name.as_str()).unwrap() + } +} diff --git a/src/lsp/semantic.rs b/src/lsp/semantic.rs index cebcf68..de00f56 100644 --- a/src/lsp/semantic.rs +++ b/src/lsp/semantic.rs @@ -59,7 +59,7 @@ pub fn provide(semantic_tokens: &mut Vec, cursor: &mut LineCursor } } -pub fn semantic_token_from_document(document: &Document) -> Vec +pub fn semantic_token_from_document(document: &dyn Document) -> Vec { let mut semantic_tokens = vec![]; diff --git a/src/parser/langparser.rs b/src/parser/langparser.rs index 80f000f..2221ce7 100644 --- a/src/parser/langparser.rs +++ b/src/parser/langparser.rs @@ -101,32 +101,7 @@ impl Parser for LangParser fn colors(&self) -> &ReportColors { &self.colors } fn rules(&self) -> &Vec> { &self.rules } - fn add_rule(&mut self, rule: Box, after: Option<&'static str>) - { - // Error on duplicate rule - let rule_name = (*rule).name(); - self.rules.iter().for_each(|rule| { - if (*rule).name() != rule_name { return; } - - panic!("Attempted to introduce duplicate rule: `{rule_name}`"); - }); - - match after - { - Some(name) => { - let before = self.rules.iter() - .enumerate() - .find(|(_pos, r)| (r).name() == name); - - match before - { - Some((pos, _)) => self.rules.insert(pos+1, rule), - _ => panic!("Unable to find rule named `{name}`, to insert rule `{}` after it", rule.name()) - } - } - _ => self.rules.push(rule) - } - } + fn rules_mut(&mut self) -> &mut Vec> { &mut self.rules } fn state(&self) -> std::cell::Ref<'_, StateHolder> { self.state.borrow() } fn state_mut(&self) -> std::cell::RefMut<'_, StateHolder> { self.state.borrow_mut() } diff --git a/src/parser/parser.rs b/src/parser/parser.rs index be104a1..037e9b6 100644 --- a/src/parser/parser.rs +++ b/src/parser/parser.rs @@ -48,7 +48,39 @@ pub trait Parser: KernelHolder fn colors(&self) -> &ReportColors; fn rules(&self) -> &Vec>; - fn add_rule(&mut self, rule: Box, after: Option<&'static str>); + fn rules_mut(&mut self) -> &mut Vec>; + + fn add_rule(&mut self, rule: Box, after: Option<&'static str>) -> Result<(), String> + { + // Error on duplicate rule + let rule_name = (*rule).name(); + if let Err(e) = self.rules().iter().try_for_each(|rule| { + if (*rule).name() != rule_name { return Ok(()); } + + return Err(format!("Attempted to introduce duplicate rule: `{rule_name}`")); + }) + { + return Err(e) + } + + match after + { + Some(name) => { + let before = self.rules().iter() + .enumerate() + .find(|(_pos, r)| (r).name() == name); + + match before + { + Some((pos, _)) => self.rules_mut().insert(pos+1, rule), + _ => return Err(format!("Unable to find rule named `{name}`, to insert rule `{}` after it", rule.name())) + } + } + _ => self.rules_mut().push(rule) + } + + Ok(()) + } fn state(&self) -> Ref<'_, StateHolder>; fn state_mut(&self) -> RefMut<'_, StateHolder>; diff --git a/src/parser/util.rs b/src/parser/util.rs index 219ef37..6a2a679 100644 --- a/src/parser/util.rs +++ b/src/parser/util.rs @@ -2,90 +2,88 @@ use std::collections::HashMap; use unicode_segmentation::UnicodeSegmentation; -use crate::{document::{document::{Document, DocumentAccessors}, element::ElemKind}, elements::paragraph::Paragraph}; +use crate::{ + document::{ + document::{Document, DocumentAccessors}, + element::ElemKind, + }, + elements::paragraph::Paragraph, +}; /// Processes text for escape characters and paragraphing -pub fn 
process_text(document: &dyn Document, content: &str) -> String -{ - let mut escaped = false; - let mut newlines = 0usize; // Consecutive newlines - //println!("Processing: [{content}]"); - let processed = content - .grapheme_indices(true) - .fold((String::new(), None), - |(mut out, prev), (_pos, g)| { - if newlines != 0 && g != "\n" - { - newlines = 0; +pub fn process_text(document: &dyn Document, content: &str) -> String { + let mut escaped = false; + let mut newlines = 0usize; // Consecutive newlines + //println!("Processing: [{content}]"); + let processed = content + .graphemes(true) + .fold((String::new(), None), |(mut out, prev), g| { + if newlines != 0 && g != "\n" { + newlines = 0; - // Add a whitespace if necessary - match out.chars().last() - { - Some(c) => { - // NOTE: \n is considered whitespace, so previous codepoint can be \n - // (Which can only be done by escaping it) - if !c.is_whitespace() || c == '\n' - { - out += " "; - } - } - None => { - if document.last_element::() - .and_then(|par| par.find_back(|e| e.kind() != ElemKind::Invisible) - .and_then(|e| Some(e.kind() == ElemKind::Inline))) - .unwrap_or(false) - { - out += " "; - } - } // Don't output anything - } - } + // Add a whitespace if necessary + match out.chars().last() { + Some(c) => { + // NOTE: \n is considered whitespace, so previous codepoint can be \n + // (Which can only be done by escaping it) + if !c.is_whitespace() || c == '\n' { + out += " "; + } + } + None => { + if document + .last_element::() + .and_then(|par| { + par.find_back(|e| e.kind() != ElemKind::Invisible) + .and_then(|e| Some(e.kind() == ElemKind::Inline)) + }) + .unwrap_or(false) + { + out += " "; + } + } // Don't output anything + } + } - // Output grapheme literally when escaped - if escaped - { - escaped = false; - return (out + g, Some(g)); - } - // Increment newlines counter - else if g == "\n" - { - newlines += 1; - return (out, Some(g)); - } - // Determine if escaped - else if g == "\\" - { - escaped = !escaped; - return (out, Some(g)); - } - // Whitespaces - else if g.chars().count() == 1 && g.chars().last().unwrap().is_whitespace() - { - // Content begins with whitespace - if prev.is_none() - { - if document.last_element::().is_some() - { - return (out+g, Some(g)); - } - else - { - return (out, Some(g)); - } - } - // Consecutive whitespaces are converted to a single whitespace - else if prev.unwrap().chars().count() == 1 && - prev.unwrap().chars().last().unwrap().is_whitespace() - { - return (out, Some(g)); - } - } + // Output grapheme literally when escaped + if escaped { + escaped = false; + return (out + g, Some(g)); + } + // Increment newlines counter + else if g == "\n" { + newlines += 1; + return (out, Some(g)); + } + // Determine if escaped + else if g == "\\" { + escaped = !escaped; + return (out, Some(g)); + } + // Whitespaces + else if g.chars().count() == 1 && g.chars().last().unwrap().is_whitespace() { + // Content begins with whitespace + if prev.is_none() { + if document.last_element::().is_some() { + return (out + g, Some(g)); + } else { + return (out, Some(g)); + } + } + // Consecutive whitespaces are converted to a single whitespace + else if prev.unwrap().chars().count() == 1 + && prev.unwrap().chars().last().unwrap().is_whitespace() + { + return (out, Some(g)); + } + } - return (out + g, Some(g)); - }).0.to_string(); + return (out + g, Some(g)); + }) + .0 + .to_string(); - return processed; + return processed; } /// Processed a string and escapes a single token out of it @@ -94,104 +92,124 @@ pub fn 
process_text(document: &dyn Document, content: &str) -> String /// # Example /// ``` /// assert_eq!(process_escaped('\\', "%", "escaped: \\%, also escaped: \\\\\\%, untouched: \\a"), -/// "escaped: %, also escaped: \\%, untouched \\a"); +/// "escaped: %, also escaped: \\%, untouched: \\a"); /// ``` -pub fn process_escaped>(escape: char, token: &'static str, content: S) -> String -{ - let mut processed = String::new(); - let mut escaped = 0; - let mut token_it = token.chars().peekable(); - for c in content.as_ref().chars() - .as_str() - .trim_start() - .trim_end() - .chars() - { - if c == escape - { - escaped += 1; - } - else if escaped % 2 == 1 && token_it.peek().map_or(false, |p| *p == c) - { - let _ = token_it.next(); - if token_it.peek() == None - { - (0..((escaped-1)/2)) - .for_each(|_| processed.push(escape)); - escaped = 0; - token_it = token.chars().peekable(); - processed.push_str(token); - } - } - else - { - if escaped != 0 - { - // Add untouched escapes - (0..escaped).for_each(|_| processed.push('\\')); - token_it = token.chars().peekable(); - escaped = 0; - } - processed.push(c); - } - } - // Add trailing escapes - (0..escaped).for_each(|_| processed.push('\\')); +pub fn process_escaped>(escape: char, token: &'static str, content: S) -> String { + let mut processed = String::new(); + let mut escaped = 0; + let mut token_it = token.chars().peekable(); + for c in content + .as_ref() + .chars() + .as_str() + .trim_start() + .trim_end() + .chars() + { + if c == escape { + escaped += 1; + } else if escaped % 2 == 1 && token_it.peek().map_or(false, |p| *p == c) { + let _ = token_it.next(); + if token_it.peek() == None { + (0..(escaped / 2)).for_each(|_| processed.push(escape)); + escaped = 0; + token_it = token.chars().peekable(); + processed.push_str(token); + } + } else { + if escaped != 0 { + // Add untouched escapes + (0..escaped).for_each(|_| processed.push('\\')); + token_it = token.chars().peekable(); + escaped = 0; + } + processed.push(c); + } + } + // Add trailing escapes + (0..escaped).for_each(|_| processed.push('\\')); - processed + processed } #[derive(Debug)] -pub struct Property -{ - required: bool, - description: String, - default: Option, +pub struct Property { + required: bool, + description: String, + default: Option, } impl Property { pub fn new(required: bool, description: String, default: Option) -> Self { - Self { required, description, default } + Self { + required, + description, + default, + } } } -impl core::fmt::Display for Property -{ +impl core::fmt::Display for Property { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self.default.as_ref() - { - None => write!(f, "{} {}", - ["[Opt]", "[Req]"][self.required as usize], - self.description), - Some(default) => write!(f, "{} {} (Deafult: {})", - ["[Opt]", "[Req]"][self.required as usize], - self.description, - default) - } + match self.default.as_ref() { + None => write!( + f, + "{} {}", + ["[Opt]", "[Req]"][self.required as usize], + self.description + ), + Some(default) => write!( + f, + "{} {} (Deafult: {})", + ["[Opt]", "[Req]"][self.required as usize], + self.description, + default + ), + } } } #[derive(Debug)] -pub struct PropertyMap<'a> -{ - pub(crate) properties: HashMap +pub enum PropertyMapError { + ParseError(E), + NotFoundError(String), +} + +#[derive(Debug)] +pub struct PropertyMap<'a> { + pub(crate) properties: HashMap, } impl<'a> PropertyMap<'a> { pub fn new() -> Self { - Self { properties: HashMap::new() } + Self { + properties: HashMap::new(), + } } - pub fn get 
Result>(&self, name: &str, f: F) - -> Result<(&'a Property, T), Error> { - let (prop, value) = self.properties.get(name).unwrap(); + pub fn get Result>( + &self, + name: &str, + f: F, + ) -> Result<(&'a Property, T), PropertyMapError> { + let (prop, value) = match self.properties.get(name) { + Some(found) => found, + None => { + return Err(PropertyMapError::NotFoundError(format!( + "Property `{name}` not found" + ))) + } + }; - f(prop, value).and_then(|value| Ok((*prop, value))) - } + match f(prop, value) { + Ok(parsed) => Ok((*prop, parsed)), + Err(err) => Err(PropertyMapError::ParseError(err)), + } + } } pub struct PropertyParser { - properties: HashMap, + properties: HashMap, } impl PropertyParser { @@ -199,63 +217,60 @@ impl PropertyParser { Self { properties } } - /// Attempts to build a default propertymap - /// - /// Returns an error if at least one [`Property`] is required and doesn't provide a default - pub fn default(&self) -> Result, String> { - let mut properties = PropertyMap::new(); + /// Attempts to build a default propertymap + /// + /// Returns an error if at least one [`Property`] is required and doesn't provide a default + pub fn default(&self) -> Result, String> { + let mut properties = PropertyMap::new(); - for (name, prop) in &self.properties - { - match (prop.required, prop.default.as_ref()) - { - (true, None) => return Err(format!("Missing property `{name}` {prop}")), - (false, None) => {}, - (_, Some(default)) => { - properties.properties.insert( - name.clone(), - (prop, default.clone()) - ); - } - } - } + for (name, prop) in &self.properties { + match (prop.required, prop.default.as_ref()) { + (true, None) => return Err(format!("Missing property `{name}` {prop}")), + (false, None) => {} + (_, Some(default)) => { + properties + .properties + .insert(name.clone(), (prop, default.clone())); + } + } + } - Ok(properties) - } + Ok(properties) + } - /// Parses properties string "prop1=value1, prop2 = val\,2" -> {prop1: value1, prop2: val,2} - /// - /// # Key-value pair - /// - /// Property names/values are separated by a single '=' that cannot be escaped. - /// Therefore names cannot contain the '=' character. - /// - /// # Example - /// - /// ``` - /// let properties = HashMap::new(); - /// properties.insert("width", Property::new(true, "Width of the element in em", None)); - /// - /// let parser = PropertyParser::new(properties); - /// let pm = parser.parse("width=15").unwrap(); - /// - /// assert!(pm.get("width", |_, val| val.parse::()) == Ok(15)); - /// ``` - /// # Return value - /// - /// Returns the parsed property map, or an error if either: - /// * A required property is missing - /// * An unknown property is present - /// * A duplicate property is present - /// - /// Note: Only ',' inside values can be escaped, other '\' are treated literally - pub fn parse(&self, content: &str) -> Result, String> { - let mut properties = PropertyMap::new(); - let mut try_insert = |name: &String, value: &String| - -> Result<(), String> { - let trimmed_name = name.trim_end().trim_start(); - let trimmed_value = value.trim_end().trim_start(); - let prop = match self.properties.get(trimmed_name) + /// Parses properties string "prop1=value1, prop2 = val\,2" -> {prop1: value1, prop2: val,2} + /// + /// # Key-value pair + /// + /// Property names/values are separated by a single '=' that cannot be escaped. + /// Therefore names cannot contain the '=' character. 
+ /// + /// # Example + /// + /// ``` + /// let mut properties = HashMap::new(); + /// properties.insert("width".to_string(), + /// Property::new(true, "Width of the element in em".to_string(), None)); + /// + /// let parser = PropertyParser::new(properties); + /// let pm = parser.parse("width=15").unwrap(); + /// + /// assert_eq!(pm.get("width", |_, s| s.parse::()).unwrap().1, 15); + /// ``` + /// # Return value + /// + /// Returns the parsed property map, or an error if either: + /// * A required property is missing + /// * An unknown property is present + /// * A duplicate property is present + /// + /// Note: Only ',' inside values can be escaped, other '\' are treated literally + pub fn parse(&self, content: &str) -> Result, String> { + let mut properties = PropertyMap::new(); + let mut try_insert = |name: &String, value: &String| -> Result<(), String> { + let trimmed_name = name.trim_end().trim_start(); + let trimmed_value = value.trim_end().trim_start(); + let prop = match self.properties.get(trimmed_name) { None => return Err(format!("Unknown property name: `{trimmed_name}` (with value: `{trimmed_value}`). Valid properties are:\n{}", self.properties.iter().fold(String::new(), @@ -263,81 +278,226 @@ impl PropertyParser { Some(prop) => prop }; - if let Some((_, previous)) = properties.properties.insert( - trimmed_name.to_string(), - (prop, trimmed_value.to_string())) - { - return Err(format!("Duplicate property `{trimmed_name}`, previous value: `{previous}` current value: `{trimmed_value}`")) - } + if let Some((_, previous)) = properties + .properties + .insert(trimmed_name.to_string(), (prop, trimmed_value.to_string())) + { + return Err(format!("Duplicate property `{trimmed_name}`, previous value: `{previous}` current value: `{trimmed_value}`")); + } - Ok(()) - }; + Ok(()) + }; - let mut in_name = true; - let mut name = String::new(); - let mut value = String::new(); - let mut escaped = 0usize; - for c in content.chars() - { - if c == '\\' - { - escaped += 1; - } - else if c == '=' && in_name - { - in_name = false; - (0..escaped).for_each(|_| name.push('\\')); - escaped = 0; - } - else if c == ',' && !in_name - { - if escaped % 2 == 0 // Not escaped - { - (0..escaped/2).for_each(|_| value.push('\\')); - escaped = 0; - in_name = true; + let mut in_name = true; + let mut name = String::new(); + let mut value = String::new(); + let mut escaped = 0usize; + for c in content.chars() { + if c == '\\' { + escaped += 1; + } else if c == '=' && in_name { + in_name = false; + (0..escaped).for_each(|_| name.push('\\')); + escaped = 0; + } else if c == ',' && !in_name { + if escaped % 2 == 0 + // Not escaped + { + (0..escaped / 2).for_each(|_| value.push('\\')); + escaped = 0; + in_name = true; - if let Err(e) = try_insert(&name, &value) { - return Err(e) - } - name.clear(); - value.clear(); - } - else - { - (0..(escaped-1)/2).for_each(|_| value.push('\\')); - value.push(','); - escaped = 0; - } - } - else - { - if in_name { - (0..escaped).for_each(|_| name.push('\\')); - name.push(c) - } - else { - (0..escaped).for_each(|_| value.push('\\')); - value.push(c) - } - escaped = 0; - } - } - if !in_name && value.trim_end().trim_start().is_empty() - { - return Err("Expected a value after last `=`".to_string()) - } - else if name.is_empty() || value.is_empty() - { - return Err("Expected non empty property list.".to_string()); - } + if let Err(e) = try_insert(&name, &value) { + return Err(e); + } + name.clear(); + value.clear(); + } else { + (0..(escaped - 1) / 2).for_each(|_| value.push('\\')); + 
value.push(','); + escaped = 0; + } + } else { + if in_name { + (0..escaped).for_each(|_| name.push('\\')); + name.push(c) + } else { + (0..escaped).for_each(|_| value.push('\\')); + value.push(c) + } + escaped = 0; + } + } + if !in_name && value.trim_end().trim_start().is_empty() { + return Err("Expected a value after last `=`".to_string()); + } else if name.is_empty() || value.is_empty() { + return Err("Expected non empty property list.".to_string()); + } - if let Err(e) = try_insert(&name, &value) { - return Err(e) - } + if let Err(e) = try_insert(&name, &value) { + return Err(e); + } - // TODO: Missing properties - - Ok(properties) - } + if let Err(e) = self.properties.iter().try_for_each(|(key, prop)| { + if !properties.properties.contains_key(key) { + if let Some(default) = &prop.default { + properties + .properties + .insert(key.clone(), (prop, default.clone())); + } else if prop.required { + return Err(format!("Missing required property: {prop}")); + } + } + Ok(()) + }) { + Err(e) + } else { + Ok(properties) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + document::langdocument::LangDocument, + elements::{comment::Comment, style::Style, text::Text}, + parser::source::{SourceFile, Token}, + }; + use std::rc::Rc; + + #[test] + fn process_text_tests() { + let source = Rc::new(SourceFile::with_content( + "".to_string(), + "".to_string(), + None, + )); + let doc = LangDocument::new(source.clone(), None); + + assert_eq!(process_text(&doc, "a\nb"), "a b"); + assert_eq!(process_text(&doc, "a\n\nb"), "a b"); // Should never happen but why not + assert_eq!(process_text(&doc, "a\\b"), "ab"); + assert_eq!(process_text(&doc, "a\\\nb"), "a\nb"); + assert_eq!(process_text(&doc, "a\\\\b"), "a\\b"); + assert_eq!(process_text(&doc, "a\\\\\nb"), "a\\ b"); + assert_eq!(process_text(&doc, "\na"), "a"); + + let tok = Token::new(0..0, source); + doc.push(Box::new(Paragraph::new(tok.clone()))); + + // A space is appended as previous element is inline + (&doc as &dyn Document) + .last_element_mut::() + .unwrap() + .push(Box::new(Text::new(tok.clone(), "TEXT".to_string()))); + assert_eq!(process_text(&doc, "\na"), " a"); + + (&doc as &dyn Document) + .last_element_mut::() + .unwrap() + .push(Box::new(Style::new(tok.clone(), 0, false))); + assert_eq!(process_text(&doc, "\na"), " a"); + + // Comments are ignored (kind => Invisible) + (&doc as &dyn Document) + .last_element_mut::() + .unwrap() + .push(Box::new(Comment::new(tok.clone(), "COMMENT".to_string()))); + assert_eq!(process_text(&doc, "\na"), " a"); + } + + #[test] + fn process_escaped_tests() { + assert_eq!( + process_escaped( + '\\', + "%", + "escaped: \\%, also escaped: \\\\\\%, untouched: \\a" + ), + "escaped: %, also escaped: \\%, untouched: \\a" + ); + assert_eq!( + process_escaped('"', "><)))°>", "Escaped fish: \"><)))°>"), + "Escaped fish: ><)))°>".to_string() + ); + assert_eq!( + process_escaped('\\', "]", "Escaped \\]"), + "Escaped ]".to_string() + ); + assert_eq!( + process_escaped('\\', "]", "Unescaped \\\\]"), + "Unescaped \\\\]".to_string() + ); + assert_eq!( + process_escaped('\\', "]", "Escaped \\\\\\]"), + "Escaped \\]".to_string() + ); + assert_eq!( + process_escaped('\\', "]", "Unescaped \\\\\\\\]"), + "Unescaped \\\\\\\\]".to_string() + ); + } + + #[test] + fn property_parser_tests() { + let mut properties = HashMap::new(); + properties.insert( + "width".to_string(), + Property::new(true, "Width of the element in em".to_string(), None), + ); + properties.insert( + "length".to_string(), + Property::new(false, 
"Length in cm".to_string(), None), + ); + properties.insert( + "angle".to_string(), + Property::new( + true, + "Angle in degrees".to_string(), + Some("180".to_string()), + ), + ); + properties.insert( + "weight".to_string(), + Property::new(false, "Weight in %".to_string(), Some("0.42".to_string())), + ); + + let parser = PropertyParser::new(properties); + let pm = parser.parse("width=15,length=-10").unwrap(); + + // Ok + assert_eq!(pm.get("width", |_, s| s.parse::()).unwrap().1, 15); + assert_eq!(pm.get("length", |_, s| s.parse::()).unwrap().1, -10); + assert_eq!(pm.get("angle", |_, s| s.parse::()).unwrap().1, 180f64); + assert_eq!(pm.get("angle", |_, s| s.parse::()).unwrap().1, 180); + assert_eq!( + pm.get("weight", |_, s| s.parse::()).unwrap().1, + 0.42f32 + ); + assert_eq!( + pm.get("weight", |_, s| s.parse::()).unwrap().1, + 0.42f64 + ); + + // Error + assert!(pm.get("length", |_, s| s.parse::()).is_err()); + assert!(pm.get("height", |_, s| s.parse::()).is_err()); + + // Missing property + assert!(parser.parse("length=15").is_err()); + + // Defaults + assert!(parser.parse("width=15").is_ok()); + assert_eq!( + parser + .parse("width=0,weight=0.15") + .unwrap() + .get("weight", |_, s| s.parse::()) + .unwrap() + .1, + 0.15f32 + ); + } } diff --git a/src/server.rs b/src/server.rs index b73e404..2e26c32 100644 --- a/src/server.rs +++ b/src/server.rs @@ -51,9 +51,9 @@ impl Backend { let parser = LangParser::default(); let doc = parser.parse(Rc::new(source), None); - let semantic_tokens = semantic_token_from_document(&doc); - self.semantic_token_map - .insert(params.uri.to_string(), semantic_tokens); + //let semantic_tokens = semantic_token_from_document(&doc); + //self.semantic_token_map + // .insert(params.uri.to_string(), semantic_tokens); } }