Better variable definition

ef3d0c3e 2024-11-19 14:22:33 +01:00
parent 78509f7ae5
commit 0b3363313b
3 changed files with 30 additions and 15 deletions

@@ -16,6 +16,8 @@ pub trait Variable {
 	/// Converts variable to a string
 	fn to_string(&self) -> String;
 
+	fn value_token(&self) -> &Token;
+
 	fn parse<'a>(&self, state: &ParserState, location: Token, document: &'a dyn Document<'a>);
 }
@@ -29,14 +31,16 @@ impl core::fmt::Debug for dyn Variable {
 pub struct BaseVariable {
 	location: Token,
 	name: String,
+	value_token: Token,
 	value: String,
 }
 
 impl BaseVariable {
-	pub fn new(location: Token, name: String, value: String) -> Self {
+	pub fn new(location: Token, name: String, value_token: Token, value: String) -> Self {
 		Self {
 			location,
 			name,
+			value_token,
 			value,
 		}
 	}
@@ -49,6 +53,8 @@ impl Variable for BaseVariable {
 	fn to_string(&self) -> String { self.value.clone() }
 
+	fn value_token(&self) -> &Token { &self.value_token }
+
 	fn parse<'a>(&self, state: &ParserState, _location: Token, document: &'a dyn Document<'a>) {
 		let source = Rc::new(VirtualSource::new(
 			self.location().clone(),
@@ -68,14 +74,16 @@ impl Variable for BaseVariable {
 pub struct PathVariable {
 	location: Token,
 	name: String,
+	value_token: Token,
 	path: PathBuf,
 }
 
 impl PathVariable {
-	pub fn new(location: Token, name: String, path: PathBuf) -> Self {
+	pub fn new(location: Token, name: String, value_token: Token, path: PathBuf) -> Self {
 		Self {
 			location,
 			name,
+			value_token,
 			path,
 		}
 	}
@@ -88,6 +96,8 @@ impl Variable for PathVariable {
 	fn to_string(&self) -> String { self.path.to_str().unwrap().to_string() }
 
+	fn value_token(&self) -> &Token { &self.value_token }
+
 	fn parse(&self, state: &ParserState, location: Token, document: &dyn Document) {
 		let source = Rc::new(VirtualSource::new(
 			location,
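The net effect of this first file: every `Variable` now records where its value was written, separately from where the whole definition starts. A minimal, self-contained sketch of that idea, using simplified stand-in types rather than the repository's real `Token`, `ParserState` or `Document`:

use std::ops::Range;

// Stand-in for the repository's Token: just a byte range here.
#[derive(Clone, Debug, PartialEq)]
struct Token {
    range: Range<usize>,
}

trait Variable {
    /// Span of the whole variable definition.
    fn location(&self) -> &Token;
    /// Span of just the value, as introduced by this commit.
    fn value_token(&self) -> &Token;
}

struct BaseVariable {
    location: Token,
    value_token: Token,
    value: String,
}

impl Variable for BaseVariable {
    fn location(&self) -> &Token { &self.location }
    fn value_token(&self) -> &Token { &self.value_token }
}

fn main() {
    // Hypothetical definition occupying bytes 0..20, with its value at bytes 8..20.
    let var = BaseVariable {
        location: Token { range: 0..20 },
        value_token: Token { range: 8..20 },
        value: "some value".into(),
    };
    // Tooling can now target the value itself rather than the whole definition.
    assert_ne!(var.location(), var.value_token());
    println!("value {:?} spans {:?}", var.value, var.value_token().range);
}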

@@ -15,13 +15,6 @@ use crate::parser::rule::Rule;
 use crate::parser::source::Cursor;
 use crate::parser::source::Token;
 
-// TODO: Full refactor
-// Problem is that document parsed from other sources i.e by variables
-// are not merged correctly into existing paragraph
-// A solution would be to use the "(\n){2,}" regex to split paragraph, which would reduce the work needed for process_text
-// Another fix would be to keep parsing (recursively) into the same document (like previous version)
-// The issue is that this would break the current `Token` implementation
-// Which would need to be reworked
 #[derive(Debug)]
 pub struct Paragraph {
 	pub location: Token,
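The second file only removes the stale refactor TODO. For reference, the paragraph-splitting approach that comment suggested, breaking on runs of two or more newlines with a `(\n){2,}` regex, would look roughly like this (standalone sketch with the regex crate, not part of this commit):

use regex::Regex;

/// Split source text into paragraphs on runs of two or more newlines,
/// as suggested by the removed TODO comment.
fn split_paragraphs(source: &str) -> Vec<&str> {
    let separator = Regex::new(r"(\n){2,}").expect("valid regex");
    separator
        .split(source)
        .map(str::trim)
        .filter(|p| !p.is_empty())
        .collect()
}

fn main() {
    let text = "First paragraph\nstill first.\n\nSecond paragraph.";
    assert_eq!(
        split_paragraphs(text),
        vec!["First paragraph\nstill first.", "Second paragraph."]
    );
}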

@@ -58,14 +58,20 @@ impl VariableRule {
 		location: Token,
 		kind: usize,
 		name: String,
+		value_token: Token,
 		value: String,
 	) -> Result<Rc<dyn Variable>, String> {
 		match self.kinds[kind].0.as_str() {
-			"" => Ok(Rc::new(BaseVariable::new(location, name, value))),
+			"" => Ok(Rc::new(BaseVariable::new(
+				location,
+				name,
+				value_token,
+				value,
+			))),
 			"'" => {
 				match std::fs::canonicalize(value.as_str()) // TODO: not canonicalize
 				{
-					Ok(path) => Ok(Rc::new(PathVariable::new(location, name, path))),
+					Ok(path) => Ok(Rc::new(PathVariable::new(location, name, value_token, path))),
 					Err(e) => Err(format!("Unable to canonicalize path `{}`: {}",
 						value.fg(colors.highlight),
 						e))
@@ -204,9 +210,9 @@ impl RegexRule for VariableRule {
 			_ => panic!("Unknown variable name"),
 		};
 
-		let var_value = match matches.get(3) {
+		let (val_token, var_value) = match matches.get(3) {
 			Some(value) => match VariableRule::validate_value(value.as_str()) {
-				Ok(var_value) => var_value,
+				Ok(var_value) => (Token::new(value.range(), token.source()), var_value),
 				Err(msg) => {
 					report_err!(
 						&mut reports,
@@ -232,6 +238,7 @@ impl RegexRule for VariableRule {
 			token.clone(),
 			var_kind,
 			var_name.to_string(),
+			val_token,
 			var_value,
 		) {
 			Ok(variable) => document.add_variable(variable),
@@ -279,7 +286,12 @@ impl RegexRule for VariableRule {
 		lua.create_function(|_, (name, value): (String, String)| {
 			CTX.with_borrow(|ctx| {
 				ctx.as_ref().map(|ctx| {
-					let var = Rc::new(BaseVariable::new(ctx.location.clone(), name, value));
+					let var = Rc::new(BaseVariable::new(
+						ctx.location.clone(),
+						name,
+						ctx.location.clone(),
+						value,
+					));
 					ctx.document.add_variable(var);
 				})
 			});
@@ -440,7 +452,7 @@ impl RegexRule for VariableSubstitutionRule {
 		}
 
 		// Add definition
-		definition::from_source(token, variable.location(), &state.shared.lsp);
+		definition::from_source(token, variable.value_token(), &state.shared.lsp);
 
 		reports
 	}
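In the third file, the value token is carved out of the regex capture (`Token::new(value.range(), token.source())`) and is what go-to-definition now points at, instead of `variable.location()`. A self-contained sketch of that capture-range idea, with a made-up definition syntax rather than `VariableRule`'s real pattern:

use regex::Regex;

fn main() {
    // Hypothetical variable-definition syntax, only for illustration.
    let pattern = Regex::new(r"@([A-Za-z_][A-Za-z_0-9]*)\s*=\s*(.+)").expect("valid regex");
    let input = "@title = My document";

    let captures = pattern.captures(input).expect("input matches");
    let whole = captures.get(0).expect("full match");   // -> the variable's location
    let value = captures.get(2).expect("value group");  // -> the new value_token

    assert_eq!(whole.as_str(), "@title = My document");
    assert_eq!(value.as_str(), "My document");
    // value.range() is the span a Token would be built from, so an editor's
    // go-to-definition can land on `My document` rather than on the whole line.
    assert_eq!(value.range(), 9..20);
}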