Better variable definition

parent 78509f7ae5
commit 0b3363313b

3 changed files with 30 additions and 15 deletions
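In outline: the `Variable` trait gains a `value_token()` accessor, `BaseVariable` and `PathVariable` store the token spanning the variable's value, `VariableRule` builds that token from the regex capture of the value (reusing the definition location for variables created from Lua), and `VariableSubstitutionRule` reports the value token instead of the whole definition location to the LSP definition provider. Below is a minimal, self-contained sketch of the resulting shape; the `Token` type and the spans used here are simplified stand-ins for illustration, not the repository's real API.

// Minimal, self-contained sketch of the shape this commit moves toward.
// `Token` here is a simplified stand-in (just a byte range), not the
// repository's real source/token type.

#[derive(Clone, Debug)]
struct Token {
    range: std::ops::Range<usize>,
}

trait Variable {
    /// Span of the whole variable definition
    fn location(&self) -> &Token;
    /// Span of just the value text (what this commit adds)
    fn value_token(&self) -> &Token;
    /// Converts variable to a string
    fn to_string(&self) -> String;
}

struct BaseVariable {
    location: Token,
    name: String,
    value_token: Token,
    value: String,
}

impl BaseVariable {
    fn new(location: Token, name: String, value_token: Token, value: String) -> Self {
        Self { location, name, value_token, value }
    }
}

impl Variable for BaseVariable {
    fn location(&self) -> &Token { &self.location }
    fn value_token(&self) -> &Token { &self.value_token }
    fn to_string(&self) -> String { self.value.clone() }
}

fn main() {
    // Hypothetical spans: bytes 0..22 hold the whole definition,
    // bytes 9..20 hold only the value.
    let var = BaseVariable::new(
        Token { range: 0..22 },
        "title".to_string(),
        Token { range: 9..20 },
        "My Document".to_string(),
    );
    // Go-to-definition can now target the value span instead of the
    // whole definition span.
    println!(
        "{} = {:?}: definition at {:?}, value at {:?}",
        var.name,
        var.to_string(),
        var.location().range,
        var.value_token().range
    );
}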
@@ -16,6 +16,8 @@ pub trait Variable {
 	/// Converts variable to a string
 	fn to_string(&self) -> String;
 
+	fn value_token(&self) -> &Token;
+
 	fn parse<'a>(&self, state: &ParserState, location: Token, document: &'a dyn Document<'a>);
 }
 
@@ -29,14 +31,16 @@ impl core::fmt::Debug for dyn Variable {
 pub struct BaseVariable {
 	location: Token,
 	name: String,
+	value_token: Token,
 	value: String,
 }
 
 impl BaseVariable {
-	pub fn new(location: Token, name: String, value: String) -> Self {
+	pub fn new(location: Token, name: String, value_token: Token, value: String) -> Self {
 		Self {
 			location,
 			name,
+			value_token,
 			value,
 		}
 	}
@@ -49,6 +53,8 @@ impl Variable for BaseVariable {
 
 	fn to_string(&self) -> String { self.value.clone() }
 
+	fn value_token(&self) -> &Token { &self.value_token }
+
 	fn parse<'a>(&self, state: &ParserState, _location: Token, document: &'a dyn Document<'a>) {
 		let source = Rc::new(VirtualSource::new(
 			self.location().clone(),
@@ -68,14 +74,16 @@ impl Variable for BaseVariable {
 pub struct PathVariable {
 	location: Token,
 	name: String,
+	value_token: Token,
 	path: PathBuf,
 }
 
 impl PathVariable {
-	pub fn new(location: Token, name: String, path: PathBuf) -> Self {
+	pub fn new(location: Token, name: String, value_token: Token, path: PathBuf) -> Self {
 		Self {
 			location,
 			name,
+			value_token,
 			path,
 		}
 	}
@@ -88,6 +96,8 @@ impl Variable for PathVariable {
 
 	fn to_string(&self) -> String { self.path.to_str().unwrap().to_string() }
 
+	fn value_token(&self) -> &Token { &self.value_token }
+
 	fn parse(&self, state: &ParserState, location: Token, document: &dyn Document) {
 		let source = Rc::new(VirtualSource::new(
 			location,
@@ -15,13 +15,6 @@ use crate::parser::rule::Rule;
 use crate::parser::source::Cursor;
 use crate::parser::source::Token;
 
-// TODO: Full refactor
-// Problem is that document parsed from other sources i.e by variables
-// are not merged correctly into existing paragraph
-// A solution would be to use the "(\n){2,}" regex to split paragraph, which would reduce the work needed for process_text
-// Another fix would be to keep parsing (recursively) into the same document (like previous version)
-// The issue is that this would break the current `Token` implementation
-// Which would need to be reworked
 #[derive(Debug)]
 pub struct Paragraph {
 	pub location: Token,
@@ -58,14 +58,20 @@ impl VariableRule {
 		location: Token,
 		kind: usize,
 		name: String,
+		value_token: Token,
 		value: String,
 	) -> Result<Rc<dyn Variable>, String> {
 		match self.kinds[kind].0.as_str() {
-			"" => Ok(Rc::new(BaseVariable::new(location, name, value))),
+			"" => Ok(Rc::new(BaseVariable::new(
+				location,
+				name,
+				value_token,
+				value,
+			))),
 			"'" => {
 				match std::fs::canonicalize(value.as_str()) // TODO: not canonicalize
 				{
-					Ok(path) => Ok(Rc::new(PathVariable::new(location, name, path))),
+					Ok(path) => Ok(Rc::new(PathVariable::new(location, name, value_token, path))),
 					Err(e) => Err(format!("Unable to canonicalize path `{}`: {}",
 						value.fg(colors.highlight),
 						e))
@@ -204,9 +210,9 @@ impl RegexRule for VariableRule {
 			_ => panic!("Unknown variable name"),
 		};
 
-		let var_value = match matches.get(3) {
+		let (val_token, var_value) = match matches.get(3) {
 			Some(value) => match VariableRule::validate_value(value.as_str()) {
-				Ok(var_value) => var_value,
+				Ok(var_value) => (Token::new(value.range(), token.source()), var_value),
 				Err(msg) => {
 					report_err!(
 						&mut reports,
@@ -232,6 +238,7 @@ impl RegexRule for VariableRule {
 			token.clone(),
 			var_kind,
 			var_name.to_string(),
+			val_token,
 			var_value,
 		) {
 			Ok(variable) => document.add_variable(variable),
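The two hunks above are where the value token is produced and consumed on the parsing side: the value capture of the definition regex is wrapped in a token via `Token::new(value.range(), token.source())`, and that `val_token` is then handed to `make_variable`. The following is a rough, self-contained illustration of the capture-to-span pattern using the `regex` crate; the definition syntax, the regex, and the `Token` struct are invented for this example and are not the rule's real ones.

// Rough illustration of the capture-to-token pattern: the token for the
// value covers only the value capture group, not the whole match.
// The syntax `%name = value%` and this `Token` type are invented here;
// the real rule's regex and `Token::new(range, source)` differ.
use regex::Regex;

#[derive(Debug)]
struct Token {
    range: std::ops::Range<usize>,
}

fn main() {
    let re = Regex::new(r"^%(\w+)\s*=\s*(.+)%$").unwrap();
    let source = "%title = My Document%";
    if let Some(captures) = re.captures(source) {
        let value = captures.get(2).unwrap();
        // Same idea as `Token::new(value.range(), token.source())`:
        // keep the span of just the value text.
        let val_token = Token { range: value.range() };
        println!(
            "value {:?} spans {:?}",
            &source[val_token.range.clone()],
            val_token.range
        );
    }
}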
@@ -279,7 +286,12 @@ impl RegexRule for VariableRule {
 		lua.create_function(|_, (name, value): (String, String)| {
 			CTX.with_borrow(|ctx| {
 				ctx.as_ref().map(|ctx| {
-					let var = Rc::new(BaseVariable::new(ctx.location.clone(), name, value));
+					let var = Rc::new(BaseVariable::new(
+						ctx.location.clone(),
+						name,
+						ctx.location.clone(),
+						value,
+					));
 					ctx.document.add_variable(var);
 				})
 			});
@@ -440,7 +452,7 @@ impl RegexRule for VariableSubstitutionRule {
 		}
 
 		// Add definition
-		definition::from_source(token, variable.location(), &state.shared.lsp);
+		definition::from_source(token, variable.value_token(), &state.shared.lsp);
 
 		reports
 	}