Experimental range resolving
parent d4c8e1c897
commit 726dbdaf7c
15 changed files with 420 additions and 104 deletions
@@ -12,8 +12,6 @@ pub trait Variable {
fn location(&self) -> &Token;

fn name(&self) -> &str;
/// Parse variable from string, returns an error message on failure
fn from_string(&mut self, str: &str) -> Option<String>;

/// Converts variable to a string
fn to_string(&self) -> String;

@@ -49,11 +47,6 @@ impl Variable for BaseVariable {
fn name(&self) -> &str { self.name.as_str() }

fn from_string(&mut self, str: &str) -> Option<String> {
self.value = str.to_string();
None
}

fn to_string(&self) -> String { self.value.clone() }

fn parse<'a>(&self, state: &ParserState, _location: Token, document: &'a dyn Document<'a>) {

@@ -91,11 +84,6 @@ impl Variable for PathVariable {
fn name(&self) -> &str { self.name.as_str() }

fn from_string(&mut self, str: &str) -> Option<String> {
self.path = std::fs::canonicalize(str).unwrap();
None
}

fn to_string(&self) -> String { self.path.to_str().unwrap().to_string() }

fn parse(&self, state: &ParserState, location: Token, document: &dyn Document) {

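A note on the contract above: `from_string` returns `None` on success and `Some(error message)` on failure. A minimal round-trip sketch; the exact `BaseVariable` field layout is an assumption, since only `name` and `value` appear in this diff:

    // Hypothetical construction; the field set is inferred from the impl above.
    let mut var = BaseVariable {
        location,                  // some Token
        name: "title".to_string(),
        value: String::new(),
    };
    assert!(var.from_string("My Document").is_none()); // None means success
    assert_eq!(var.to_string(), "My Document");
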
@@ -14,7 +14,6 @@ use ariadne::Report;
use ariadne::ReportKind;
use regex::Captures;
use regex::Regex;
use std::cell::RefMut;
use std::ops::Range;
use std::rc::Rc;

@@ -3,6 +3,7 @@ use crate::compiler::compiler::Target;
use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::lsp::semantic::Semantics;
use crate::lua::kernel::CTX;
use crate::parser::layout::LayoutHolder;
use crate::parser::layout::LayoutType;

@@ -516,6 +517,25 @@ impl RegexRule for LayoutRule {
|| panic!("Invalid state at: `{STATE_NAME}`"),
|s| s.stack.push((vec![token.clone()], layout_type.clone())),
);

if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
{
let start = matches.get(0).map(|m| {
m.start() + token.source().content()[m.start()..].find('#').unwrap()
}).unwrap();
sems.add(start..start + 2, tokens.layout_sep);
sems.add(
start + 2..start + 2 + "LAYOUT_BEGIN".len(),
tokens.layout_token,
);
if let Some(props) = matches.get(1).map(|m| m.range()) {
sems.add(props.start - 1..props.start, tokens.layout_props_sep);
sems.add(props.clone(), tokens.layout_props);
sems.add(props.end..props.end + 1, tokens.layout_props_sep);
}
sems.add(matches.get(2).unwrap().range(), tokens.layout_type);
}
}
};
return reports;

@@ -578,6 +598,24 @@ impl RegexRule for LayoutRule {
}
};

if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
{
let start = matches.get(0).map(|m| {
m.start() + token.source().content()[m.start()..].find('#').unwrap()
}).unwrap();
sems.add(start..start + 2, tokens.layout_sep);
sems.add(
start + 2..start + 2 + "LAYOUT_NEXT".len(),
tokens.layout_token,
);
if let Some(props) = matches.get(1).map(|m| m.range()) {
sems.add(props.start - 1..props.start, tokens.layout_props_sep);
sems.add(props.clone(), tokens.layout_props);
sems.add(props.end..props.end + 1, tokens.layout_props_sep);
}
}

tokens.push(token.clone());
(
tokens.len() - 1,

@@ -585,6 +623,7 @@ impl RegexRule for LayoutRule {
layout_type.clone(),
properties,
)
} else {
// LAYOUT_END
let mut rule_state_borrow = rule_state.as_ref().borrow_mut();

@@ -644,6 +683,25 @@ impl RegexRule for LayoutRule {
let layout_type = layout_type.clone();
let id = tokens.len();
layout_state.stack.pop();

if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
{
let start = matches.get(0).map(|m| {
m.start() + token.source().content()[m.start()..].find('#').unwrap()
}).unwrap();
sems.add(start..start + 2, tokens.layout_sep);
sems.add(
start + 2..start + 2 + "LAYOUT_END".len(),
tokens.layout_token,
);
if let Some(props) = matches.get(1).map(|m| m.range()) {
sems.add(props.start - 1..props.start, tokens.layout_props_sep);
sems.add(props.clone(), tokens.layout_props);
sems.add(props.end..props.end + 1, tokens.layout_props_sep);
}
}

(id, LayoutToken::End, layout_type, properties)
};

@@ -879,7 +937,7 @@ mod tests {
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use crate::{validate_document, validate_semantics};

use super::*;

@@ -996,4 +1054,37 @@ mod tests {
Layout { token == LayoutToken::End, id == 2 };
);
}

#[test]
fn semantic() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
#+LAYOUT_BEGIN Split
#+LAYOUT_NEXT[style=aa]
#+LAYOUT_END
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (_, state) = parser.parse(
ParserState::new_with_semantics(&parser, None),
source.clone(),
None,
ParseMode::default(),
);
validate_semantics!(state, source.clone(), 0,
layout_sep { delta_line == 1, delta_start == 0, length == 2 };
layout_token { delta_line == 0, delta_start == 2, length == 12 };
layout_type { delta_line == 0, delta_start == 12, length == 6 };
layout_sep { delta_line == 1, delta_start == 1, length == 2 };
layout_token { delta_line == 0, delta_start == 2, length == 11 };
layout_props_sep { delta_line == 0, delta_start == 11, length == 1 };
layout_props { delta_line == 0, delta_start == 1, length == 8 };
layout_props_sep { delta_line == 0, delta_start == 8, length == 1 };
layout_sep { delta_line == 1, delta_start == 0, length == 2 };
layout_token { delta_line == 0, delta_start == 2, length == 10 };
);
}
}

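For readers of the `validate_semantics!` expectations above: semantic tokens use the LSP delta encoding, where each token is positioned relative to the previous one. A minimal sketch of that encoding, not the project's actual encoder:

    // (line, column) of the previous and current token starts -> LSP deltas.
    fn delta(prev: (u32, u32), cur: (u32, u32)) -> (u32, u32) {
        let delta_line = cur.0 - prev.0;
        // delta_start is relative to the previous token only on the same line.
        let delta_start = if delta_line == 0 { cur.1 - prev.1 } else { cur.1 };
        (delta_line, delta_start)
    }

Read that way, the first three expectations decode `#+` (length 2), then `LAYOUT_BEGIN` starting 2 columns later (length 12), then ` Split` starting 12 columns after that (length 6), all on one line.
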
@@ -127,8 +127,8 @@ impl RegexRule for LinkRule {
);
return reports;
}
let processed = util::process_escaped('\\', "]", display.as_str());
if processed.is_empty() {
let display_source = util::escape_source(token.source(), display.range(), "Link Display".into(), '\\', "](");
if display_source.content().is_empty() {
reports.push(
Report::build(ReportKind::Error, token.source(), display.start())
.with_message("Empty link name")

@@ -137,7 +137,7 @@ impl RegexRule for LinkRule {
.with_message(format!(
"Link name is empty. Once processed, `{}` yields `{}`",
display.as_str().fg(state.parser.colors().highlight),
processed.fg(state.parser.colors().highlight),
display_source.fg(state.parser.colors().highlight),
))
.with_color(state.parser.colors().error),
)

@@ -154,12 +154,7 @@ impl RegexRule for LinkRule {
tokens.link_display_sep,
);
}
let source = Rc::new(VirtualSource::new(
Token::new(display.range(), token.source()),
"Link Display".to_string(),
processed,
));
match util::parse_paragraph(state, source, document) {
match util::parse_paragraph(state, display_source, document) {
Err(err) => {
reports.push(
Report::build(ReportKind::Error, token.source(), display.start())

@@ -376,6 +371,22 @@ nml.link.push("**BOLD link**", "another url")

#[test]
fn semantics() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#" - [la(*testi*nk](url)
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (_, state) = parser.parse(
ParserState::new_with_semantics(&parser, None),
source.clone(),
None,
ParseMode::default(),
);
println!("{:#?}", state.shared.semantics);
return;
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"

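This hunk is the link-rule half of the commit's theme: `process_escaped` returned a bare `String` and lost positions, while `util::escape_source` (added further down in this commit) wraps the processed text in a `VirtualSource` that records per-position offsets. Ranges produced while parsing `display_source` can then be mapped back to the real file, along the lines of this sketch with hypothetical positions:

    // A range 0..4 inside the escaped "Link Display" source resolves to the
    // originating file before being reported or highlighted.
    let (original, range) = original_range(display_source.clone(), 0..4);
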
@@ -359,7 +359,6 @@ impl Rule for ListRule {
// Content
let entry_start = captures.get(3).unwrap().start();
let mut entry_content = captures.get(3).unwrap().as_str().to_string();
let mut spacing: Option<(Range<usize>, &str)> = None;
while let Some(captures) = self.continue_re.captures_at(content, end_cursor.pos) {
// Break if next element is another entry
if captures.get(0).unwrap().start() != end_cursor.pos

@@ -379,44 +378,12 @@ impl Rule for ListRule {
// Advance cursor
end_cursor = end_cursor.at(captures.get(0).unwrap().end());

// Spacing
let current_spacing = captures.get(1).unwrap().as_str();
if let Some(spacing) = &spacing {
if spacing.1 != current_spacing {
reports.push(
Report::build(
ReportKind::Warning,
cursor.source.clone(),
captures.get(1).unwrap().start(),
)
.with_message("Invalid list entry spacing")
.with_label(
Label::new((
cursor.source.clone(),
captures.get(1).unwrap().range(),
))
.with_message("Spacing for list entries do not match")
.with_color(state.parser.colors().warning),
)
.with_label(
Label::new((cursor.source.clone(), spacing.0.clone()))
.with_message("Previous spacing")
.with_color(state.parser.colors().warning),
)
.finish(),
);
}
} else {
spacing = Some((captures.get(1).unwrap().range(), current_spacing));
}

entry_content += "\n";
entry_content += captures.get(1).unwrap().as_str();
}

// Parse entry content
let token = Token::new(entry_start..end_cursor.pos, end_cursor.source.clone());
//println!("content={}", entry_content);
let entry_src = Rc::new(VirtualSource::new(
token.clone(),
"List Entry".to_string(),

@@ -561,8 +528,6 @@ mod tests {
*[offset=5] First **bold**
Second line
*- Another

"#
.to_string(),
None,

@@ -32,8 +32,6 @@ pub struct Paragraph {
}

impl Paragraph {
pub fn is_empty(&self) -> bool { self.content.is_empty() }

pub fn find_back<P: FnMut(&&Box<dyn Element + 'static>) -> bool>(
&self,
predicate: P,

@@ -2,6 +2,7 @@ use crate::compiler::compiler::Compiler;
use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::lsp::semantic::Semantics;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;

@@ -227,6 +228,21 @@ impl RegexRule for RawRule {
}),
);

if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
{
let range = matches.get(0).unwrap().range();
sems.add(range.start..range.start + 2, tokens.raw_sep);
if let Some(props) = matches.get(1).map(|m| m.range()) {
sems.add(props.start - 1..props.start, tokens.raw_props_sep);
sems.add(props.clone(), tokens.raw_props);
sems.add(props.end..props.end + 1, tokens.raw_props_sep);
}
sems.add(matches.get(2).unwrap().range(), tokens.raw_content);
sems.add(range.end - 2..range.end, tokens.raw_sep);
}

reports
}

@@ -281,7 +297,7 @@ mod tests {
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use crate::{validate_document, validate_semantics};

#[test]
fn parser() {

@@ -338,4 +354,35 @@ Break%<nml.raw.push("block", "Raw")>%NewParagraph%<nml.raw.push("inline", "<b>")
};
);
}

#[test]
fn semantic() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
{?[kind=block] Raw?}
{?<b>?}
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (_, state) = parser.parse(
ParserState::new_with_semantics(&parser, None),
source.clone(),
None,
ParseMode::default(),
);
validate_semantics!(state, source.clone(), 0,
raw_sep { delta_line == 1, delta_start == 0, length == 2 };
raw_props_sep { delta_line == 0, delta_start == 2, length == 1 };
raw_props { delta_line == 0, delta_start == 1, length == 10 };
raw_props_sep { delta_line == 0, delta_start == 10, length == 1 };
raw_content { delta_line == 0, delta_start == 1, length == 4 };
raw_sep { delta_line == 0, delta_start == 4, length == 2 };
raw_sep { delta_line == 1, delta_start == 0, length == 2 };
raw_content { delta_line == 0, delta_start == 2, length == 3 };
raw_sep { delta_line == 0, delta_start == 3, length == 2 };
);
}
}

@@ -1,4 +1,3 @@
use std::cell::RefMut;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;

@@ -9,6 +9,7 @@ use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;
use crate::parser::rule::RegexRule;
use crate::parser::source::original_range;
use crate::parser::source::Source;
use crate::parser::source::Token;
use crate::parser::state::RuleState;

@@ -28,6 +28,7 @@ use crate::compiler::compiler::Target;
use crate::document::document::Document;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::lsp::semantic::Semantics;
use crate::lua::kernel::CTX;
use crate::parser::parser::ParseMode;
use crate::parser::parser::ParserState;

@@ -429,7 +430,7 @@ impl RegexRule for TexRule {
document,
Box::new(Tex {
mathmode: index == 1,
location: token,
location: token.clone(),
kind: tex_kind,
env: tex_env.to_string(),
tex: tex_content,

@@ -437,6 +438,19 @@ impl RegexRule for TexRule {
}),
);

if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
{
let range = token.range;
sems.add(range.start..range.start + if index == 0 { 2 } else { 1 }, tokens.tex_sep);
if let Some(props) = matches.get(1).map(|m| m.range()) {
sems.add(props.start - 1..props.start, tokens.tex_props_sep);
sems.add(props.clone(), tokens.tex_props);
sems.add(props.end..props.end + 1, tokens.tex_props_sep);
}
sems.add(matches.get(2).unwrap().range(), tokens.tex_content);
sems.add(range.end - if index == 0 { 2 } else { 1 }..range.end, tokens.tex_sep);
}
reports
}

@@ -536,7 +550,7 @@ mod tests {
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use crate::{validate_document, validate_semantics};

use super::*;

@@ -607,4 +621,32 @@ $[env=another] e^{i\pi}=-1$
};
);
}

#[test]
fn semantic() {
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
$[kind=inline]\LaTeX$
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (_, state) = parser.parse(
ParserState::new_with_semantics(&parser, None),
source.clone(),
None,
ParseMode::default(),
);
validate_semantics!(state, source.clone(), 0,
tex_sep { delta_line == 1, delta_start == 0, length == 1 };
tex_props_sep { delta_line == 0, delta_start == 1, length == 1 };
tex_props { delta_line == 0, delta_start == 1, length == 11 };
tex_props_sep { delta_line == 0, delta_start == 11, length == 1 };
tex_content { delta_line == 0, delta_start == 1, length == 6 };
tex_sep { delta_line == 0, delta_start == 6, length == 1 };
);
}
}

@@ -8,6 +8,7 @@ use tower_lsp::lsp_types::SemanticToken;
use tower_lsp::lsp_types::SemanticTokenModifier;
use tower_lsp::lsp_types::SemanticTokenType;

use crate::parser::source::original_range;
use crate::parser::source::LineCursor;
use crate::parser::source::Source;
use crate::parser::source::SourceFile;

@@ -139,6 +140,22 @@ pub struct Tokens {
pub list_bullet: (u32, u32),
pub list_props_sep: (u32, u32),
pub list_props: (u32, u32),

pub raw_sep: (u32, u32),
pub raw_props_sep: (u32, u32),
pub raw_props: (u32, u32),
pub raw_content: (u32, u32),

pub tex_sep: (u32, u32),
pub tex_props_sep: (u32, u32),
pub tex_props: (u32, u32),
pub tex_content: (u32, u32),

pub layout_sep: (u32, u32),
pub layout_token: (u32, u32),
pub layout_props_sep: (u32, u32),
pub layout_props: (u32, u32),
pub layout_type: (u32, u32),
}

impl Tokens {

@@ -160,7 +177,7 @@ impl Tokens {
import_import: token!("macro"),
import_as_sep: token!("operator"),
import_as: token!("operator"),
import_path: token!("function"),
import_path: token!("parameter"),

reference_operator: token!("operator"),
reference_link_sep: token!("operator"),

@@ -174,7 +191,7 @@ impl Tokens {
variable_kind: token!("operator"),
variable_name: token!("macro"),
variable_sep: token!("operator"),
variable_value: token!("function"),
variable_value: token!("parameter"),

variable_sub_sep: token!("operator"),
variable_sub_name: token!("macro"),

@@ -195,6 +212,22 @@ impl Tokens {
list_bullet: token!("macro"),
list_props_sep: token!("operator"),
list_props: token!("enum"),

raw_sep: token!("operator"),
raw_props_sep: token!("operator"),
raw_props: token!("enum"),
raw_content: token!("string"),

tex_sep: token!("modifier"),
tex_props_sep: token!("operator"),
tex_props: token!("enum"),
tex_content: token!("string"),

layout_sep: token!("number"),
layout_token: token!("number"),
layout_props_sep: token!("operator"),
layout_props: token!("enum"),
layout_type: token!("function"),
}
}
}

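The `(u32, u32)` pairs above are, presumably (the `token!` macro body is not shown in this diff), an index into the server's `SemanticTokenType` legend plus a `SemanticTokenModifier` bitset — the two numeric fields an LSP `SemanticToken` carries besides its position:

    // Sketch: one Tokens entry plus already-computed deltas becomes an LSP token.
    let (token_type, token_modifiers_bitset) = tokens.layout_token;
    let sem = SemanticToken {
        delta_line: 0,   // placeholder deltas; computed by Semantics::add below
        delta_start: 2,
        length: 12,
        token_type,
        token_modifiers_bitset,
    };
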
@@ -221,15 +254,17 @@ impl SemanticsData {
#[derive(Debug)]
pub struct Semantics<'a> {
pub(self) sems: Ref<'a, SemanticsData>,
// TODO
pub(self) original_source: Rc<dyn Source>,
/// The resolved parent source
pub(self) source: Rc<dyn Source>,
pub(self) range: Range<usize>,
}

impl<'a> Semantics<'a> {
fn from_source_impl(
source: Rc<dyn Source>,
semantics: &'a Option<RefCell<SemanticsHolder>>,
range: Range<usize>,
original_source: Rc<dyn Source>,
) -> Option<(Self, Ref<'a, Tokens>)> {
if source.name().starts_with(":LUA:") && source.downcast_ref::<VirtualSource>().is_some() {
return None;

@@ -243,7 +278,8 @@ impl<'a> Semantics<'a> {
.map(|parent| parent.location())
.unwrap_or(None)
{
return Self::from_source_impl(location.source(), semantics, range);
//let range = location.range.start+range.start..location.range.start+range.end;
return Self::from_source_impl(location.source(), semantics, original_source);
} else if let Some(source) = source.clone().downcast_rc::<SourceFile>().ok() {
return Ref::filter_map(
semantics.as_ref().unwrap().borrow(),

@@ -257,7 +293,7 @@ impl<'a> Semantics<'a> {
Self {
sems,
source,
range,
original_source,
},
Ref::map(
semantics.as_ref().unwrap().borrow(),

@@ -276,18 +312,15 @@ impl<'a> Semantics<'a> {
if semantics.is_none() {
return None;
}
let range = source.location().map_or_else(
|| 0..source.content().len(),
|location| location.range.clone(),
);
return Self::from_source_impl(source, semantics, range);
return Self::from_source_impl(source.clone(), semantics, source);
}

pub fn add(&self, range: Range<usize>, token: (u32, u32)) {
let range = self.range.start + range.start..self.range.start + range.end;
let range = original_range(self.original_source.clone(), range).1;
let mut tokens = self.sems.tokens.borrow_mut();
let mut cursor = self.sems.cursor.borrow_mut();
let mut current = cursor.clone();
println!("range={range:#?}");
cursor.move_to(range.start);

while cursor.pos != range.end {

@@ -2,7 +2,6 @@ use ariadne::Label;
use ariadne::Report;
use std::any::Any;
use std::cell::RefCell;
use std::collections::HashMap;
use std::collections::HashSet;
use std::ops::Range;
use std::rc::Rc;

@@ -70,11 +70,48 @@ impl Source for SourceFile {
fn content(&self) -> &String { &self.content }
}

/// Stores the offsets in a virtual source
///
/// # Example
///
/// Let's say you make a virtual source from the following: "Con\]tent" -> "Con]tent"
/// Then at position 3, an offset of 1 will be created to account for the removed '\'
#[derive(Debug)]
struct SourceOffset {
/// Stores the total offsets
offsets: Vec<(usize, isize)>,
}

impl SourceOffset {
/// Get the offset position
pub fn position(&self, pos: usize) -> usize {
match self.offsets.binary_search_by_key(&pos, |&(orig, _)| orig) {
Ok(idx) => (pos as isize + self.offsets[idx].1) as usize,
Err(idx) => {
if idx == 0 {
pos
} else {
(pos as isize + self.offsets[idx - 1].1) as usize
}
}
}
}
}

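Continuing the doc example, a quick check of `position` (a sketch; `SourceOffset` is private to this module):

    // "Con\]tent" -> "Con]tent": one '\' removed at position 3.
    let off = SourceOffset { offsets: vec![(3, 1)] };
    assert_eq!(off.position(2), 2); // before the removal: unchanged
    assert_eq!(off.position(3), 4); // ']' maps back past the removed '\'
    assert_eq!(off.position(7), 8); // Err branch: last preceding offset applies
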
#[derive(Debug)]
pub struct VirtualSource {
location: Token,
name: String,
content: String,
/// Offset relative to the [`location`]'s source
offsets: Option<SourceOffset>,
}

impl VirtualSource {

@@ -83,6 +120,16 @@ impl VirtualSource {
location,
name,
content,
offsets: None,
}
}

pub fn new_offsets(location: Token, name: String, content: String, offsets: Vec<(usize, isize)>) -> Self {
Self {
location,
name,
content,
offsets: Some(SourceOffset { offsets }),
}
}
}

@@ -93,6 +140,49 @@ impl Source for VirtualSource {
fn content(&self) -> &String { &self.content }
}

/// Transforms a position to its position in the oldest parent source
pub fn original_position(source: Rc<dyn Source>, mut pos: usize) -> (Rc<dyn Source>, usize) {
// Apply offsets
if let Some(offsets) = source.downcast_ref::<VirtualSource>()
.and_then(|source| source.offsets.as_ref())
{
pos = offsets.position(pos);
}

// Recurse to parent
if let Some(parent) = source.location() {
return original_position(parent.source.clone(), parent.range.start + pos);
}

(source, pos)
}

/// Transforms a range to the oldest parent source
///
/// This function takes a range from a source and attempts to get the range's position in the oldest parent
pub fn original_range(source: Rc<dyn Source>, mut range: Range<usize>) -> (Rc<dyn Source>, Range<usize>) {
// Apply offsets
if let Some(offsets) = source.downcast_ref::<VirtualSource>()
.and_then(|source| source.offsets.as_ref())
{
range = offsets.position(range.start)..offsets.position(range.end);
}

// Recurse to parent
if let Some(parent) = source.location() {
//println!("FOUND PARENT={}", parent.source().name());
return original_range(parent.source.clone(), parent.range.start + range.start..parent.range.start + range.end);
}

(source, range)
}

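Since `original_range` recurses through `location()`, resolution composes across arbitrarily nested virtual sources. A sketch with hypothetical sources, where `file` is some `Rc<SourceFile>`:

    // inner sits at 10..30 of outer, outer at 100..200 of file, so a range
    // 5..8 local to inner resolves to 115..118 in the file.
    let outer = Rc::new(VirtualSource::new(
        Token::new(100..200, file.clone()),
        "outer".to_string(),
        file.content()[100..200].to_string(),
    ));
    let inner = Rc::new(VirtualSource::new(
        Token::new(10..30, outer.clone()),
        "inner".to_string(),
        outer.content()[10..30].to_string(),
    ));
    assert_eq!(original_range(inner, 5..8).1, 115..118);
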
#[derive(Debug)]
pub struct Cursor {
pub pos: usize,

@@ -151,6 +241,7 @@ impl LineCursor {
/// # Error
/// This function will panic if [`pos`] is not utf8 aligned
pub fn move_to(&mut self, pos: usize) {
println!("pos={pos}");
if self.pos < pos {
let start = self.pos;
let mut it = self.source.content().as_str()[start..].chars().peekable();

@@ -173,29 +264,6 @@ impl LineCursor {
}
} else if self.pos > pos {
panic!();
let start = self.pos;
let mut it = self.source.content().as_str()[..start]
.chars()
.rev()
.peekable();

let mut prev = self.source.content().as_str()[start..].chars().next();
while self.pos > pos {
let c = it.next().unwrap();
let len = c.len_utf8();

if self.pos != start && prev == Some('\n') {
self.line -= 1;
self.line_pos = 0;
}
self.line_pos -= c.len_utf16();
self.pos -= c.len_utf8();
prev = Some(c);
}
if self.pos != start && prev == Some('\n') {
self.line -= 1;
self.line_pos = 0;
}
}

// May fail if pos is not utf8-aligned

@@ -1,4 +1,6 @@
use std::collections::HashMap;
use std::iter::Peekable;
use std::ops::Range;
use std::rc::Rc;

use unicode_segmentation::UnicodeSegmentation;

@@ -7,10 +9,14 @@ use crate::document::document::Document;
use crate::document::document::DocumentAccessors;
use crate::document::element::ElemKind;
use crate::elements::paragraph::Paragraph;
use crate::parser::source::original_range;

use super::parser::ParseMode;
use super::parser::ParserState;
use super::source::Source;
use super::source::SourceFile;
use super::source::Token;
use super::source::VirtualSource;

/// Processes text for escape characters and paragraphing
pub fn process_text(document: &dyn Document, content: &str) -> String {

@@ -88,6 +94,73 @@ pub fn process_text(document: &dyn Document, content: &str) -> String {
processed
}

/// Transforms a range of `source` into a new [`VirtualSource`], processing
/// escaped tokens along the way
pub fn escape_source(source: Rc<dyn Source>, range: Range<usize>, name: String, escape: char, token: &'static str) -> Rc<dyn Source> {
let content = &source.content()[range.clone()];

let mut processed = String::new();
let mut escaped = 0;
let mut token_it = token.chars().peekable();
let mut offset = 0isize;
let mut offsets: Vec<(usize, isize)> = vec![];
for (pos, c) in content.chars().enumerate() {
if c == escape {
escaped += 1;
} else if escaped % 2 == 1 && token_it.peek().map_or(false, |p| *p == c) {
let _ = token_it.next();
if token_it.peek().is_none() {
(0..(escaped / 2)).for_each(|_| processed.push(escape));
if (escaped + 1) / 2 != 0 {
offset += (escaped + 1) / 2;
offsets.push((pos - token.len() - escaped as usize / 2, offset));
}
escaped = 0;
token_it = token.chars().peekable();
processed.push_str(token);
}
} else {
if escaped != 0 {
// Add escapes
(0..escaped).for_each(|_| processed.push('\\'));
token_it = token.chars().peekable();
escaped = 0;
}
processed.push(c);
}
}
// Add trailing escapes
(0..escaped).for_each(|_| processed.push('\\'));

Rc::new(VirtualSource::new_offsets(
Token::new(range, source),
name,
processed,
offsets,
))
}

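A trace of the loop above on the input that the scratch `app()` function below feeds it, by my reading of the code:

    // input    a \ \ \ ` ` ` b    ("a\\\```b" after Rust string unescaping)
    // output   a \ ` ` ` b        the '\\' pair collapses to '\', and the
    //                             lone '\' escaping the token is dropped
    // offsets  [(2, 2)]           from processed position 2 onward, map back
    //                             by +2: the first '`' (pos 2) lands on
    //                             original position 4, via SourceOffset::position
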
pub fn app() {
let mut s = String::new();

let source = Rc::new(SourceFile::with_content(
"test".to_string(),
"a\\\\\\```b".into(),
None,
));
let src = escape_source(source.clone(), 0..source.content().len(), "sub".to_string(), '\\', "```");
println!("{}", src.content());
let range = 0..src.content().len();
println!("{:#?}", range);
let orange = original_range(src.clone(), range);
println!("{:#?}", orange);
}

/// Processes a string and escapes a single token out of it
/// Escaped characters other than the [`token`] will not be treated as escaped
///

@@ -96,6 +169,8 @@ pub fn process_text(document: &dyn Document, content: &str) -> String {
/// assert_eq!(process_escaped('\\', "%", "escaped: \\%, also escaped: \\\\\\%, untouched: \\a"),
/// "escaped: %, also escaped: \\%, untouched: \\a");
/// ```
/// TODO: Make this function return a delta to pass to the semantics, maybe store it in the virtualsource, so this function should return a source...
#[deprecated]
pub fn process_escaped<S: AsRef<str>>(escape: char, token: &'static str, content: S) -> String {
let mut processed = String::new();
let mut escaped = 0;

@@ -65,7 +65,7 @@ impl Backend {

#[tower_lsp::async_trait]
impl LanguageServer for Backend {
async fn initialize(&self, params: InitializeParams) -> Result<InitializeResult> {
async fn initialize(&self, _params: InitializeParams) -> Result<InitializeResult> {
Ok(InitializeResult {
capabilities: ServerCapabilities {
text_document_sync: Some(TextDocumentSyncCapability::Kind(

@@ -139,9 +139,9 @@ impl LanguageServer for Backend {
.await
}

async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
let uri = params.text_document_position.text_document.uri;
let position = params.text_document_position.position;
async fn completion(&self, _params: CompletionParams) -> Result<Option<CompletionResponse>> {
//let uri = params.text_document_position.text_document.uri;
//let position = params.text_document_position.position;
let completions = || -> Option<Vec<CompletionItem>> {
let mut ret = Vec::with_capacity(0);