Lsp progress

parent 57da207a81, commit 1965ff6006
4 changed files with 61 additions and 63 deletions
@@ -328,26 +328,21 @@ impl RegexRule for SectionRule {
                }),
            );
 
-            //if let Some(sems) = state.shared.semantics.and_then(|sems| {
-            //    RefMut::filter_map(sems.borrow_mut(), |sems| sems.get_mut(&token.source())).ok()
-            //})
-            /*if let Some(sems) = state.shared.semantics
-                .as_ref()
-                .and_then(
-                    |sems| sems
-                        .borrow_mut()
-                        .get_mut(&token.source())
-                        .map(|v| v)
-                )
-            {
-            }*/
             if let Some(mut sems) = state.shared.semantics.as_ref().map(|sems| {
                 RefMut::filter_map(sems.borrow_mut(), |sems| sems.get_mut(&token.source()))
                     .ok()
                     .unwrap()
             }) {
-                // Do something with mutable value_for_key
-                sems.add(matches.get(1).unwrap().range(), 0, 0);
+                sems.add(token.source(), matches.get(1).unwrap().range(), 0, 0);
+                if let Some(reference) = matches.get(2)
+                {
+                    sems.add(token.source(), reference.start()-1..reference.end()+1, 1, 0);
+                }
+                if let Some(kind) = matches.get(3)
+                {
+                    sems.add(token.source(), kind.range(), 3, 0);
+                }
+                //sems.add(token.source(), matches.get(5).unwrap().range(), 2, 0);
             }
 
             result
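The `RefMut::filter_map` call above is how the rule narrows the shared `RefCell` map down to the `Semantics` entry for one source. A minimal, self-contained sketch of the same std pattern (names here are illustrative, not from this repository):

use std::cell::{RefCell, RefMut};
use std::collections::HashMap;

fn main() {
    // Shared per-source state, standing in for `state.shared.semantics`.
    let semantics: RefCell<HashMap<String, Vec<u32>>> = RefCell::new(HashMap::new());
    semantics.borrow_mut().insert("doc.nml".to_string(), Vec::new());

    // Narrow RefMut<HashMap<..>> down to RefMut<Vec<u32>> for one key;
    // filter_map returns Err(the original borrow) when the closure yields None.
    if let Ok(mut sems) = RefMut::filter_map(semantics.borrow_mut(), |m| m.get_mut("doc.nml")) {
        sems.push(0);
    }
}

Matching on `Ok` as in this sketch would also sidestep the `.ok().unwrap()` in the hunk, which panics whenever no semantics entry exists for the token's source.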
@@ -561,12 +556,7 @@ nml.section.push("6", 6, "", "refname")
         let source = Rc::new(SourceFile::with_content(
             "".to_string(),
             r#"
-# 1
-##+ 2
-###* 3
-####+* 4
-#####*+ 5
-######{refname} 6
+#{か} test
 "#
             .to_string(),
             None,
@@ -13,7 +13,7 @@ pub struct Semantics
     cursor: LineCursor,
 
     /// Semantic tokens
-    tokens: Vec<SemanticToken>,
+    pub tokens: Vec<SemanticToken>,
 }
 
 impl Semantics
@@ -26,23 +26,40 @@ impl Semantics
         }
     }
 
-    pub fn add(&mut self, range: Range<usize>, token_type: u32, token_modifier: u32)
+    pub fn add(&mut self, source: Rc<dyn Source>, range: Range<usize>, token_type: u32, token_modifier: u32)
     {
-        let current = self.cursor.clone();
+        let mut current = self.cursor.clone();
         self.cursor.move_to(range.start);
 
+        while self.cursor.pos != range.end
+        {
+            let end = source.content()[self.cursor.pos..].find('\n')
+                .unwrap_or(source.content().len() - self.cursor.pos);
+            let len = usize::min(range.end - self.cursor.pos, end);
+
             let delta_line = self.cursor.line - current.line;
             let delta_start = if delta_line == 0
+            {
+                if let Some(last) = self.tokens.last()
+                {
+                    self.cursor.line_pos - current.line_pos + last.length as usize
+                }
+                else
                 {
                     self.cursor.line_pos - current.line_pos
+                }
             } else { self.cursor.line_pos };
 
+            eprintln!("CURRENT={:#?}, CURS={:#?}", current, self.cursor);
             self.tokens.push(SemanticToken{
                 delta_line: delta_line as u32,
                 delta_start: delta_start as u32,
-                length: 10,
+                length: len as u32,
                 token_type,
                 token_modifiers_bitset: token_modifier,
             });
+            current = self.cursor.clone();
+            self.cursor.move_to(self.cursor.pos + len);
+        }
     }
 }
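For reference while reading `add`: the LSP wire format stores each semantic token as five `u32`s positioned relative to the previous token, which is what `delta_line`/`delta_start` compute, and a single token cannot span lines, which is why the new loop splits the range at '\n'. A sketch of the plain delta encoding over absolute positions (illustrative names, not this repository's API; per the LSP spec, `delta_start` is measured from the previous token's start when both tokens share a line):

/// Absolute (line, column, length, token_type, modifiers) for one token.
type AbsToken = (u32, u32, u32, u32, u32);

/// Flatten absolute tokens (sorted by position) into LSP's delta encoding.
fn encode(tokens: &[AbsToken]) -> Vec<u32> {
    let mut out = Vec::with_capacity(tokens.len() * 5);
    let (mut prev_line, mut prev_col) = (0u32, 0u32);
    for &(line, col, len, ty, mods) in tokens {
        let delta_line = line - prev_line;
        // Same line as the previous token: column delta; fresh line: absolute column.
        let delta_start = if delta_line == 0 { col - prev_col } else { col };
        out.extend_from_slice(&[delta_line, delta_start, len, ty, mods]);
        prev_line = line;
        prev_col = col;
    }
    out
}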
@@ -5,6 +5,7 @@ use std::rc::Rc;
 
 use downcast_rs::impl_downcast;
 use downcast_rs::Downcast;
+use unicode_width::UnicodeWidthChar;
 
 /// Trait for source content
 pub trait Source: Downcast {
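The new `unicode_width` import is what lets the cursor below count columns in display cells rather than bytes. A quick demonstration of the distinction, using the `か` from the updated test:

use unicode_width::UnicodeWidthChar;

fn main() {
    // 'か' is 3 bytes of UTF-8 but 2 columns wide, so `pos` must advance
    // by len_utf8() while `line_pos` advances by width().
    assert_eq!('か'.len_utf8(), 3);
    assert_eq!('か'.width(), Some(2));
    // Control characters have no width; the cursor falls back to 1 via unwrap_or(1).
    assert_eq!('\n'.width(), None);
}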
@@ -175,18 +176,18 @@ impl LineCursor {
     /// # Error
     /// This function will panic if [`pos`] is not utf8 aligned
     pub fn move_to(&mut self, pos: usize) {
-        if pos > self.pos {
+        if self.pos < pos {
             let start = self.pos;
-            eprintln!("slice{{{}}}, want={pos}", &self.source.content().as_str()[start..pos]);
+            //eprintln!("slice{{{}}}, want={pos}", &self.source.content().as_str()[start..pos]);
             let mut it = self.source.content().as_str()[start..] // pos+1
                 .chars()
                 .peekable();
 
-            let mut prev = self.source.content().as_str()[..start + 1]
+            let mut prev = self.source.content().as_str()[..start]
                 .chars()
                 .rev()
                 .next();
-            eprintln!("prev={prev:#?}");
+            //eprintln!("prev={prev:#?}");
             while self.pos < pos {
                 let c = it.next().unwrap();
                 let len = c.len_utf8();
@@ -194,35 +195,19 @@ impl LineCursor {
                 if self.pos != 0 && prev == Some('\n') {
                     self.line += 1;
                     self.line_pos = 0;
-                } else {
-                    self.line_pos += len;
                 }
+                self.line_pos += c.width().unwrap_or(1);
                 self.pos += len;
 
-                eprintln!("({}, {c:#?}, {} {}, {prev:#?})", self.pos, self.line, self.line_pos);
+                eprintln!("({}, {c:#?}, {} {}, {})", self.pos, self.line, self.line_pos, prev.unwrap_or(' '));
                 prev = Some(c);
             }
             if self.pos != 0 && prev == Some('\n') {
                 self.line += 1;
                 self.line_pos = 0;
             }
-        } else if pos < self.pos {
-            todo!("Going back is not supported");
-            self.source.content().as_str()[pos..self.pos]
-                .char_indices()
-                .rev()
-                .for_each(|(len, c)| {
-                    self.pos -= len;
-                    if c == '\n' {
-                        self.line -= 1;
-                    }
-                });
-            self.line_pos = self.source.content().as_str()[..self.pos]
-                .char_indices()
-                .rev()
-                .find(|(_, c)| *c == '\n')
-                .map(|(line_start, _)| self.pos - line_start)
-                .unwrap_or(0);
+        } else if self.pos > pos {
+            panic!("Going back is not supported");
         }
 
         // May fail if pos is not utf8-aligned
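Equivalent standalone logic for the forward scan, as a hedged sketch rather than the repository's API: it recomputes the position from the start of the text instead of advancing incrementally, and applies the newline reset eagerly where `move_to` defers it through `prev`, but the bookkeeping is the same (byte offset by `len_utf8`, columns by display width):

use unicode_width::UnicodeWidthChar;

/// Compute (line, line_pos) for a utf8-aligned byte offset `pos` into `text`,
/// counting columns in display width like the patched `move_to`.
fn cursor_at(text: &str, pos: usize) -> (usize, usize) {
    let (mut line, mut line_pos) = (0usize, 0usize);
    for (i, c) in text.char_indices() {
        if i >= pos {
            break;
        }
        if c == '\n' {
            line += 1;
            line_pos = 0;
        } else {
            line_pos += c.width().unwrap_or(1);
        }
    }
    (line, line_pos)
}

fn main() {
    // 'か' spans bytes 2..5 and two columns: byte offset 5 is column 4 of line 0.
    assert_eq!(cursor_at("#{か} test", 5), (0, 4));
    assert_eq!(cursor_at("ab\ncd", 4), (1, 1));
}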
@@ -17,6 +17,7 @@ use lsp::semantic::semantic_token_from_document;
 use parser::langparser::LangParser;
 use parser::parser::Parser;
 use parser::parser::ParserState;
+use parser::semantics::Semantics;
 use parser::source::SourceFile;
 use tower_lsp::jsonrpc::Result;
 use tower_lsp::lsp_types::*;
@@ -51,9 +52,14 @@ impl Backend {
         let parser = LangParser::default();
         let (doc, state) = parser.parse(ParserState::new_with_semantics(&parser, None), source.clone(), None);
 
+        if let Some(sems) = state.shared.semantics.as_ref().map(|sems| {
+            std::cell::RefMut::filter_map(sems.borrow_mut(), |sems| sems.get_mut(&(source as Rc<dyn parser::source::Source>)))
+                .ok()
+                .unwrap()
+        }) {
             self.semantic_token_map
-                .insert(params.uri.to_string(),
-            state.shared.semantics.);
+                .insert(params.uri.to_string(), sems.tokens.to_owned());
+        };
     }
 }
 
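With the tokens cached per URI, answering `textDocument/semanticTokens/full` reduces to a map lookup. A sketch of the response construction, assuming `semantic_token_map` maps URI strings to `Vec<SemanticToken>` (the commit only shows the `insert` side, so the map type is a guess):

use std::collections::HashMap;

use tower_lsp::lsp_types::{SemanticToken, SemanticTokens, SemanticTokensResult};

/// Build a semantic_tokens_full response from the cache filled above.
fn tokens_response(
    map: &HashMap<String, Vec<SemanticToken>>,
    uri: &str,
) -> Option<SemanticTokensResult> {
    map.get(uri).map(|data| {
        SemanticTokensResult::Tokens(SemanticTokens {
            result_id: None,
            data: data.clone(),
        })
    })
}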