From 1965ff6006ac75e556ffe1b9fbaf855cd1e3e521 Mon Sep 17 00:00:00 2001
From: ef3d0c3e
Date: Thu, 17 Oct 2024 21:25:20 +0200
Subject: [PATCH] LSP progress

---
 src/elements/section.rs | 32 +++++++++++---------------------
 src/parser/semantics.rs | 45 +++++++++++++++++++++++++++--------------
 src/parser/source.rs    | 35 ++++++++++-------------------------
 src/server.rs           | 12 +++++++++---
 4 files changed, 61 insertions(+), 63 deletions(-)

diff --git a/src/elements/section.rs b/src/elements/section.rs
index 9aba96a..c4b26e6 100644
--- a/src/elements/section.rs
+++ b/src/elements/section.rs
@@ -328,26 +328,21 @@ impl RegexRule for SectionRule {
 			}),
 		);
 
-		//if let Some(sems) = state.shared.semantics.and_then(|sems| {
-		//	RefMut::filter_map(sems.borrow_mut(), |sems| sems.get_mut(&token.source())).ok()
-		//})
-		/*if let Some(sems) = state.shared.semantics
-			.as_ref()
-			.and_then(
-				|sems| sems
-					.borrow_mut()
-					.get_mut(&token.source())
-					.map(|v| v)
-			)
-		{
-		}*/
 		if let Some(mut sems) = state.shared.semantics.as_ref().map(|sems| {
 			RefMut::filter_map(sems.borrow_mut(), |sems| sems.get_mut(&token.source()))
 				.ok()
 				.unwrap()
 		}) {
-			// Do something with mutable value_for_key
-			sems.add(matches.get(1).unwrap().range(), 0, 0);
+			sems.add(token.source(), matches.get(1).unwrap().range(), 0, 0);
+			if let Some(reference) = matches.get(2)
+			{
+				sems.add(token.source(), reference.start()-1..reference.end()+1, 1, 0);
+			}
+			if let Some(kind) = matches.get(3)
+			{
+				sems.add(token.source(), kind.range(), 3, 0);
+			}
+			//sems.add(token.source(), matches.get(5).unwrap().range(), 2, 0);
 		}
 
 		result
@@ -561,12 +556,7 @@ nml.section.push("6", 6, "", "refname")
 		let source = Rc::new(SourceFile::with_content(
 			"".to_string(),
 			r#"
-# 1
-##+ 2
-###* 3
-####+* 4
-#####*+ 5
-######{refname} 6
+#{か} test
 "#
 			.to_string(),
 			None,
diff --git a/src/parser/semantics.rs b/src/parser/semantics.rs
index d7c6bf1..b54ed3a 100644
--- a/src/parser/semantics.rs
+++ b/src/parser/semantics.rs
@@ -13,7 +13,7 @@ pub struct Semantics
 	cursor: LineCursor,
 
 	/// Semantic tokens
-	tokens: Vec<SemanticToken>,
+	pub tokens: Vec<SemanticToken>,
 }
 impl Semantics
 {
@@ -26,23 +26,40 @@ impl Semantics
 		}
 	}
 
-	pub fn add(&mut self, range: Range<usize>, token_type: u32, token_modifier: u32)
+	pub fn add(&mut self, source: Rc<dyn Source>, range: Range<usize>, token_type: u32, token_modifier: u32)
 	{
-		let current = self.cursor.clone();
+		let mut current = self.cursor.clone();
 		self.cursor.move_to(range.start);
 
-		let delta_line = self.cursor.line - current.line;
-		let delta_start = if delta_line == 0
+		while self.cursor.pos != range.end
 		{
-			self.cursor.line_pos - current.line_pos
-		} else { self.cursor.line_pos };
+			let end = source.content()[self.cursor.pos..].find('\n')
+				.unwrap_or(source.content().len() - self.cursor.pos);
+			let len = usize::min(range.end - self.cursor.pos, end);
 
-		self.tokens.push(SemanticToken{
-			delta_line: delta_line as u32,
-			delta_start: delta_start as u32,
-			length: 10,
-			token_type,
-			token_modifiers_bitset: token_modifier,
-		});
+			let delta_line = self.cursor.line - current.line;
+			let delta_start = if delta_line == 0
+			{
+				if let Some(last) = self.tokens.last()
+				{
+					self.cursor.line_pos - current.line_pos + last.length as usize
+				}
+				else
+				{
+					self.cursor.line_pos - current.line_pos
+				}
+			} else { self.cursor.line_pos };
+
+			eprintln!("CURRENT={:#?}, CURS={:#?}", current, self.cursor);
+			self.tokens.push(SemanticToken{
+				delta_line: delta_line as u32,
+				delta_start: delta_start as u32,
+				length: len as u32,
+				token_type,
+				token_modifiers_bitset: token_modifier,
+			});
+			current = self.cursor.clone();
+			self.cursor.move_to(self.cursor.pos + len);
+		}
 	}
 }
diff --git a/src/parser/source.rs b/src/parser/source.rs
index 28655af..750388b 100644
--- a/src/parser/source.rs
+++ b/src/parser/source.rs
@@ -5,6 +5,7 @@ use std::rc::Rc;
 
 use downcast_rs::impl_downcast;
 use downcast_rs::Downcast;
+use unicode_width::UnicodeWidthChar;
 
 /// Trait for source content
 pub trait Source: Downcast {
@@ -175,18 +176,18 @@ impl LineCursor {
 	/// # Error
 	/// This function will panic if [`pos`] is not utf8 aligned
 	pub fn move_to(&mut self, pos: usize) {
-		if pos > self.pos {
+		if self.pos < pos {
 			let start = self.pos;
-			eprintln!("slice{{{}}}, want={pos}", &self.source.content().as_str()[start..pos]);
+			//eprintln!("slice{{{}}}, want={pos}", &self.source.content().as_str()[start..pos]);
 			let mut it = self.source.content().as_str()[start..] // pos+1
 				.chars()
 				.peekable();
 
-			let mut prev = self.source.content().as_str()[..start + 1]
+			let mut prev = self.source.content().as_str()[..start]
 				.chars()
 				.rev()
 				.next();
-			eprintln!("prev={prev:#?}");
+			//eprintln!("prev={prev:#?}");
 			while self.pos < pos {
 				let c = it.next().unwrap();
 				let len = c.len_utf8();
 
 				if self.pos != 0 && prev == Some('\n') {
 					self.line += 1;
 					self.line_pos = 0;
-				} else {
-					self.line_pos += len;
-				}
+				}
+				self.line_pos += c.width().unwrap_or(1);
 				self.pos += len;
 
-				eprintln!("({}, {c:#?}, {} {}, {prev:#?})", self.pos, self.line, self.line_pos);
+				eprintln!("({}, {c:#?}, {} {}, {})", self.pos, self.line, self.line_pos, prev.unwrap_or(' '));
 				prev = Some(c);
 			}
 			if self.pos != 0 && prev == Some('\n') {
 				self.line += 1;
 				self.line_pos = 0;
 			}
-		} else if pos < self.pos {
-			todo!("Going back is not supported");
-			self.source.content().as_str()[pos..self.pos]
-				.char_indices()
-				.rev()
-				.for_each(|(len, c)| {
-					self.pos -= len;
-					if c == '\n' {
-						self.line -= 1;
-					}
-				});
-			self.line_pos = self.source.content().as_str()[..self.pos]
-				.char_indices()
-				.rev()
-				.find(|(_, c)| *c == '\n')
-				.map(|(line_start, _)| self.pos - line_start)
-				.unwrap_or(0);
+		} else if self.pos > pos {
+			panic!("Going back is not supported");
 		}
 
 		// May fail if pos is not utf8-aligned
diff --git a/src/server.rs b/src/server.rs
index c0576ec..06473f0 100644
--- a/src/server.rs
+++ b/src/server.rs
@@ -17,6 +17,7 @@ use lsp::semantic::semantic_token_from_document;
 use parser::langparser::LangParser;
 use parser::parser::Parser;
 use parser::parser::ParserState;
+use parser::semantics::Semantics;
 use parser::source::SourceFile;
 use tower_lsp::jsonrpc::Result;
 use tower_lsp::lsp_types::*;
@@ -51,9 +52,14 @@ impl Backend {
 		let parser = LangParser::default();
 		let (doc, state) = parser.parse(ParserState::new_with_semantics(&parser, None), source.clone(), None);
 
-		self.semantic_token_map
-			.insert(params.uri.to_string(),
-				state.shared.semantics.);
+		if let Some(sems) = state.shared.semantics.as_ref().map(|sems| {
+			std::cell::RefMut::filter_map(sems.borrow_mut(), |sems| sems.get_mut(&(source as Rc<dyn Source>)))
+				.ok()
+				.unwrap()
+		}) {
+			self.semantic_token_map
+				.insert(params.uri.to_string(), sems.tokens.to_owned());
+		};
 	}
 }
 
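
Note on the delta encoding in `Semantics::add`: the tokens follow the LSP `textDocument/semanticTokens` wire format, in which each token's position is encoded relative to the previous token. Below is a minimal sketch of that encoding on its own, detached from the parser types in this patch; the `Tok` struct and `encode` function are illustrative names, not part of the codebase.

    // Minimal sketch of LSP semantic-token delta encoding. `Tok` holds
    // absolute positions; `encode` flattens them into the
    // five-integers-per-token stream the protocol expects.
    struct Tok {
        line: u32,  // absolute line of the token start
        start: u32, // absolute start column on that line
        length: u32,
        token_type: u32,
        token_modifiers_bitset: u32,
    }

    fn encode(tokens: &[Tok]) -> Vec<u32> {
        let mut out = Vec::with_capacity(tokens.len() * 5);
        let (mut prev_line, mut prev_start) = (0, 0);
        for tok in tokens {
            let delta_line = tok.line - prev_line;
            // Relative to the previous token's start column when both
            // tokens share a line, absolute at the start of a new line.
            let delta_start = if delta_line == 0 { tok.start - prev_start } else { tok.start };
            out.extend([delta_line, delta_start, tok.length, tok.token_type, tok.token_modifiers_bitset]);
            prev_line = tok.line;
            prev_start = tok.start;
        }
        out
    }

    fn main() {
        // "abc def" tokenized as `abc` then `def` on the same line.
        let toks = [
            Tok { line: 0, start: 0, length: 3, token_type: 0, token_modifiers_bitset: 0 },
            Tok { line: 0, start: 4, length: 3, token_type: 1, token_modifiers_bitset: 0 },
        ];
        assert_eq!(encode(&toks), vec![0, 0, 3, 0, 0, 0, 4, 3, 1, 0]);
    }

Since `add` leaves the cursor at the end of the token it just emitted, the next call measures its line offset from the previous token's end; adding `last.length` when both tokens share a line appears to be how the patch converts that end-relative offset back into the start-relative `delta_start` the protocol expects.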
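
On the borrowing pattern shared by section.rs and server.rs: `RefMut::filter_map` narrows a `RefCell` borrow of the whole semantics map down to a single entry, so the caller gets a mutable handle on one source's tokens without cloning. A self-contained sketch of the pattern, assuming a plain `HashMap<String, Vec<u32>>` where the real code keys by `Rc<dyn Source>`:

    use std::cell::{RefCell, RefMut};
    use std::collections::HashMap;

    // Narrow a mutable borrow of the whole map down to a single entry.
    // Returns None when the key is absent instead of panicking.
    fn entry_mut<'a>(
        map: &'a RefCell<HashMap<String, Vec<u32>>>,
        key: &str,
    ) -> Option<RefMut<'a, Vec<u32>>> {
        RefMut::filter_map(map.borrow_mut(), |m| m.get_mut(key)).ok()
    }

    fn main() {
        let map = RefCell::new(HashMap::from([("a".to_string(), vec![1u32])]));
        if let Some(mut tokens) = entry_mut(&map, "a") {
            tokens.push(2); // mutates through the RefCell, no clone
        }
        assert_eq!(*map.borrow().get("a").unwrap(), vec![1, 2]);
    }

One caveat: the patch wraps the lookup in `.map(...)` and calls `.ok().unwrap()` inside, which panics when the map has no entry for the source; routing it through `and_then`, as the commented-out first attempt removed from section.rs did, would make a missing entry skip the block instead.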