Fix bug in cursor

ef3d0c3e 2024-10-18 14:04:15 +02:00
parent bd75161e86
commit f2bd8fee97
5 changed files with 99 additions and 19 deletions
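The fix below switches the semantic token `length` and the `LineCursor` column from byte/display-width counts to character counts. A minimal standalone sketch, not part of the commit, of why byte length and character count diverge on multi-byte UTF-8 input:

fn main() {
    // "héllo" contains the two-byte UTF-8 character 'é',
    // so its byte length and its character count differ.
    let s = "héllo";
    let byte_len = s.len();           // 6 bytes
    let char_len = s.chars().count(); // 5 characters
    assert_eq!(byte_len, 6);
    assert_eq!(char_len, 5);
    // Reporting byte_len as a token length to a client that counts
    // characters would overshoot by one column here.
    println!("bytes = {byte_len}, chars = {char_len}");
}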

View file

@@ -24,8 +24,6 @@ downcast-rs = "1.2.1"
getopts = "0.2.21"
graphviz-rust = "0.9.0"
lazy_static = "1.5.0"
lsp-server = "0.7.6"
lsp-types = "0.97.0"
mlua = { version = "0.9.9", features = ["lua54", "vendored", "serialize"] }
regex = "1.10.3"
rusqlite = "0.31.0"
@@ -38,7 +36,6 @@ tokio = { version = "1.38.1", features = [
"rt-multi-thread",
"io-std",
] }
tower-lsp = "0.20.0"
unicode-segmentation = "1.11.0"
walkdir = "2.5.0"

View file

@@ -455,7 +455,7 @@ mod tests {
use crate::parser::langparser::LangParser;
use crate::parser::parser::Parser;
use crate::parser::source::SourceFile;
use crate::validate_document;
use crate::{validate_document, validate_semantics};
use super::*;
@@ -555,13 +555,29 @@ nml.section.push("6", 6, "", "refname")
let source = Rc::new(SourceFile::with_content(
"".to_string(),
r#"
#{} test
# First section
##{}+ test
#{refname}*+ Another section
"#
.to_string(),
None,
));
let parser = LangParser::default();
let (_, state) = parser.parse(ParserState::new_with_semantics(&parser, None), source, None);
println!("{:#?}", state.shared.semantics);
let (_, state) = parser.parse(ParserState::new_with_semantics(&parser, None), source.clone(), None);
validate_semantics!(state, source.clone(), 0,
section_heading { delta_line == 1, delta_start == 0, length == 1 };
section_name { delta_line == 0, delta_start == 1 };
section_heading { delta_line == 1, delta_start == 0, length == 2 };
section_reference { delta_line == 0, delta_start == 2, length == 3 };
section_kind { delta_line == 0, delta_start == 3, length == 1 };
section_name { delta_line == 0, delta_start == 1 };
section_heading { delta_line == 1, delta_start == 0, length == 1 };
section_reference { delta_line == 0, delta_start == 1, length == 9 };
section_kind { delta_line == 0, delta_start == 9, length == 2 };
section_name { delta_line == 0, delta_start == 2 };
);
}
}
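To read the assertions above: LSP semantic tokens are delta-encoded, so `delta_line` is relative to the previous token's line, `delta_start` is relative to the previous token's start when both sit on the same line (otherwise to the start of the line), and `length` is the token's extent, counted in characters after this fix. A small standalone illustration with made-up values, not the test's actual tokens:

// Field names mirror lsp_types::SemanticToken; the values are hypothetical.
struct Tok {
    delta_line: u32,
    delta_start: u32,
    length: u32,
}

fn main() {
    // A token one line below the previous one, starting at column 0.
    let heading = Tok { delta_line: 1, delta_start: 0, length: 1 };
    // The next token on the same line: delta_line is 0 and delta_start
    // is measured from the previous token's start, not from column 0.
    let reference = Tok { delta_line: 0, delta_start: 1, length: 3 };
    println!("{} {} {} / {} {} {}",
        heading.delta_line, heading.delta_start, heading.length,
        reference.delta_line, reference.delta_start, reference.length);
}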

View file

@@ -1,4 +1,3 @@
use std::any::Any;
use std::cell::RefCell;
use std::ops::Range;
use std::rc::Rc;
@@ -143,6 +142,9 @@ impl Semantics {
.find('\n')
.unwrap_or(source.content().len() - cursor.pos);
let len = usize::min(range.end - cursor.pos, end);
let clen = source.content()[cursor.pos..cursor.pos+len]
.chars()
.fold(0, |clen, _| clen + 1);
let delta_line = cursor.line - current.line;
let delta_start = if delta_line == 0 {
@@ -159,7 +161,7 @@ impl Semantics {
tokens.push(SemanticToken {
delta_line: delta_line as u32,
delta_start: delta_start as u32,
length: len as u32,
length: clen as u32,
token_type: token.0,
token_modifiers_bitset: token.1
});
@@ -169,3 +171,72 @@ impl Semantics {
}
}
}
#[cfg(test)]
pub mod tests {
#[macro_export]
macro_rules! validate_semantics {
($state:expr, $source:expr, $idx:expr,) => {};
($state:expr, $source:expr, $idx:expr, $token_name:ident { $($field:ident == $value:expr),* }; $($tail:tt)*) => {{
let token = $state.shared.semantics
.as_ref()
.unwrap()
.borrow()
.get(&($source as Rc<dyn Source>))
.unwrap()
.tokens
.borrow()
[$idx];
let token_type = $state.shared.semantics
.as_ref()
.unwrap()
.borrow()
.get(&($source as Rc<dyn Source>))
.unwrap()
.token
.$token_name;
let found_token = (token.token_type, token.token_modifiers_bitset);
assert!(found_token == token_type, "Invalid token at index {}, expected {}{token_type:#?}, got: {found_token:#?}",
$idx, stringify!($token_name));
$(
let val = &token.$field;
assert!(*val == $value, "Invalid field {} at index {}, expected {:#?}, found {:#?}",
stringify!($field),
$idx,
$value,
val);
)*
validate_semantics!($state, $source, ($idx+1), $($tail)*);
}};
($state:expr, $source:expr, $idx:expr, $token_name:ident; $($tail:tt)*) => {{
let token = $state.shared.semantics
.as_ref()
.unwrap()
.borrow()
.get(&($source as Rc<dyn Source>))
.unwrap()
.tokens
.borrow()
[$idx];
let token_type = $state.shared.semantics
.as_ref()
.unwrap()
.borrow()
.get(&($source as Rc<dyn Source>))
.unwrap()
.token
.$token_name;
let found_token = (token.token_type, token.token_modifiers_bitset);
assert!(found_token == token_type, "Invalid token at index {}, expected {}{token_type:#?}, got: {found_token:#?}",
$idx, stringify!($token_name));
validate_semantics!($state, $source, ($idx+1), $($tail)*);
}};
}
}

View file

@@ -5,7 +5,6 @@ use std::rc::Rc;
use downcast_rs::impl_downcast;
use downcast_rs::Downcast;
use unicode_width::UnicodeWidthChar;
/// Trait for source content
pub trait Source: Downcast {
@@ -153,9 +152,13 @@ impl Clone for Cursor {
/// Cursor type used for the language server
#[derive(Debug, Clone)]
pub struct LineCursor {
/// Byte position in the source
pub pos: usize,
/// Line number
pub line: usize,
/// Position in the line
pub line_pos: usize,
/// Source
pub source: Rc<dyn Source>,
}
@@ -171,14 +174,13 @@ impl LineCursor {
}
}
/// Moves [`LineCursor`] to absolute position
/// Moves [`LineCursor`] to an absolute byte position
///
/// # Error
/// This function will panic if [`pos`] is not utf8 aligned
pub fn move_to(&mut self, pos: usize) {
if self.pos < pos {
let start = self.pos;
//eprintln!("slice{{{}}}, want={pos}", &self.source.content().as_str()[start..pos]);
let mut it = self.source.content().as_str()[start..] // pos+1
.chars()
.peekable();
@@ -187,7 +189,6 @@ impl LineCursor {
.chars()
.rev()
.next();
//eprintln!("prev={prev:#?}");
while self.pos < pos {
let c = it.next().unwrap();
let len = c.len_utf8();
@@ -196,10 +197,8 @@ impl LineCursor {
self.line += 1;
self.line_pos = 0;
}
self.line_pos += c.width().unwrap_or(1);
self.line_pos += 1;
self.pos += len;
//eprintln!("({}, {c:#?}, {} {}, {})", self.pos, self.line, self.line_pos, prev.unwrap_or(' '));
prev = Some(c);
}
if self.pos != 0 && prev == Some('\n') {
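The core of the cursor fix, in isolation: `move_to` advances character by character, keeping the byte offset on a UTF-8 boundary via `len_utf8()`, while the column (`line_pos`) now grows by one per character instead of by display width. A simplified standalone sketch of that idea, not the repository's `LineCursor`:

fn advance(content: &str, target: usize) -> (usize, usize, usize) {
    // (byte position, line number, column counted in characters)
    let (mut pos, mut line, mut line_pos) = (0usize, 0usize, 0usize);
    for c in content.chars() {
        if pos >= target {
            break;
        }
        if c == '\n' {
            line += 1;
            line_pos = 0;
        } else {
            // One column per character, regardless of its byte width.
            line_pos += 1;
        }
        // Advancing by len_utf8() keeps `pos` on a UTF-8 boundary.
        pos += c.len_utf8();
    }
    (pos, line, line_pos)
}

fn main() {
    let text = "ab\ncdé f";
    // 'é' is two bytes long but advances the column by only one.
    let (pos, line, line_pos) = advance(text, text.len());
    println!("pos = {pos}, line = {line}, column = {line_pos}");
}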

View file

@@ -9,7 +9,6 @@ mod parser;
use std::rc::Rc;
use dashmap::DashMap;
use lsp::semantic::Tokens;
use parser::langparser::LangParser;
use parser::parser::Parser;
use parser::parser::ParserState;
@@ -25,8 +24,6 @@ use tower_lsp::Server;
struct Backend {
client: Client,
document_map: DashMap<String, String>,
//ast_map: DashMap<String, Vec<Box<dyn Element>>>,
//variables: DashMap<String, HashMap<String, Arc<dyn Variable + Send + Sync + 'static>>>,
semantic_token_map: DashMap<String, Vec<SemanticToken>>,
}