Lsp progress
parent 1f9044baf6
commit 57da207a81

9 changed files with 411 additions and 176 deletions
@@ -20,6 +20,8 @@ use mlua::Lua;
 use regex::Regex;
 use section_style::SectionLinkPos;
 use section_style::SectionStyle;
+use std::cell::RefCell;
+use std::cell::RefMut;
 use std::ops::Range;
 use std::rc::Rc;
 use std::sync::Arc;
@@ -45,7 +47,12 @@ impl Element for Section {
     fn location(&self) -> &Token { &self.location }
     fn kind(&self) -> ElemKind { ElemKind::Block }
     fn element_name(&self) -> &'static str { "Section" }
-    fn compile(&self, compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> {
+    fn compile(
+        &self,
+        compiler: &Compiler,
+        _document: &dyn Document,
+        _cursor: usize,
+    ) -> Result<String, String> {
         match compiler.target() {
             Target::HTML => {
                 // Section numbering
@@ -321,6 +328,28 @@ impl RegexRule for SectionRule {
             }),
         );
 
+        //if let Some(sems) = state.shared.semantics.and_then(|sems| {
+        //    RefMut::filter_map(sems.borrow_mut(), |sems| sems.get_mut(&token.source())).ok()
+        //})
+        /*if let Some(sems) = state.shared.semantics
+            .as_ref()
+            .and_then(
+                |sems| sems
+                    .borrow_mut()
+                    .get_mut(&token.source())
+                    .map(|v| v)
+            )
+        {
+        }*/
+        if let Some(mut sems) = state.shared.semantics.as_ref().map(|sems| {
+            RefMut::filter_map(sems.borrow_mut(), |sems| sems.get_mut(&token.source()))
+                .ok()
+                .unwrap()
+        }) {
+            // Do something with mutable value_for_key
+            sems.add(matches.get(1).unwrap().range(), 0, 0);
+        }
+
         result
     }
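Note: the `if let Some(mut sems)` block above narrows a `RefMut` over the whole semantics map down to a `RefMut` over a single entry. A minimal standalone sketch of that `RefMut::filter_map` pattern, with simplified stand-in types (the real map is `HashMap<Rc<dyn Source>, Semantics>`); matching on the returned `Result` directly also avoids the `.ok().unwrap()` panic path taken by the work-in-progress code:

    use std::cell::{RefCell, RefMut};
    use std::collections::HashMap;

    fn main() {
        let sems: RefCell<HashMap<String, Vec<u32>>> = RefCell::new(HashMap::new());
        sems.borrow_mut().insert("a.nml".to_string(), vec![]);

        // Narrow the borrow of the whole map down to one entry.
        if let Ok(mut entry) = RefMut::filter_map(sems.borrow_mut(), |m| m.get_mut("a.nml")) {
            entry.push(42); // mutate through the narrowed RefMut
        }
    }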
@@ -330,7 +359,13 @@ impl RegexRule for SectionRule {
         bindings.push((
             "push".to_string(),
             lua.create_function(
-                |_, (title, depth, kind, reference): (String, usize, Option<String>, Option<String>)| {
+                |_,
+                 (title, depth, kind, reference): (
+                    String,
+                    usize,
+                    Option<String>,
+                    Option<String>,
+                )| {
                     let kind = match kind.as_deref().unwrap_or("") {
                         "*+" | "+*" => section_kind::NO_NUMBER | section_kind::NO_TOC,
                         "*" => section_kind::NO_NUMBER,
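Note: the `kind` match above combines bit flags. Only the names `NO_NUMBER` and `NO_TOC` and the `*`, `*+`, `+*` arms appear in this diff, so the concrete flag values and the bare `+` arm in this sketch are assumptions:

    // Hypothetical flag values; only the names NO_NUMBER / NO_TOC appear in the diff.
    mod section_kind {
        pub const NO_NUMBER: u8 = 0b01;
        pub const NO_TOC: u8 = 0b10;
    }

    fn parse_kind(spec: &str) -> Option<u8> {
        match spec {
            "" => Some(0),
            "*" => Some(section_kind::NO_NUMBER),
            "+" => Some(section_kind::NO_TOC), // assumed arm, not shown in the diff
            "*+" | "+*" => Some(section_kind::NO_NUMBER | section_kind::NO_TOC),
            _ => None, // unknown kind -> reported as an error in the real rule
        }
    }

    fn main() {
        assert_eq!(parse_kind("+*"), Some(0b11));
    }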
@@ -341,7 +376,9 @@ impl RegexRule for SectionRule {
                     to: Some("push".to_string()),
                     pos: 3,
                     name: Some("kind".to_string()),
-                    cause: Arc::new(mlua::Error::external("Unknown section kind specified".to_string())),
+                    cause: Arc::new(mlua::Error::external(
+                        "Unknown section kind specified".to_string(),
+                    )),
                 })
             }
         };
@@ -503,7 +540,8 @@ nml.section.push("6", 6, "", "refname")
         let state = ParserState::new(&parser, None);
         let (_, state) = parser.parse(state, source, None);
 
-        let style = state.shared
+        let style = state
+            .shared
             .styles
             .borrow()
             .current(section_style::STYLE_KEY)
@@ -511,6 +549,30 @@ nml.section.push("6", 6, "", "refname")
             .unwrap();
 
         assert_eq!(style.link_pos, SectionLinkPos::None);
-        assert_eq!(style.link, ["a".to_string(), "b".to_string(), "c".to_string()]);
+        assert_eq!(
+            style.link,
+            ["a".to_string(), "b".to_string(), "c".to_string()]
+        );
     }
+
+    #[test]
+    fn semantics()
+    {
+        let source = Rc::new(SourceFile::with_content(
+            "".to_string(),
+            r#"
+# 1
+##+ 2
+###* 3
+####+* 4
+#####*+ 5
+######{refname} 6
+"#
+            .to_string(),
+            None,
+        ));
+        let parser = LangParser::default();
+        let (_, state) = parser.parse(ParserState::new_with_semantics(&parser, None), source, None);
+        println!("{:#?}", state.shared.semantics);
+    }
 }
@@ -1,18 +1,17 @@
-use std::{cell::{Ref, RefCell, RefMut}, collections::HashMap, rc::Rc};
+use std::rc::Rc;
 
-use crate::{document::{customstyle::{CustomStyle, CustomStyleHolder}, document::Document, element::Element, layout::{LayoutHolder, LayoutType}, style::{ElementStyle, StyleHolder}}, lua::kernel::{Kernel, KernelHolder}, parser::{parser::{Parser, ReportColors}, rule::Rule, source::{Cursor, Source}, state::StateHolder}};
+use crate::parser::source::Cursor;
+use crate::parser::source::Source;
 
 #[derive(Debug, Clone)]
-pub struct LineCursor
-{
+pub struct LineCursor {
     pub pos: usize,
     pub line: usize,
     pub line_pos: usize,
     pub source: Rc<dyn Source>,
 }
 
-impl LineCursor
-{
+impl LineCursor {
     /// Creates [`LineCursor`] at position
     ///
     /// # Error
@@ -20,63 +19,54 @@ impl LineCursor
     ///
     /// Note: this is a convenience function, it should be used
     /// with parsimony as it is expensive
-    pub fn at(&mut self, pos: usize)
-    {
-        if pos > self.pos
-        {
+    pub fn at(&mut self, pos: usize) {
+        if pos > self.pos {
             let start = self.pos;
-            //eprintln!("slice{{{}}}, want={pos}", &self.source.content().as_str()[start..pos]);
-            let mut it = self.source.content()
-                .as_str()[start..] // pos+1
+            eprintln!("slice{{{}}}, want={pos}", &self.source.content().as_str()[start..pos]);
+            let mut it = self.source.content().as_str()[start..] // pos+1
                 .chars()
                 .peekable();
 
-            let mut prev = self.source.content()
-                .as_str()[..start+1]
-                .chars()
-                .rev()
-                .next();
-            //eprintln!("prev={prev:#?}");
-            while self.pos < pos
-            {
+            let mut prev = self.source.content().as_str()[..start + 1]
+                .chars()
+                .rev()
+                .next();
+            eprintln!("prev={prev:#?}");
+            while self.pos < pos {
                 let c = it.next().unwrap();
                 let len = c.len_utf8();
 
-                self.pos += len;
-                if prev == Some('\n')
-                {
+                if self.pos != 0 && prev == Some('\n') {
                     self.line += 1;
                     self.line_pos = 0;
-                }
-                else
-                {
+                } else {
                     self.line_pos += len;
                 }
+                self.pos += len;
 
-                //eprintln!("({}, {c:#?}) ({} {})", self.pos, self.line, self.line_pos);
+                eprintln!("({}, {c:#?}, {} {}, {prev:#?})", self.pos, self.line, self.line_pos);
                 prev = Some(c);
             }
-        }
-        else if pos < self.pos
-        {
+            if self.pos != 0 && prev == Some('\n') {
+                self.line += 1;
+                self.line_pos = 0;
+            }
+        } else if pos < self.pos {
+            todo!("");
-            self.source.content()
-                .as_str()[pos..self.pos]
+            self.source.content().as_str()[pos..self.pos]
                 .char_indices()
                 .rev()
                 .for_each(|(len, c)| {
                     self.pos -= len;
-                    if c == '\n'
-                    {
+                    if c == '\n' {
                         self.line -= 1;
                     }
                 });
-            self.line_pos = self.source.content()
-                .as_str()[..self.pos]
+            self.line_pos = self.source.content().as_str()[..self.pos]
                 .char_indices()
                 .rev()
                 .find(|(_, c)| *c == '\n')
-                .map(|(line_start, _)| self.pos-line_start)
+                .map(|(line_start, _)| self.pos - line_start)
                 .unwrap_or(0);
         }
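Note: the (line, column) pair that `at` maintains incrementally can also be recomputed from scratch; the cursor exists precisely to avoid rescanning the buffer on every lookup, which is why the doc comment calls the function expensive. A self-contained sketch of the from-scratch computation:

    /// Recompute the 0-based line and byte column of a byte offset.
    fn line_col(content: &str, pos: usize) -> (usize, usize) {
        let before = &content[..pos];
        let line = before.matches('\n').count();
        let line_start = before.rfind('\n').map(|i| i + 1).unwrap_or(0);
        (line, pos - line_start)
    }

    fn main() {
        let text = "# 1\n##+ 2\n";
        assert_eq!(line_col(text, 0), (0, 0));
        assert_eq!(line_col(text, 4), (1, 0)); // first byte of "##+ 2"
        assert_eq!(line_col(text, 7), (1, 3));
    }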
@@ -85,12 +75,11 @@ impl LineCursor
         }
     }
 }
 
-impl From<&LineCursor> for Cursor
-{
-    fn from(value: &LineCursor) -> Self {
+impl From<&LineCursor> for Cursor {
+    fn from(value: &LineCursor) -> Self {
         Self {
             pos: value.pos,
-            source: value.source.clone()
+            source: value.source.clone(),
         }
     }
 }
@@ -1,56 +1,80 @@
 use std::any::Any;
 
-use tower_lsp::lsp_types::{SemanticToken, SemanticTokenType};
+use tower_lsp::lsp_types::SemanticToken;
+use tower_lsp::lsp_types::SemanticTokenType;
 
-use crate::{document::{document::Document, element::Element}, elements::{comment::Comment, paragraph::Paragraph, section::Section}, parser::rule::Rule};
+use crate::document::document::Document;
+use crate::document::element::Element;
+use crate::elements::comment::Comment;
+use crate::elements::paragraph::Paragraph;
+use crate::elements::section::Section;
+use crate::parser::rule::Rule;
 
 use super::parser::LineCursor;
 
-pub trait SemanticProvider: Rule
-{
-    fn get_semantic_tokens(&self, cursor: &LineCursor, match_data: Box<dyn Any>) -> Vec<SemanticToken>;
+pub trait SemanticProvider: Rule {
+    fn get_semantic_tokens(
+        &self,
+        cursor: &LineCursor,
+        match_data: Box<dyn Any>,
+    ) -> Vec<SemanticToken>;
 }
 
-pub const LEGEND_TYPE : &[SemanticTokenType] = &[
-    SemanticTokenType::COMMENT,
-    SemanticTokenType::VARIABLE,
-    SemanticTokenType::STRING,
-    SemanticTokenType::PARAMETER,
+pub mod nml_semantic {
+    use tower_lsp::lsp_types::SemanticTokenType;
+
+    pub const SECTION_HEADING: SemanticTokenType = SemanticTokenType::new("type");
+    pub const SECTION_NAME: SemanticTokenType = SemanticTokenType::new("string");
+    pub const REFERENCE: SemanticTokenType = SemanticTokenType::new("event");
+}
+
+pub const LEGEND_TYPE: &[SemanticTokenType] = &[
+    SemanticTokenType::COMMENT,
+    SemanticTokenType::VARIABLE,
+    SemanticTokenType::STRING,
+    SemanticTokenType::PARAMETER,
 ];
 
 // TODO...
-pub fn provide(semantic_tokens: &mut Vec<SemanticToken>, cursor: &mut LineCursor, elem: &Box<dyn Element>) {
-    if cursor.source != elem.location().source() { return }
+pub fn provide(
+    semantic_tokens: &mut Vec<SemanticToken>,
+    cursor: &mut LineCursor,
+    elem: &Box<dyn Element>,
+) {
+    if cursor.source != elem.location().source() {
+        return;
+    }
 
     let prev = cursor.clone();
 
-    if let Some(comm) = elem.downcast_ref::<Comment>()
-    {
+    /*if let Some(comm) = elem.downcast_ref::<Comment>() {
         cursor.at(elem.location().start());
-        let delta_start = if cursor.line == prev.line
-        {
+        let delta_start = if cursor.line == prev.line {
             cursor.line_pos - prev.line_pos
-        } else if cursor.line == 0 { cursor.line_pos }
-        else { cursor.line_pos+1 };
+        } else if cursor.line == 0 {
+            cursor.line_pos
+        } else {
+            cursor.line_pos + 1
+        };
         semantic_tokens.push(SemanticToken {
-            delta_line: (cursor.line-prev.line) as u32,
+            delta_line: (cursor.line - prev.line) as u32,
             delta_start: delta_start as u32,
             length: (elem.location().end() - elem.location().start()) as u32,
             token_type: 0,
             token_modifiers_bitset: 0,
         });
     }
-    else if let Some(sect) = elem.downcast_ref::<Section>()
-    {
-        eprintln!("section");
-        cursor.at(elem.location().start());
-        let delta_start = if cursor.line == prev.line
-        {
+    } else */if let Some(sect) = elem.downcast_ref::<Section>() {
+        cursor.at(elem.location().start() + 1);
+        eprintln!("section {cursor:#?}");
+        let delta_start = if cursor.line == prev.line {
             cursor.line_pos - prev.line_pos
-        } else if cursor.line == 0 { cursor.line_pos }
-        else { cursor.line_pos+1 };
+        } else if cursor.line == 0 {
+            cursor.line_pos
+        } else {
+            0
+        };
         semantic_tokens.push(SemanticToken {
-            delta_line: (cursor.line-prev.line) as u32,
+            delta_line: (cursor.line - prev.line) as u32,
             delta_start: delta_start as u32,
             length: (elem.location().end() - elem.location().start()) as u32,
             token_type: 0,
@@ -59,8 +83,7 @@ pub fn provide(semantic_tokens: &mut Vec<SemanticToken>, cursor: &mut LineCursor
         }
 }
 
-pub fn semantic_token_from_document(document: &dyn Document) -> Vec<SemanticToken>
-{
+pub fn semantic_token_from_document(document: &dyn Document) -> Vec<SemanticToken> {
     let mut semantic_tokens = vec![];
 
     let source = document.source();
@@ -68,11 +91,11 @@ pub fn semantic_token_from_document(document: &dyn Document) -> Vec<SemanticToke
         pos: 0,
         line: 0,
         line_pos: 0,
-        source: source.clone()
+        source: source.clone(),
     };
 
+    /*
     semantic_tokens.push(SemanticToken {
-        delta_line: 1,
+        delta_line: 2,
         delta_start: 1,
         length: 5,
         token_type: 0,
@@ -85,22 +108,23 @@ pub fn semantic_token_from_document(document: &dyn Document) -> Vec<SemanticToke
         length: 5,
         token_type: 1,
         token_modifiers_bitset: 0,
-    });
+    });*/
 
-    //document.content.borrow()
-    //    .iter()
-    //    .for_each(|elem| {
-    //        if let Some(paragraph) = elem.downcast_ref::<Paragraph>()
-    //        {
-    //            paragraph.content
-    //                .iter()
-    //                .for_each(|elem| provide(&mut semantic_tokens, &mut cursor, elem));
-    //        }
-    //        else
-    //        {
-    //            provide(&mut semantic_tokens, &mut cursor, elem);
-    //        }
-    //    });
+    document.content().borrow()
+        .iter()
+        .for_each(|elem| {
+            if let Some(container) = elem.as_container()
+            {
+                container
+                    .contained()
+                    .iter()
+                    .for_each(|elem| provide(&mut semantic_tokens, &mut cursor, elem));
+            }
+            else
+            {
+                provide(&mut semantic_tokens, &mut cursor, elem);
+            }
+        });
 
     semantic_tokens
 }
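Note: both `provide` here and `Semantics::add` introduced later in this commit implement the LSP semantic-token delta encoding: per the LSP specification, each token carries its line as a delta from the previous token's line, and its start column as a delta from the previous token's start only when both are on the same line. A standalone sketch of that encoding:

    struct Tok {
        line: u32,
        start: u32,
        length: u32,
    }

    /// Delta-encode absolute (line, start) positions the way LSP expects:
    /// [delta_line, delta_start, length, token_type, token_modifiers] per token.
    fn encode(tokens: &[Tok]) -> Vec<u32> {
        let (mut prev_line, mut prev_start) = (0u32, 0u32);
        let mut out = Vec::new();
        for t in tokens {
            let delta_line = t.line - prev_line;
            let delta_start = if delta_line == 0 { t.start - prev_start } else { t.start };
            out.extend([delta_line, delta_start, t.length, 0, 0]);
            prev_line = t.line;
            prev_start = t.start;
        }
        out
    }

    fn main() {
        let toks = [Tok { line: 0, start: 2, length: 3 }, Tok { line: 2, start: 1, length: 5 }];
        assert_eq!(encode(&toks), vec![0, 2, 3, 0, 0, 2, 1, 5, 0, 0]);
    }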
@@ -10,6 +10,7 @@ use super::parser::Parser;
 use super::parser::ParserState;
 use super::parser::ReportColors;
 use super::rule::Rule;
+use super::semantics::Semantics;
 use super::source::Cursor;
 use super::source::Source;
 use super::source::Token;
@@ -59,6 +60,16 @@ impl Parser for LangParser {
     ) -> (Box<dyn Document<'doc> + 'doc>, ParserState<'p, 'a>) {
         let doc = LangDocument::new(source.clone(), parent);
 
+        // Insert semantics into state
+        if let Some(semantics) = state.shared.semantics.as_ref()
+        {
+            let mut b = semantics.borrow_mut();
+            if !b.contains_key(&source)
+            {
+                b.insert(source.clone(), Semantics::new(source.clone()));
+            }
+        }
+
         let content = source.content();
         let mut cursor = Cursor::new(0usize, doc.source()); // Cursor in file
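Note: the `contains_key` + `insert` pair above performs two map lookups; the entry API expresses the same "insert if absent" with one. A sketch with simplified key/value types:

    use std::collections::HashMap;

    fn main() {
        let mut b: HashMap<String, Vec<u32>> = HashMap::new();
        let source = "main.nml".to_string();

        // Equivalent to: if !b.contains_key(&source) { b.insert(source, vec![]); }
        b.entry(source).or_insert_with(Vec::new);

        assert!(b.contains_key("main.nml"));
    }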
@@ -7,3 +7,4 @@ pub mod util;
 pub mod style;
 pub mod layout;
 pub mod customstyle;
+pub mod semantics;
@@ -2,6 +2,7 @@ use ariadne::Label;
 use ariadne::Report;
 use std::any::Any;
 use std::cell::RefCell;
+use std::collections::HashMap;
 use std::collections::HashSet;
 use std::ops::Range;
 use std::rc::Rc;
@@ -10,6 +11,7 @@ use unicode_segmentation::UnicodeSegmentation;
 use super::customstyle::CustomStyleHolder;
 use super::layout::LayoutHolder;
 use super::rule::Rule;
+use super::semantics::Semantics;
 use super::source::Cursor;
 use super::source::Source;
 use super::state::RuleStateHolder;
@@ -69,17 +71,21 @@ pub struct SharedState {
 
     /// The custom styles
     pub custom_styles: RefCell<CustomStyleHolder>,
+
+    /// The semantic map
+    pub semantics: Option<RefCell<HashMap<Rc<dyn Source>, Semantics>>>,
 }
 
 impl SharedState {
     /// Construct a new empty shared state
-    pub(self) fn new(parser: &dyn Parser) -> Self {
+    pub(self) fn new(parser: &dyn Parser, enable_semantics: bool) -> Self {
         let s = Self {
             rule_state: RefCell::new(RuleStateHolder::default()),
             kernels: RefCell::new(KernelHolder::default()),
             styles: RefCell::new(StyleHolder::default()),
             layouts: RefCell::new(LayoutHolder::default()),
             custom_styles: RefCell::new(CustomStyleHolder::default()),
+            semantics: enable_semantics.then_some(RefCell::new(HashMap::new())),
         };
 
         // Register default kernel
@@ -128,7 +134,25 @@ impl<'a, 'b> ParserState<'a, 'b> {
         let shared = if let Some(parent) = &parent {
             parent.shared.clone()
         } else {
-            Rc::new(SharedState::new(parser))
+            Rc::new(SharedState::new(parser, false))
         };
 
         Self {
             parser,
             parent,
             matches: RefCell::new(matches),
             shared,
         }
     }
 
+    /// Constructs a new state with semantics enabled
+    /// See [`ParserState::new`] for mote information
+    pub fn new_with_semantics(parser: &'a dyn Parser, parent: Option<&'a ParserState<'a, 'b>>) -> Self {
+        let matches = parser.rules().iter().map(|_| (0, None)).collect::<Vec<_>>();
+        let shared = if let Some(parent) = &parent {
+            parent.shared.clone()
+        } else {
+            Rc::new(SharedState::new(parser, true))
+        };
+
+        Self {
src/parser/semantics.rs (new file, 48 lines)
@@ -0,0 +1,48 @@
+use std::{ops::Range, rc::Rc};
+
+use tower_lsp::lsp_types::SemanticToken;
+
+use super::source::{LineCursor, Source};
+
+/// Semantics for a buffer
+#[derive(Debug)]
+pub struct Semantics
+{
+    /// The current cursor
+    cursor: LineCursor,
+
+    /// Semantic tokens
+    tokens: Vec<SemanticToken>,
+}
+
+impl Semantics
+{
+    pub fn new(source: Rc<dyn Source>) -> Semantics
+    {
+        Self {
+            cursor: LineCursor::new(source),
+            tokens: vec![]
+        }
+    }
+
+    pub fn add(&mut self, range: Range<usize>, token_type: u32, token_modifier: u32)
+    {
+        let current = self.cursor.clone();
+        self.cursor.move_to(range.start);
+
+        let delta_line = self.cursor.line - current.line;
+        let delta_start = if delta_line == 0
+        {
+            self.cursor.line_pos - current.line_pos
+        } else { self.cursor.line_pos };
+
+        self.tokens.push(SemanticToken{
+            delta_line: delta_line as u32,
+            delta_start: delta_start as u32,
+            length: 10,
+            token_type,
+            token_modifiers_bitset: token_modifier,
+        });
+    }
+}
@@ -149,6 +149,87 @@ impl Clone for Cursor {
     }
 }
 
+/// Cursor type used for the language server
+#[derive(Debug, Clone)]
+pub struct LineCursor {
+    pub pos: usize,
+    pub line: usize,
+    pub line_pos: usize,
+    pub source: Rc<dyn Source>,
+}
+
+impl LineCursor {
+    /// Creates a [`LineCursor`] at the begining of the source
+    pub fn new(source: Rc<dyn Source>) -> LineCursor
+    {
+        Self {
+            pos: 0,
+            line: 0,
+            line_pos: 0,
+            source,
+        }
+    }
+
+    /// Moves [`LineCursor`] to absolute position
+    ///
+    /// # Error
+    /// This function will panic if [`pos`] is not utf8 aligned
+    pub fn move_to(&mut self, pos: usize) {
+        if pos > self.pos {
+            let start = self.pos;
+            eprintln!("slice{{{}}}, want={pos}", &self.source.content().as_str()[start..pos]);
+            let mut it = self.source.content().as_str()[start..] // pos+1
+                .chars()
+                .peekable();
+
+            let mut prev = self.source.content().as_str()[..start + 1]
+                .chars()
+                .rev()
+                .next();
+            eprintln!("prev={prev:#?}");
+            while self.pos < pos {
+                let c = it.next().unwrap();
+                let len = c.len_utf8();
+
+                if self.pos != 0 && prev == Some('\n') {
+                    self.line += 1;
+                    self.line_pos = 0;
+                } else {
+                    self.line_pos += len;
+                }
+                self.pos += len;
+
+                eprintln!("({}, {c:#?}, {} {}, {prev:#?})", self.pos, self.line, self.line_pos);
+                prev = Some(c);
+            }
+            if self.pos != 0 && prev == Some('\n') {
+                self.line += 1;
+                self.line_pos = 0;
+            }
+        } else if pos < self.pos {
+            todo!("Going back is not supported");
+            self.source.content().as_str()[pos..self.pos]
+                .char_indices()
+                .rev()
+                .for_each(|(len, c)| {
+                    self.pos -= len;
+                    if c == '\n' {
+                        self.line -= 1;
+                    }
+                });
+            self.line_pos = self.source.content().as_str()[..self.pos]
+                .char_indices()
+                .rev()
+                .find(|(_, c)| *c == '\n')
+                .map(|(line_start, _)| self.pos - line_start)
+                .unwrap_or(0);
+        }
+
+        // May fail if pos is not utf8-aligned
+        assert_eq!(pos, self.pos);
+    }
+}
+
 #[derive(Debug, Clone)]
 pub struct Token {
     pub range: Range<usize>,
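Note: `move_to` only detects a misaligned `pos` after the walk, via the final `assert_eq!`; `str::is_char_boundary` can check UTF-8 alignment up front. A small sketch of that guard:

    fn main() {
        let content = "é1"; // 'é' is 2 bytes in UTF-8
        assert!(content.is_char_boundary(0));
        assert!(!content.is_char_boundary(1)); // inside 'é': not a valid target position
        assert!(content.is_char_boundary(2));
    }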
src/server.rs (147 lines changed)
@@ -1,11 +1,10 @@
 #![feature(char_indices_offset)]
-mod document;
-mod compiler;
-mod parser;
-mod elements;
-mod lua;
 mod cache;
+mod compiler;
+mod document;
+mod elements;
+mod lsp;
+mod lua;
+mod parser;
 
-use std::collections::HashMap;
 use std::rc::Rc;
@@ -14,21 +13,25 @@ use std::sync::Arc;
 use dashmap::DashMap;
 use document::document::Document;
 use document::element::Element;
-use lsp::semantic::{semantic_token_from_document, LEGEND_TYPE};
+use lsp::semantic::semantic_token_from_document;
 use parser::langparser::LangParser;
 use parser::parser::Parser;
+use parser::parser::ParserState;
 use parser::source::SourceFile;
 use tower_lsp::jsonrpc::Result;
 use tower_lsp::lsp_types::*;
-use tower_lsp::{Client, LanguageServer, LspService, Server};
+use tower_lsp::Client;
+use tower_lsp::LanguageServer;
+use tower_lsp::LspService;
+use tower_lsp::Server;
 
 #[derive(Debug)]
 struct Backend {
-	client: Client,
+    client: Client,
     document_map: DashMap<String, String>,
     //ast_map: DashMap<String, Vec<Box<dyn Element>>>,
     //variables: DashMap<String, HashMap<String, Arc<dyn Variable + Send + Sync + 'static>>>,
-	semantic_token_map: DashMap<String, Vec<SemanticToken>>,
+    semantic_token_map: DashMap<String, Vec<SemanticToken>>,
 }
 
 #[derive(Debug)]
@@ -44,27 +47,24 @@ impl Backend {
 
         // TODO: Create a custom parser for the lsp
         // Which will require a dyn Document to work
-        let source = SourceFile::with_content(
-            params.uri.to_string(),
-            params.text.clone(),
-            None);
+        let source = Rc::new(SourceFile::with_content(params.uri.to_string(), params.text.clone(), None));
         let parser = LangParser::default();
-        let doc = parser.parse(Rc::new(source), None);
+        let (doc, state) = parser.parse(ParserState::new_with_semantics(&parser, None), source.clone(), None);
 
-        //let semantic_tokens = semantic_token_from_document(&doc);
-        //self.semantic_token_map
-        //    .insert(params.uri.to_string(), semantic_tokens);
+        self.semantic_token_map
+            .insert(params.uri.to_string(),
+                state.shared.semantics.);
     }
 }
 
 #[tower_lsp::async_trait]
 impl LanguageServer for Backend {
-	async fn initialize(&self, _: InitializeParams) -> Result<InitializeResult> {
+    async fn initialize(&self, _: InitializeParams) -> Result<InitializeResult> {
         Ok(InitializeResult {
             server_info: None,
             capabilities: ServerCapabilities {
                 text_document_sync: Some(TextDocumentSyncCapability::Kind(
-					TextDocumentSyncKind::FULL,
+                    TextDocumentSyncKind::FULL,
                 )),
                 completion_provider: Some(CompletionOptions {
                     resolve_provider: Some(false),
@@ -74,44 +74,42 @@ impl LanguageServer for Backend {
                     completion_item: None,
                 }),
                 semantic_tokens_provider: Some(
-                    SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
-                        SemanticTokensRegistrationOptions {
-                            text_document_registration_options: {
-                                TextDocumentRegistrationOptions {
-                                    document_selector: Some(vec![DocumentFilter {
-                                        language: Some("nml".to_string()),
-                                        scheme: Some("file".to_string()),
-                                        pattern: None,
-                                    }]),
-                                }
-                            },
-                            semantic_tokens_options: SemanticTokensOptions {
-                                work_done_progress_options: WorkDoneProgressOptions::default(),
-                                legend: SemanticTokensLegend {
-                                    token_types: LEGEND_TYPE.into(),
-                                    token_modifiers: vec![],
-                                },
-                                range: None, //Some(true),
-                                full: Some(SemanticTokensFullOptions::Bool(true)),
-                            },
-                            static_registration_options: StaticRegistrationOptions::default(),
-                        },
-                    ),
-                ),
+                    SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
+                        SemanticTokensRegistrationOptions {
+                            text_document_registration_options: {
+                                TextDocumentRegistrationOptions {
+                                    document_selector: Some(vec![DocumentFilter {
+                                        language: Some("nml".to_string()),
+                                        scheme: Some("file".to_string()),
+                                        pattern: None,
+                                    }]),
+                                }
+                            },
+                            semantic_tokens_options: SemanticTokensOptions {
+                                work_done_progress_options: WorkDoneProgressOptions::default(),
+                                legend: SemanticTokensLegend {
+                                    token_types: lsp::semantic::LEGEND_TYPE.into(),
+                                    token_modifiers: vec![],
+                                },
+                                range: None, //Some(true),
+                                full: Some(SemanticTokensFullOptions::Bool(true)),
+                            },
+                            static_registration_options: StaticRegistrationOptions::default(),
+                        },
+                    ),
+                ),
                 ..ServerCapabilities::default()
             },
         })
     }
 
-	async fn initialized(&self, _: InitializedParams) {
-		self.client
-			.log_message(MessageType::INFO, "server initialized!")
-			.await;
-	}
+    async fn initialized(&self, _: InitializedParams) {
+        self.client
+            .log_message(MessageType::INFO, "server initialized!")
+            .await;
+    }
 
-	async fn shutdown(&self) -> Result<()> {
-		Ok(())
-	}
+    async fn shutdown(&self) -> Result<()> { Ok(()) }
 
     async fn did_open(&self, params: DidOpenTextDocumentParams) {
         self.client
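Note: a registration detail implied above: the order of `token_types` in the legend is what gives meaning to the numeric `token_type` field of each `SemanticToken`, so the `token_type: 0` pushed elsewhere in this commit refers to the first legend entry (`COMMENT`). A sketch of resolving an index from the legend:

    use tower_lsp::lsp_types::SemanticTokenType;

    pub const LEGEND_TYPE: &[SemanticTokenType] = &[
        SemanticTokenType::COMMENT,
        SemanticTokenType::VARIABLE,
        SemanticTokenType::STRING,
        SemanticTokenType::PARAMETER,
    ];

    /// Index of a token type within the legend, as carried by SemanticToken::token_type.
    fn token_type_index(ty: &SemanticTokenType) -> Option<u32> {
        LEGEND_TYPE.iter().position(|t| t == ty).map(|i| i as u32)
    }

    fn main() {
        assert_eq!(token_type_index(&SemanticTokenType::COMMENT), Some(0));
        assert_eq!(token_type_index(&SemanticTokenType::STRING), Some(2));
    }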
@@ -132,16 +130,16 @@ impl LanguageServer for Backend {
             .await
     }
 
-	async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
-		let uri = params.text_document_position.text_document.uri;
-		let position = params.text_document_position.position;
-		let completions = || -> Option<Vec<CompletionItem>> {
-			let mut ret = Vec::with_capacity(0);
+    async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
+        let uri = params.text_document_position.text_document.uri;
+        let position = params.text_document_position.position;
+        let completions = || -> Option<Vec<CompletionItem>> {
+            let mut ret = Vec::with_capacity(0);
 
-			Some(ret)
-		}();
-		Ok(completions.map(CompletionResponse::Array))
-	}
+            Some(ret)
+        }();
+        Ok(completions.map(CompletionResponse::Array))
+    }
 
     async fn semantic_tokens_full(
         &self,
@@ -153,10 +151,9 @@ impl LanguageServer for Backend {
             .await;
 
         if let Some(semantic_tokens) = self.semantic_token_map.get(&uri) {
-            let data = semantic_tokens.iter()
-                .filter_map(|token| {
-                    Some(token.clone())
-                })
+            let data = semantic_tokens
+                .iter()
+                .filter_map(|token| Some(token.clone()))
                 .collect::<Vec<_>>();
 
             return Ok(Some(SemanticTokensResult::Tokens(SemanticTokens {
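Note: `filter_map(|token| Some(token.clone()))` never filters anything out, so it is equivalent to `.cloned()`:

    fn main() {
        let semantic_tokens = vec![1u32, 2, 3];
        let a: Vec<u32> = semantic_tokens.iter().filter_map(|t| Some(t.clone())).collect();
        let b: Vec<u32> = semantic_tokens.iter().cloned().collect();
        assert_eq!(a, b);
    }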
@@ -170,15 +167,13 @@ impl LanguageServer for Backend {
 
 #[tokio::main]
 async fn main() {
-	let stdin = tokio::io::stdin();
-	let stdout = tokio::io::stdout();
+    let stdin = tokio::io::stdin();
+    let stdout = tokio::io::stdout();
 
-    let (service, socket) = LspService::new(
-        |client|
-        Backend {
-            client,
-            document_map: DashMap::new(),
-            semantic_token_map: DashMap::new(),
-        });
-    Server::new(stdin, stdout, socket).serve(service).await;
+    let (service, socket) = LspService::new(|client| Backend {
+        client,
+        document_map: DashMap::new(),
+        semantic_token_map: DashMap::new(),
+    });
+    Server::new(stdin, stdout, socket).serve(service).await;
 }