Lsp progress

ef3d0c3e 2024-10-16 23:42:49 +02:00
parent 1f9044baf6
commit 57da207a81
9 changed files with 411 additions and 176 deletions

src/elements/section.rs

@@ -20,6 +20,8 @@ use mlua::Lua;
 use regex::Regex;
 use section_style::SectionLinkPos;
 use section_style::SectionStyle;
+use std::cell::RefCell;
+use std::cell::RefMut;
 use std::ops::Range;
 use std::rc::Rc;
 use std::sync::Arc;
@@ -45,7 +47,12 @@ impl Element for Section {
 	fn location(&self) -> &Token { &self.location }
 	fn kind(&self) -> ElemKind { ElemKind::Block }
 	fn element_name(&self) -> &'static str { "Section" }
-	fn compile(&self, compiler: &Compiler, _document: &dyn Document, _cursor: usize) -> Result<String, String> {
+	fn compile(
+		&self,
+		compiler: &Compiler,
+		_document: &dyn Document,
+		_cursor: usize,
+	) -> Result<String, String> {
 		match compiler.target() {
 			Target::HTML => {
 				// Section numbering
@@ -321,6 +328,28 @@ impl RegexRule for SectionRule {
 			}),
 		);
 
+		//if let Some(sems) = state.shared.semantics.and_then(|sems| {
+		//	RefMut::filter_map(sems.borrow_mut(), |sems| sems.get_mut(&token.source())).ok()
+		//})
+		/*if let Some(sems) = state.shared.semantics
+			.as_ref()
+			.and_then(
+				|sems| sems
+					.borrow_mut()
+					.get_mut(&token.source())
+					.map(|v| v)
+			)
+		{
+		}*/
+		if let Some(mut sems) = state.shared.semantics.as_ref().map(|sems| {
+			RefMut::filter_map(sems.borrow_mut(), |sems| sems.get_mut(&token.source()))
+				.ok()
+				.unwrap()
+		}) {
+			// Do something with mutable value_for_key
+			sems.add(matches.get(1).unwrap().range(), 0, 0);
+		}
+
 		result
 	}
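The kept branch unwraps the `Err` case of `RefMut::filter_map`, so it panics whenever the map has no `Semantics` entry for `token.source()`. A panic-free sketch in the spirit of the first commented-out attempt, assuming the same `semantics` field, would chain `and_then` instead:

// Hypothetical panic-free lookup: `.ok()` inside `and_then` simply skips
// sources that have no Semantics entry instead of unwrapping.
if let Some(mut sems) = state.shared.semantics.as_ref().and_then(|sems| {
	RefMut::filter_map(sems.borrow_mut(), |sems| sems.get_mut(&token.source())).ok()
}) {
	sems.add(matches.get(1).unwrap().range(), 0, 0);
}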
@@ -330,7 +359,13 @@ impl RegexRule for SectionRule {
 		bindings.push((
 			"push".to_string(),
 			lua.create_function(
-				|_, (title, depth, kind, reference): (String, usize, Option<String>, Option<String>)| {
+				|_,
+				 (title, depth, kind, reference): (
+					String,
+					usize,
+					Option<String>,
+					Option<String>,
+				)| {
 					let kind = match kind.as_deref().unwrap_or("") {
 						"*+" | "+*" => section_kind::NO_NUMBER | section_kind::NO_TOC,
 						"*" => section_kind::NO_NUMBER,
@@ -341,7 +376,9 @@ impl RegexRule for SectionRule {
 							to: Some("push".to_string()),
 							pos: 3,
 							name: Some("kind".to_string()),
-							cause: Arc::new(mlua::Error::external("Unknown section kind specified".to_string())),
+							cause: Arc::new(mlua::Error::external(
+								"Unknown section kind specified".to_string(),
+							)),
 						})
 					}
 				};
@@ -503,7 +540,8 @@ nml.section.push("6", 6, "", "refname")
 		let state = ParserState::new(&parser, None);
 		let (_, state) = parser.parse(state, source, None);
 
-		let style = state.shared
+		let style = state
+			.shared
 			.styles
 			.borrow()
 			.current(section_style::STYLE_KEY)
@@ -511,6 +549,30 @@
 			.unwrap();
 
 		assert_eq!(style.link_pos, SectionLinkPos::None);
-		assert_eq!(style.link, ["a".to_string(), "b".to_string(), "c".to_string()]);
+		assert_eq!(
+			style.link,
+			["a".to_string(), "b".to_string(), "c".to_string()]
+		);
+	}
+
+	#[test]
+	fn semantics()
+	{
+		let source = Rc::new(SourceFile::with_content(
+			"".to_string(),
+			r#"
+# 1
+##+ 2
+###* 3
+####+* 4
+#####*+ 5
+######{refname} 6
+			"#
+			.to_string(),
+			None,
+		));
+		let parser = LangParser::default();
+		let (_, state) = parser.parse(ParserState::new_with_semantics(&parser, None), source, None);
+		println!("{:#?}", state.shared.semantics);
+	}
 }
 }

src/lsp/parser.rs

@@ -1,18 +1,17 @@
-use std::{cell::{Ref, RefCell, RefMut}, collections::HashMap, rc::Rc};
+use std::rc::Rc;
 
-use crate::{document::{customstyle::{CustomStyle, CustomStyleHolder}, document::Document, element::Element, layout::{LayoutHolder, LayoutType}, style::{ElementStyle, StyleHolder}}, lua::kernel::{Kernel, KernelHolder}, parser::{parser::{Parser, ReportColors}, rule::Rule, source::{Cursor, Source}, state::StateHolder}};
+use crate::parser::source::Cursor;
+use crate::parser::source::Source;
 
 #[derive(Debug, Clone)]
-pub struct LineCursor
-{
+pub struct LineCursor {
 	pub pos: usize,
 	pub line: usize,
 	pub line_pos: usize,
 	pub source: Rc<dyn Source>,
 }
 
-impl LineCursor
-{
+impl LineCursor {
 	/// Creates [`LineCursor`] at position
 	///
 	/// # Error
@@ -20,63 +19,54 @@ impl LineCursor
 	///
 	/// Note: this is a convenience function, it should be used
 	/// with parsimony as it is expensive
-	pub fn at(&mut self, pos: usize)
-	{
-		if pos > self.pos
-		{
+	pub fn at(&mut self, pos: usize) {
+		if pos > self.pos {
 			let start = self.pos;
-			//eprintln!("slice{{{}}}, want={pos}", &self.source.content().as_str()[start..pos]);
-			let mut it = self.source.content()
-				.as_str()[start..] // pos+1
+			eprintln!("slice{{{}}}, want={pos}", &self.source.content().as_str()[start..pos]);
+			let mut it = self.source.content().as_str()[start..] // pos+1
 				.chars()
 				.peekable();
-			let mut prev = self.source.content()
-				.as_str()[..start+1]
-				.chars()
-				.rev()
-				.next();
-			//eprintln!("prev={prev:#?}");
-			while self.pos < pos
-			{
+			let mut prev = self.source.content().as_str()[..start + 1]
+				.chars()
+				.rev()
+				.next();
+			eprintln!("prev={prev:#?}");
+			while self.pos < pos {
 				let c = it.next().unwrap();
 				let len = c.len_utf8();
-				self.pos += len;
-				if prev == Some('\n')
-				{
+				if self.pos != 0 && prev == Some('\n') {
 					self.line += 1;
 					self.line_pos = 0;
-				}
-				else
-				{
+				} else {
 					self.line_pos += len;
 				}
+				self.pos += len;
-				//eprintln!("({}, {c:#?}) ({} {})", self.pos, self.line, self.line_pos);
+				eprintln!("({}, {c:#?}, {} {}, {prev:#?})", self.pos, self.line, self.line_pos);
 				prev = Some(c);
 			}
-		}
-		else if pos < self.pos
-		{
+			if self.pos != 0 && prev == Some('\n') {
+				self.line += 1;
+				self.line_pos = 0;
+			}
+		} else if pos < self.pos {
 			todo!("");
-			self.source.content()
-				.as_str()[pos..self.pos]
+			self.source.content().as_str()[pos..self.pos]
 				.char_indices()
 				.rev()
 				.for_each(|(len, c)| {
 					self.pos -= len;
-					if c == '\n'
-					{
+					if c == '\n' {
 						self.line -= 1;
 					}
 				});
-			self.line_pos = self.source.content()
-				.as_str()[..self.pos]
+			self.line_pos = self.source.content().as_str()[..self.pos]
 				.char_indices()
 				.rev()
 				.find(|(_, c)| *c == '\n')
-				.map(|(line_start, _)| self.pos-line_start)
+				.map(|(line_start, _)| self.pos - line_start)
 				.unwrap_or(0);
 		}
@@ -85,12 +75,11 @@ impl LineCursor
 	}
 }
 
-impl From<&LineCursor> for Cursor
-{
-	fn from(value: &LineCursor) -> Self {
+impl From<&LineCursor> for Cursor {
+	fn from(value: &LineCursor) -> Self {
 		Self {
 			pos: value.pos,
-			source: value.source.clone()
+			source: value.source.clone(),
 		}
 	}
 }
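`at` advances a peekable char iterator while carrying a `prev` character, so the line counter only ticks on the character that follows a newline. A simpler forward-only sketch that counts newlines directly in the traversed slice (hypothetical helper, using the common convention that a newline terminates its line):

// Hypothetical forward-only advance: line/line_pos are derived from the
// newlines in the slice being skipped; positions stay byte-based.
fn advance(cursor: &mut LineCursor, pos: usize) {
	// Panics on a non-char-boundary `pos`, like the original.
	for c in cursor.source.content().as_str()[cursor.pos..pos].chars() {
		if c == '\n' {
			cursor.line += 1;
			cursor.line_pos = 0;
		} else {
			cursor.line_pos += c.len_utf8();
		}
	}
	cursor.pos = pos;
}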

src/lsp/semantic.rs

@@ -1,56 +1,80 @@
 use std::any::Any;
 
-use tower_lsp::lsp_types::{SemanticToken, SemanticTokenType};
+use tower_lsp::lsp_types::SemanticToken;
+use tower_lsp::lsp_types::SemanticTokenType;
 
-use crate::{document::{document::Document, element::Element}, elements::{comment::Comment, paragraph::Paragraph, section::Section}, parser::rule::Rule};
+use crate::document::document::Document;
+use crate::document::element::Element;
+use crate::elements::comment::Comment;
+use crate::elements::paragraph::Paragraph;
+use crate::elements::section::Section;
+use crate::parser::rule::Rule;
 
 use super::parser::LineCursor;
 
-pub trait SemanticProvider: Rule
-{
-	fn get_semantic_tokens(&self, cursor: &LineCursor, match_data: Box<dyn Any>) -> Vec<SemanticToken>;
+pub trait SemanticProvider: Rule {
+	fn get_semantic_tokens(
+		&self,
+		cursor: &LineCursor,
+		match_data: Box<dyn Any>,
+	) -> Vec<SemanticToken>;
 }
 
-pub const LEGEND_TYPE : &[SemanticTokenType] = &[
-	SemanticTokenType::COMMENT,
-	SemanticTokenType::VARIABLE,
-	SemanticTokenType::STRING,
-	SemanticTokenType::PARAMETER,
+pub mod nml_semantic {
+	use tower_lsp::lsp_types::SemanticTokenType;
+
+	pub const SECTION_HEADING: SemanticTokenType = SemanticTokenType::new("type");
+	pub const SECTION_NAME: SemanticTokenType = SemanticTokenType::new("string");
+	pub const REFERENCE: SemanticTokenType = SemanticTokenType::new("event");
+}
+
+pub const LEGEND_TYPE: &[SemanticTokenType] = &[
+	SemanticTokenType::COMMENT,
+	SemanticTokenType::VARIABLE,
+	SemanticTokenType::STRING,
+	SemanticTokenType::PARAMETER,
 ];
 
 // TODO...
-pub fn provide(semantic_tokens: &mut Vec<SemanticToken>, cursor: &mut LineCursor, elem: &Box<dyn Element>) {
-	if cursor.source != elem.location().source() { return }
+pub fn provide(
+	semantic_tokens: &mut Vec<SemanticToken>,
+	cursor: &mut LineCursor,
+	elem: &Box<dyn Element>,
+) {
+	if cursor.source != elem.location().source() {
+		return;
+	}
 
 	let prev = cursor.clone();
 
-	if let Some(comm) = elem.downcast_ref::<Comment>()
-	{
+	/*if let Some(comm) = elem.downcast_ref::<Comment>() {
 		cursor.at(elem.location().start());
-		let delta_start = if cursor.line == prev.line
-		{
+		let delta_start = if cursor.line == prev.line {
 			cursor.line_pos - prev.line_pos
-		} else if cursor.line == 0 { cursor.line_pos }
-		else { cursor.line_pos+1 };
+		} else if cursor.line == 0 {
+			cursor.line_pos
+		} else {
+			cursor.line_pos + 1
+		};
 		semantic_tokens.push(SemanticToken {
-			delta_line: (cursor.line-prev.line) as u32,
+			delta_line: (cursor.line - prev.line) as u32,
 			delta_start: delta_start as u32,
 			length: (elem.location().end() - elem.location().start()) as u32,
 			token_type: 0,
 			token_modifiers_bitset: 0,
 		});
-	}
-	else if let Some(sect) = elem.downcast_ref::<Section>()
-	{
-		eprintln!("section");
-		cursor.at(elem.location().start());
-		let delta_start = if cursor.line == prev.line
-		{
+	} else */if let Some(sect) = elem.downcast_ref::<Section>() {
+		cursor.at(elem.location().start() + 1);
+		eprintln!("section {cursor:#?}");
+		let delta_start = if cursor.line == prev.line {
 			cursor.line_pos - prev.line_pos
-		} else if cursor.line == 0 { cursor.line_pos }
-		else { cursor.line_pos+1 };
+		} else if cursor.line == 0 {
+			cursor.line_pos
+		} else {
+			0
+		};
 		semantic_tokens.push(SemanticToken {
-			delta_line: (cursor.line-prev.line) as u32,
+			delta_line: (cursor.line - prev.line) as u32,
 			delta_start: delta_start as u32,
 			length: (elem.location().end() - elem.location().start()) as u32,
 			token_type: 0,
@@ -59,8 +83,7 @@ pub fn provide(semantic_tokens: &mut Vec<SemanticToken>, cursor: &mut LineCursor
 	}
 }
 
-pub fn semantic_token_from_document(document: &dyn Document) -> Vec<SemanticToken>
-{
+pub fn semantic_token_from_document(document: &dyn Document) -> Vec<SemanticToken> {
 	let mut semantic_tokens = vec![];
 
 	let source = document.source();
@@ -68,11 +91,11 @@ pub fn semantic_token_from_document(document: &dyn Document) -> Vec<SemanticToke
 		pos: 0,
 		line: 0,
 		line_pos: 0,
-		source: source.clone()
+		source: source.clone(),
 	};
 
+	/*
 	semantic_tokens.push(SemanticToken {
-		delta_line: 1,
+		delta_line: 2,
 		delta_start: 1,
 		length: 5,
 		token_type: 0,
@@ -85,22 +108,23 @@ pub fn semantic_token_from_document(document: &dyn Document) -> Vec<SemanticToke
 		length: 5,
 		token_type: 1,
 		token_modifiers_bitset: 0,
-	});
+	});*/
 
-	//document.content.borrow()
-	//	.iter()
-	//	.for_each(|elem| {
-	//		if let Some(paragraph) = elem.downcast_ref::<Paragraph>()
-	//		{
-	//			paragraph.content
-	//				.iter()
-	//				.for_each(|elem| provide(&mut semantic_tokens, &mut cursor, elem));
-	//		}
-	//		else
-	//		{
-	//			provide(&mut semantic_tokens, &mut cursor, elem);
-	//		}
-	//	});
+	document.content().borrow()
+		.iter()
+		.for_each(|elem| {
+			if let Some(container) = elem.as_container()
+			{
+				container
+					.contained()
+					.iter()
+					.for_each(|elem| provide(&mut semantic_tokens, &mut cursor, elem));
+			}
+			else
+			{
+				provide(&mut semantic_tokens, &mut cursor, elem);
+			}
+		});
 
 	semantic_tokens
 }
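The `token_type` pushed with each `SemanticToken` is an index into `LEGEND_TYPE` as registered with the client, so `0` above renders as `COMMENT` and `1` as `VARIABLE`. A small sketch to keep call sites readable (hypothetical helper, not part of this commit):

// Hypothetical: resolve a SemanticTokenType to its index in LEGEND_TYPE,
// so pushes can name the type instead of hard-coding 0 or 1.
fn token_index(ty: &SemanticTokenType) -> u32 {
	LEGEND_TYPE
		.iter()
		.position(|t| t == ty)
		.expect("token type missing from LEGEND_TYPE") as u32
}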

src/parser/langparser.rs

@@ -10,6 +10,7 @@ use super::parser::Parser;
 use super::parser::ParserState;
 use super::parser::ReportColors;
 use super::rule::Rule;
+use super::semantics::Semantics;
 use super::source::Cursor;
 use super::source::Source;
 use super::source::Token;
@@ -59,6 +60,16 @@ impl Parser for LangParser {
 	) -> (Box<dyn Document<'doc> + 'doc>, ParserState<'p, 'a>) {
 		let doc = LangDocument::new(source.clone(), parent);
 
+		// Insert semantics into state
+		if let Some(semantics) = state.shared.semantics.as_ref()
+		{
+			let mut b = semantics.borrow_mut();
+			if !b.contains_key(&source)
+			{
+				b.insert(source.clone(), Semantics::new(source.clone()));
+			}
+		}
+
 		let content = source.content();
 		let mut cursor = Cursor::new(0usize, doc.source()); // Cursor in file
src/parser/mod.rs

@@ -7,3 +7,4 @@ pub mod util;
 pub mod style;
 pub mod layout;
 pub mod customstyle;
+pub mod semantics;

src/parser/parser.rs

@@ -2,6 +2,7 @@ use ariadne::Label;
 use ariadne::Report;
 use std::any::Any;
 use std::cell::RefCell;
+use std::collections::HashMap;
 use std::collections::HashSet;
 use std::ops::Range;
 use std::rc::Rc;
@@ -10,6 +11,7 @@ use unicode_segmentation::UnicodeSegmentation;
 use super::customstyle::CustomStyleHolder;
 use super::layout::LayoutHolder;
 use super::rule::Rule;
+use super::semantics::Semantics;
 use super::source::Cursor;
 use super::source::Source;
 use super::state::RuleStateHolder;
@@ -69,17 +71,21 @@ pub struct SharedState {
 	/// The custom styles
 	pub custom_styles: RefCell<CustomStyleHolder>,
 
+	/// The semantic map
+	pub semantics: Option<RefCell<HashMap<Rc<dyn Source>, Semantics>>>,
 }
 
 impl SharedState {
 	/// Construct a new empty shared state
-	pub(self) fn new(parser: &dyn Parser) -> Self {
+	pub(self) fn new(parser: &dyn Parser, enable_semantics: bool) -> Self {
 		let s = Self {
 			rule_state: RefCell::new(RuleStateHolder::default()),
 			kernels: RefCell::new(KernelHolder::default()),
 			styles: RefCell::new(StyleHolder::default()),
 			layouts: RefCell::new(LayoutHolder::default()),
 			custom_styles: RefCell::new(CustomStyleHolder::default()),
+			semantics: enable_semantics.then_some(RefCell::new(HashMap::new())),
 		};
 
 		// Register default kernel
@@ -128,7 +134,25 @@ impl<'a, 'b> ParserState<'a, 'b> {
 		let shared = if let Some(parent) = &parent {
 			parent.shared.clone()
 		} else {
-			Rc::new(SharedState::new(parser))
+			Rc::new(SharedState::new(parser, false))
+		};
+
+		Self {
+			parser,
+			parent,
+			matches: RefCell::new(matches),
+			shared,
+		}
+	}
+
+	/// Constructs a new state with semantics enabled
+	/// See [`ParserState::new`] for more information
+	pub fn new_with_semantics(parser: &'a dyn Parser, parent: Option<&'a ParserState<'a, 'b>>) -> Self {
+		let matches = parser.rules().iter().map(|_| (0, None)).collect::<Vec<_>>();
+		let shared = if let Some(parent) = &parent {
+			parent.shared.clone()
+		} else {
+			Rc::new(SharedState::new(parser, true))
 		};
 
 		Self {

src/parser/semantics.rs (new file)

@@ -0,0 +1,48 @@
+use std::{ops::Range, rc::Rc};
+
+use tower_lsp::lsp_types::SemanticToken;
+
+use super::source::{LineCursor, Source};
+
+/// Semantics for a buffer
+#[derive(Debug)]
+pub struct Semantics
+{
+	/// The current cursor
+	cursor: LineCursor,
+
+	/// Semantic tokens
+	tokens: Vec<SemanticToken>,
+}
+
+impl Semantics
+{
+	pub fn new(source: Rc<dyn Source>) -> Semantics
+	{
+		Self {
+			cursor: LineCursor::new(source),
+			tokens: vec![]
+		}
+	}
+
+	pub fn add(&mut self, range: Range<usize>, token_type: u32, token_modifier: u32)
+	{
+		let current = self.cursor.clone();
+		self.cursor.move_to(range.start);
+
+		let delta_line = self.cursor.line - current.line;
+		let delta_start = if delta_line == 0
+		{
+			self.cursor.line_pos - current.line_pos
+		} else { self.cursor.line_pos };
+
+		self.tokens.push(SemanticToken{
+			delta_line: delta_line as u32,
+			delta_start: delta_start as u32,
+			length: 10,
+			token_type,
+			token_modifiers_bitset: token_modifier,
+		});
+	}
+}
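`add` converts an absolute byte range into the LSP delta encoding: `delta_line` is measured from the previously pushed token, and `delta_start` is relative only while the line is unchanged. The `length: 10` is a placeholder; the real length would presumably come from `range`. A usage sketch, assuming a single-line ASCII source:

// Hypothetical usage: deltas are relative to the previously pushed token.
let mut sems = Semantics::new(source.clone());
sems.add(4..8, 0, 0); // first token: delta_line = 0, delta_start = 4
sems.add(10..14, 1, 0); // same line: delta_line = 0, delta_start = 10 - 4 = 6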

src/parser/source.rs

@@ -149,6 +149,87 @@ impl Clone for Cursor {
 	}
 }
 
+/// Cursor type used for the language server
+#[derive(Debug, Clone)]
+pub struct LineCursor {
+	pub pos: usize,
+	pub line: usize,
+	pub line_pos: usize,
+	pub source: Rc<dyn Source>,
+}
+
+impl LineCursor {
+	/// Creates a [`LineCursor`] at the beginning of the source
+	pub fn new(source: Rc<dyn Source>) -> LineCursor
+	{
+		Self {
+			pos: 0,
+			line: 0,
+			line_pos: 0,
+			source,
+		}
+	}
+
+	/// Moves [`LineCursor`] to absolute position
+	///
+	/// # Error
+	/// This function will panic if [`pos`] is not utf8 aligned
+	pub fn move_to(&mut self, pos: usize) {
+		if pos > self.pos {
+			let start = self.pos;
+			eprintln!("slice{{{}}}, want={pos}", &self.source.content().as_str()[start..pos]);
+			let mut it = self.source.content().as_str()[start..] // pos+1
+				.chars()
+				.peekable();
+
+			let mut prev = self.source.content().as_str()[..start + 1]
+				.chars()
+				.rev()
+				.next();
+			eprintln!("prev={prev:#?}");
+			while self.pos < pos {
+				let c = it.next().unwrap();
+				let len = c.len_utf8();
+
+				if self.pos != 0 && prev == Some('\n') {
+					self.line += 1;
+					self.line_pos = 0;
+				} else {
+					self.line_pos += len;
+				}
+				self.pos += len;
+
+				eprintln!("({}, {c:#?}, {} {}, {prev:#?})", self.pos, self.line, self.line_pos);
+				prev = Some(c);
+			}
+			if self.pos != 0 && prev == Some('\n') {
+				self.line += 1;
+				self.line_pos = 0;
+			}
+		} else if pos < self.pos {
+			todo!("Going back is not supported");
+			self.source.content().as_str()[pos..self.pos]
+				.char_indices()
+				.rev()
+				.for_each(|(len, c)| {
+					self.pos -= len;
+					if c == '\n' {
+						self.line -= 1;
+					}
+				});
+			self.line_pos = self.source.content().as_str()[..self.pos]
+				.char_indices()
+				.rev()
+				.find(|(_, c)| *c == '\n')
+				.map(|(line_start, _)| self.pos - line_start)
+				.unwrap_or(0);
+		}
+
+		// May fail if pos is not utf8-aligned
+		assert_eq!(pos, self.pos);
+	}
+}
+
 #[derive(Debug, Clone)]
 pub struct Token {
 	pub range: Range<usize>,
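The repeated `prev == Some('\n')` check after the loop makes a move that stops just past a newline land at the start of the next line. A worked example, assuming a source whose content is "ab\ncd":

// Hypothetical walkthrough on "ab\ncd": byte 3 is 'c', the first character
// after the newline, so the cursor ends up at line 1, column 0.
let mut cursor = LineCursor::new(source.clone());
cursor.move_to(3);
assert_eq!((cursor.line, cursor.line_pos), (1, 0));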

src/main.rs

@@ -1,11 +1,10 @@
-#![feature(char_indices_offset)]
-mod document;
-mod compiler;
-mod parser;
-mod elements;
-mod lua;
 mod cache;
+mod compiler;
+mod document;
+mod elements;
 mod lsp;
+mod lua;
+mod parser;
 
 use std::collections::HashMap;
 use std::rc::Rc;
@@ -14,21 +13,25 @@ use std::sync::Arc;
 use dashmap::DashMap;
 use document::document::Document;
 use document::element::Element;
-use lsp::semantic::{semantic_token_from_document, LEGEND_TYPE};
+use lsp::semantic::semantic_token_from_document;
 use parser::langparser::LangParser;
 use parser::parser::Parser;
+use parser::parser::ParserState;
 use parser::source::SourceFile;
 use tower_lsp::jsonrpc::Result;
 use tower_lsp::lsp_types::*;
-use tower_lsp::{Client, LanguageServer, LspService, Server};
+use tower_lsp::Client;
+use tower_lsp::LanguageServer;
+use tower_lsp::LspService;
+use tower_lsp::Server;
 
 #[derive(Debug)]
 struct Backend {
 	client: Client,
 	document_map: DashMap<String, String>,
 	//ast_map: DashMap<String, Vec<Box<dyn Element>>>,
 	//variables: DashMap<String, HashMap<String, Arc<dyn Variable + Send + Sync + 'static>>>,
 	semantic_token_map: DashMap<String, Vec<SemanticToken>>,
 }
 
 #[derive(Debug)]
@@ -44,27 +47,24 @@ impl Backend {
 		// TODO: Create a custom parser for the lsp
 		// Which will require a dyn Document to work
-		let source = SourceFile::with_content(
-			params.uri.to_string(),
-			params.text.clone(),
-			None);
+		let source = Rc::new(SourceFile::with_content(params.uri.to_string(), params.text.clone(), None));
 		let parser = LangParser::default();
-		let doc = parser.parse(Rc::new(source), None);
+		let (doc, state) = parser.parse(ParserState::new_with_semantics(&parser, None), source.clone(), None);
 
-		//let semantic_tokens = semantic_token_from_document(&doc);
-		//self.semantic_token_map
-		//	.insert(params.uri.to_string(), semantic_tokens);
+		self.semantic_token_map
+			.insert(params.uri.to_string(),
+				state.shared.semantics.);
 	}
 }
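The `state.shared.semantics.);` insert is an unfinished stub in this commit. One way it might be completed, assuming `Semantics` later exposes a public `tokens()` accessor (hypothetical; the field is private here):

// Hypothetical completion: copy the tokens accumulated for this source out
// of the shared semantics map and cache them per URI.
if let Some(semantics) = state.shared.semantics.as_ref() {
	let key: Rc<dyn parser::source::Source> = source.clone();
	if let Some(sems) = semantics.borrow().get(&key) {
		self.semantic_token_map
			.insert(params.uri.to_string(), sems.tokens().to_vec());
	}
}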
 #[tower_lsp::async_trait]
 impl LanguageServer for Backend {
 	async fn initialize(&self, _: InitializeParams) -> Result<InitializeResult> {
 		Ok(InitializeResult {
 			server_info: None,
 			capabilities: ServerCapabilities {
 				text_document_sync: Some(TextDocumentSyncCapability::Kind(
 					TextDocumentSyncKind::FULL,
 				)),
 				completion_provider: Some(CompletionOptions {
 					resolve_provider: Some(false),
@@ -74,44 +74,42 @@ impl LanguageServer for Backend {
 					completion_item: None,
 				}),
 				semantic_tokens_provider: Some(
 					SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
 						SemanticTokensRegistrationOptions {
 							text_document_registration_options: {
 								TextDocumentRegistrationOptions {
 									document_selector: Some(vec![DocumentFilter {
 										language: Some("nml".to_string()),
 										scheme: Some("file".to_string()),
 										pattern: None,
 									}]),
 								}
 							},
 							semantic_tokens_options: SemanticTokensOptions {
 								work_done_progress_options: WorkDoneProgressOptions::default(),
 								legend: SemanticTokensLegend {
-									token_types: LEGEND_TYPE.into(),
+									token_types: lsp::semantic::LEGEND_TYPE.into(),
 									token_modifiers: vec![],
 								},
 								range: None, //Some(true),
 								full: Some(SemanticTokensFullOptions::Bool(true)),
 							},
 							static_registration_options: StaticRegistrationOptions::default(),
 						},
 					),
 				),
 				..ServerCapabilities::default()
 			},
 		})
 	}
 
 	async fn initialized(&self, _: InitializedParams) {
 		self.client
 			.log_message(MessageType::INFO, "server initialized!")
 			.await;
 	}
 
-	async fn shutdown(&self) -> Result<()> {
-		Ok(())
-	}
+	async fn shutdown(&self) -> Result<()> { Ok(()) }
 
 	async fn did_open(&self, params: DidOpenTextDocumentParams) {
 		self.client
@@ -132,16 +130,16 @@ impl LanguageServer for Backend {
 			.await
 	}
 
 	async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
 		let uri = params.text_document_position.text_document.uri;
 		let position = params.text_document_position.position;
 		let completions = || -> Option<Vec<CompletionItem>> {
 			let mut ret = Vec::with_capacity(0);
 
 			Some(ret)
 		}();
 
 		Ok(completions.map(CompletionResponse::Array))
 	}
 
 	async fn semantic_tokens_full(
 		&self,
@@ -153,10 +151,9 @@ impl LanguageServer for Backend {
 			.await;
 
 		if let Some(semantic_tokens) = self.semantic_token_map.get(&uri) {
-			let data = semantic_tokens.iter()
-				.filter_map(|token| {
-					Some(token.clone())
-				})
+			let data = semantic_tokens
+				.iter()
+				.filter_map(|token| Some(token.clone()))
 				.collect::<Vec<_>>();
 
 			return Ok(Some(SemanticTokensResult::Tokens(SemanticTokens {
@@ -170,15 +167,13 @@
 #[tokio::main]
 async fn main() {
 	let stdin = tokio::io::stdin();
 	let stdout = tokio::io::stdout();
 
-	let (service, socket) = LspService::new(
-		|client|
-		Backend {
-			client,
-			document_map: DashMap::new(),
-			semantic_token_map: DashMap::new(),
-		});
+	let (service, socket) = LspService::new(|client| Backend {
+		client,
+		document_map: DashMap::new(),
+		semantic_token_map: DashMap::new(),
+	});
 	Server::new(stdin, stdout, socket).serve(service).await;
 }