Inlay hints

This commit is contained in:
ef3d0c3e 2024-10-25 10:05:13 +02:00
parent 430c48a61a
commit 23f9066f75
23 changed files with 298 additions and 96 deletions

View file

@ -369,16 +369,9 @@ impl RegexRule for CodeRule {
let code_lang = match matches.get(2) {
None => "Plain Text".to_string(),
Some(lang) => {
let code_lang = lang.as_str().trim_start().trim_end().to_string();
let mut code_lang = lang.as_str().trim_start().trim_end().to_string();
if code_lang.is_empty() {
report_err!(
&mut reports,
token.source(),
"Missing Code Language".into(),
span(lang.range(), "No language specified".into())
);
return reports;
code_lang = "Plain Text".into();
}
if Code::get_syntaxes()
.find_syntax_by_name(code_lang.as_str())
@ -511,7 +504,7 @@ impl RegexRule for CodeRule {
}
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
let range = matches
.get(0)

View file

@ -91,7 +91,7 @@ impl RegexRule for CommentRule {
);
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
let comment = matches.get(1).unwrap().range();
sems.add(comment.start - 2..comment.end, tokens.comment);

View file

@ -8,6 +8,7 @@ use std::rc::Rc;
use std::sync::Arc;
use ariadne::Fmt;
use lsp::semantic::Semantics;
use mlua::Error::BadArgument;
use mlua::Function;
use mlua::Lua;
@ -51,14 +52,14 @@ impl CustomStyle for LuaCustomStyle {
let kernel: Ref<'_, Kernel> =
Ref::map(state.shared.kernels.borrow(), |b| b.get("main").unwrap());
//let kernel = RefMut::map(parser_state.shared.kernels.borrow(), |ker| ker.get("main").unwrap());
let ctx = KernelContext {
location: location.clone(),
let mut ctx = KernelContext::new(
location.clone(),
state,
document,
};
);
let mut reports = vec![];
kernel.run_with_context(ctx, |lua| {
kernel.run_with_context(&mut ctx, |lua| {
let chunk = lua.load(self.start.as_str());
if let Err(err) = chunk.eval::<()>() {
report_err!(
@ -85,14 +86,14 @@ impl CustomStyle for LuaCustomStyle {
) -> Vec<Report> {
let kernel: Ref<'_, Kernel> =
Ref::map(state.shared.kernels.borrow(), |b| b.get("main").unwrap());
let ctx = KernelContext {
location: location.clone(),
let mut ctx = KernelContext::new(
location.clone(),
state,
document,
};
);
let mut reports = vec![];
kernel.run_with_context(ctx, |lua| {
kernel.run_with_context(&mut ctx, |lua| {
let chunk = lua.load(self.end.as_str());
if let Err(err) = chunk.eval::<()>() {
report_err!(
@ -336,6 +337,12 @@ impl Rule for CustomStyleRule {
style.on_start(token.clone(), state, document)
};
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.lsp)
{
sems.add(token.range.clone(), tokens.customstyle_marker);
}
(cursor.at(token.end()), unsafe {
std::mem::transmute(reports)
})

View file

@ -344,7 +344,7 @@ impl RegexRule for GraphRule {
);
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
let range = token.range;
sems.add(

View file

@ -180,7 +180,7 @@ impl RegexRule for ImportRule {
}
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
// @import
let import =

View file

@ -497,7 +497,7 @@ impl RegexRule for LayoutRule {
);
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
let start = matches
.get(0)
@ -581,7 +581,7 @@ impl RegexRule for LayoutRule {
};
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
let start = matches
.get(0)
@ -668,7 +668,7 @@ impl RegexRule for LayoutRule {
layout_state.stack.pop();
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
let start = matches
.get(0)

View file

@ -145,7 +145,7 @@ impl RegexRule for LinkRule {
}
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
sems.add(
display.range().start - 1..display.range().start,
@ -215,7 +215,7 @@ impl RegexRule for LinkRule {
);
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
sems.add(
matches.get(1).unwrap().end()..matches.get(1).unwrap().end() + 1,

View file

@ -336,7 +336,7 @@ impl Rule for ListRule {
);
if let Some((sems, tokens)) =
Semantics::from_source(cursor.source.clone(), &state.shared.semantics)
Semantics::from_source(cursor.source.clone(), &state.shared.lsp)
{
sems.add(captures.get(1).unwrap().range(), tokens.list_bullet);
if let Some(props) = captures.get(2).map(|m| m.range()) {

View file

@ -210,7 +210,7 @@ impl RegexRule for RawRule {
);
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
let range = matches.get(0).unwrap().range();
sems.add(range.start..range.start + 2, tokens.raw_sep);

View file

@ -327,7 +327,7 @@ impl RegexRule for ReferenceRule {
}
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
let link = matches.get(1).unwrap().range();
sems.add(link.start - 2..link.start - 1, tokens.reference_operator);
@ -357,7 +357,7 @@ impl RegexRule for ReferenceRule {
);
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
let link = matches.get(1).unwrap().range();
sems.add(link.start - 2..link.start - 1, tokens.reference_operator);
@ -368,7 +368,7 @@ impl RegexRule for ReferenceRule {
}
if let (Some((sems, tokens)), Some(props)) = (
Semantics::from_source(token.source(), &state.shared.semantics),
Semantics::from_source(token.source(), &state.shared.lsp),
matches.get(2).map(|m| m.range()),
) {
sems.add(props.start - 1..props.start, tokens.reference_props_sep);

View file

@ -14,6 +14,7 @@ use crate::parser::source::VirtualSource;
use crate::parser::util;
use crate::parser::util::escape_source;
use ariadne::Fmt;
use lsp::hints::Hints;
use mlua::Lua;
use regex::Captures;
use regex::Regex;
@ -141,14 +142,13 @@ impl RegexRule for ScriptRule {
"Invalid Kernel Code".into(),
span(script_range, "Kernel code is empty".into())
);
return reports;
}
let execute = |lua: &Lua| {
let chunk = lua.load(source.content()).set_name(kernel_name);
if index == 0
// Exec
// Exec @<>@
{
if let Err(e) = chunk.exec() {
report_err!(
@ -160,10 +160,9 @@ impl RegexRule for ScriptRule {
format!("Kernel execution failed:\n{}", e)
)
);
return reports;
}
} else
// Eval
// Eval %<>%
{
// Validate kind
let kind = match matches.get(2) {
@ -177,7 +176,7 @@ impl RegexRule for ScriptRule {
"Invalid Kernel Code Kind".into(),
span(kind.range(), msg)
);
return reports;
return;
}
},
};
@ -246,18 +245,18 @@ impl RegexRule for ScriptRule {
}
}
}
reports
};
let ctx = KernelContext {
location: Token::new(0..source.content().len(), source.clone()),
let mut ctx = KernelContext::new(
Token::new(0..source.content().len(), source.clone()),
state,
document,
};
);
kernel.run_with_context(&mut ctx, execute);
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
let range = matches
.get(0)
@ -288,7 +287,21 @@ impl RegexRule for ScriptRule {
}
sems.add(range.end - 2..range.end, tokens.script_sep);
}
kernel.run_with_context(ctx, execute)
if let Some(hints) = Hints::from_source(token.source(), &state.shared.lsp)
{
let mut label = String::new();
ctx.redirects.iter()
.for_each(|redir| {
label += format!("{}: {} ", redir.source, redir.content).as_str();
});
if !label.is_empty()
{
label.pop();
hints.add(matches.get(0).unwrap().end(), label);
}
}
reports
}
}

View file

@ -336,7 +336,7 @@ impl RegexRule for SectionRule {
);
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
sems.add(matches.get(1).unwrap().range(), tokens.section_heading);
if let Some(reference) = matches.get(2) {

View file

@ -196,7 +196,7 @@ impl RegexRule for StyleRule {
);
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
sems.add(token.start()..token.end(), tokens.style_marker);
}

View file

@ -425,7 +425,7 @@ impl RegexRule for TexRule {
);
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
let range = token.range;
sems.add(

View file

@ -254,7 +254,7 @@ impl RegexRule for VariableRule {
}
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
let name = matches.get(2).unwrap().range();
if let Some(kind) = matches.get(1).map(|m| m.range()) {
@ -426,7 +426,7 @@ impl RegexRule for VariableSubstitutionRule {
variable.parse(state, token.clone(), document);
if let Some((sems, tokens)) =
Semantics::from_source(token.source(), &state.shared.semantics)
Semantics::from_source(token.source(), &state.shared.lsp)
{
let name = matches.get(1).unwrap().range();
sems.add(name.start - 1..name.start, tokens.variable_sub_sep);

25
src/lsp/data.rs Normal file
View file

@ -0,0 +1,25 @@
use std::collections::HashMap;
use std::rc::Rc;
use crate::parser::source::Source;
use super::hints::HintsData;
use super::semantic::SemanticsData;
use super::semantic::Tokens;
/// Aggregated language-server state for a parse session.
///
/// One instance is shared across rules; per-source data is keyed by the
/// originating [`Source`].
#[derive(Debug)]
pub struct LSPData {
/// Semantic token definitions (type/modifier id pairs, see `Tokens`)
pub semantic_tokens: Tokens,
/// Per-source semantic token data
pub semantic_data: HashMap<Rc<dyn Source>, SemanticsData>,
/// Per-source inlay hint data
pub inlay_hints: HashMap<Rc<dyn Source>, HintsData>,
}
impl LSPData {
/// Creates an empty LSP data store with freshly initialized semantic
/// token definitions and no per-source data.
pub fn new() -> Self {
Self {
semantic_tokens: Tokens::new(),
semantic_data: HashMap::new(),
inlay_hints: HashMap::new(),
}
}
}

/// `new()` takes no arguments, so `Default` is provided for idiomatic use
/// (addresses clippy::new_without_default); callers of `new()` are unaffected.
impl Default for LSPData {
fn default() -> Self {
Self::new()
}
}

102
src/lsp/hints.rs Normal file
View file

@ -0,0 +1,102 @@
use std::{cell::{Ref, RefCell}, rc::Rc};
use tower_lsp::lsp_types::InlayHint;
use crate::parser::source::{LineCursor, Source, SourceFile, VirtualSource};
use super::data::LSPData;
/// Per file hints
/// Per-file inlay hint accumulator.
#[derive(Debug)]
pub struct HintsData
{
/// The current cursor, used to translate byte offsets into
/// line/column positions when a hint is added
cursor: RefCell<LineCursor>,
/// The inlay hints collected for this source
pub hints: RefCell<Vec<InlayHint>>,
}
impl HintsData {
pub fn new(source: Rc<dyn Source>) -> Self {
Self {
cursor: RefCell::new(LineCursor::new(source)),
hints: RefCell::new(vec![]),
}
}
}
/// A borrowed view over the hint data of a resolved source file.
#[derive(Debug)]
pub struct Hints<'a> {
pub(self) hints: Ref<'a, HintsData>,
// The source used when resolving the parent source
// NOTE(review): kept for parity with `Semantics`, but not read after
// construction in this impl — confirm it is needed
pub(self) original_source: Rc<dyn Source>,
/// The resolved parent source
pub(self) source: Rc<dyn Source>,
}
impl<'a> Hints<'a> {
/// Walks `source` up its chain of virtual-source parents until a
/// `SourceFile` is reached, then borrows that file's `HintsData` from
/// `lsp`.
///
/// Returns `None` for `:LUA:` virtual sources (generated code has no
/// user-visible positions) and when no hint data is registered for the
/// resolved file.
fn from_source_impl(
source: Rc<dyn Source>,
lsp: &'a Option<RefCell<LSPData>>,
original_source: Rc<dyn Source>,
) -> Option<Self> {
if source.name().starts_with(":LUA:") && source.downcast_ref::<VirtualSource>().is_some() {
return None;
}
// A virtual source that knows its location recurses into its parent
if let Some(location) = source
.clone()
.downcast_rc::<VirtualSource>()
.ok()
.as_ref()
.map(|parent| parent.location())
.unwrap_or(None)
{
return Self::from_source_impl(location.source(), lsp, original_source);
} else if let Ok(source) = source.clone().downcast_rc::<SourceFile>() {
// Borrow the per-file hint data; `filter_map` yields Err (-> None)
// when no `HintsData` entry exists for this file.
// `unwrap()` is upheld by `from_source`, which checks `lsp.is_some()`.
return Ref::filter_map(
lsp.as_ref().unwrap().borrow(),
|lsp: &LSPData| {
lsp.inlay_hints.get(&(source.clone() as Rc<dyn Source>))
},
)
.ok()
.map(|hints| {
Self {
hints,
source,
original_source,
}
});
}
None
}
/// Public entry point: resolves `source` to its backing file and returns
/// a hint accessor, or `None` when the LSP is disabled or no hint data
/// exists for that file.
pub fn from_source(
source: Rc<dyn Source>,
lsp: &'a Option<RefCell<LSPData>>,
) -> Option<Self> {
if lsp.is_none() {
return None;
}
Self::from_source_impl(source.clone(), lsp, source)
}
/// Appends an inlay hint with `label` at byte offset `position`.
///
/// NOTE(review): `position` is fed directly to the resolved file's
/// cursor without translating through `original_source` — confirm
/// callers always pass offsets in the resolved file's coordinates.
pub fn add(&self, position: usize, label: String)
{
// Advance the shared cursor to obtain the line/column of `position`
let mut cursor = self.hints.cursor.borrow_mut();
cursor.move_to(position);
self.hints.hints.borrow_mut().push(InlayHint {
position: tower_lsp::lsp_types::Position { line: cursor.line as u32, character: cursor.line_pos as u32 },
label: tower_lsp::lsp_types::InlayHintLabel::String(label),
kind: Some(tower_lsp::lsp_types::InlayHintKind::PARAMETER),
text_edits: None,
tooltip: None,
padding_left: None,
padding_right: None,
data: None,
})
}
}

View file

@ -1 +1,3 @@
pub mod semantic;
pub mod data;
pub mod hints;

View file

@ -1,6 +1,5 @@
use std::cell::Ref;
use std::cell::RefCell;
use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
@ -14,6 +13,8 @@ use crate::parser::source::SourceFile;
use crate::parser::source::SourcePosition;
use crate::parser::source::VirtualSource;
use super::data::LSPData;
pub const TOKEN_TYPE: &[SemanticTokenType] = &[
SemanticTokenType::NAMESPACE,
SemanticTokenType::TYPE,
@ -102,6 +103,8 @@ pub struct Tokens {
pub style_marker: (u32, u32),
pub customstyle_marker: (u32, u32),
pub import_import: (u32, u32),
pub import_as_sep: (u32, u32),
pub import_as: (u32, u32),
@ -179,6 +182,8 @@ impl Tokens {
style_marker: token!("operator"),
customstyle_marker: token!("operator"),
import_import: token!("macro"),
import_as_sep: token!("operator"),
import_as: token!("operator"),
@ -264,7 +269,7 @@ impl SemanticsData {
#[derive(Debug)]
pub struct Semantics<'a> {
pub(self) sems: Ref<'a, SemanticsData>,
// TODO
// The source used when resolving the parent source
pub(self) original_source: Rc<dyn Source>,
/// The resolved parent source
pub(self) source: Rc<dyn Source>,
@ -273,7 +278,7 @@ pub struct Semantics<'a> {
impl<'a> Semantics<'a> {
fn from_source_impl(
source: Rc<dyn Source>,
semantics: &'a Option<RefCell<SemanticsHolder>>,
lsp: &'a Option<RefCell<LSPData>>,
original_source: Rc<dyn Source>,
) -> Option<(Self, Ref<'a, Tokens>)> {
if source.name().starts_with(":LUA:") && source.downcast_ref::<VirtualSource>().is_some() {
@ -288,12 +293,12 @@ impl<'a> Semantics<'a> {
.map(|parent| parent.location())
.unwrap_or(None)
{
return Self::from_source_impl(location.source(), semantics, original_source);
return Self::from_source_impl(location.source(), lsp, original_source);
} else if let Ok(source) = source.clone().downcast_rc::<SourceFile>() {
return Ref::filter_map(
semantics.as_ref().unwrap().borrow(),
|semantics: &SemanticsHolder| {
semantics.sems.get(&(source.clone() as Rc<dyn Source>))
lsp.as_ref().unwrap().borrow(),
|lsp: &LSPData| {
lsp.semantic_data.get(&(source.clone() as Rc<dyn Source>))
},
)
.ok()
@ -305,8 +310,8 @@ impl<'a> Semantics<'a> {
original_source,
},
Ref::map(
semantics.as_ref().unwrap().borrow(),
|semantics: &SemanticsHolder| &semantics.tokens,
lsp.as_ref().unwrap().borrow(),
|lsp: &LSPData| &lsp.semantic_tokens,
),
)
});
@ -316,12 +321,12 @@ impl<'a> Semantics<'a> {
pub fn from_source(
source: Rc<dyn Source>,
semantics: &'a Option<RefCell<SemanticsHolder>>,
lsp: &'a Option<RefCell<LSPData>>,
) -> Option<(Self, Ref<'a, Tokens>)> {
if semantics.is_none() {
if lsp.is_none() {
return None;
}
Self::from_source_impl(source.clone(), semantics, source)
Self::from_source_impl(source.clone(), lsp, source)
}
pub fn add(&self, range: Range<usize>, token: (u32, u32)) {
@ -365,42 +370,27 @@ impl<'a> Semantics<'a> {
}
}
#[derive(Debug)]
pub struct SemanticsHolder {
pub tokens: Tokens,
pub sems: HashMap<Rc<dyn Source>, SemanticsData>,
}
impl SemanticsHolder {
pub fn new() -> Self {
Self {
tokens: Tokens::new(),
sems: HashMap::new(),
}
}
}
#[cfg(test)]
pub mod tests {
#[macro_export]
macro_rules! validate_semantics {
($state:expr, $source:expr, $idx:expr,) => {};
($state:expr, $source:expr, $idx:expr, $token_name:ident { $($field:ident == $value:expr),* }; $($tail:tt)*) => {{
let token = $state.shared.semantics
let token = $state.shared.lsp
.as_ref()
.unwrap()
.borrow()
.sems
.semantic_data
.get(&($source as std::rc::Rc<dyn crate::parser::source::Source>))
.unwrap()
.tokens
.borrow()
[$idx];
let token_type = $state.shared.semantics
let token_type = $state.shared.lsp
.as_ref()
.unwrap()
.borrow()
.tokens
.semantic_tokens
.$token_name;
let found_token = (token.token_type, token.token_modifiers_bitset);

View file

@ -8,14 +8,32 @@ use crate::parser::parser::Parser;
use crate::parser::parser::ParserState;
use crate::parser::source::Token;
/// Redirected data from lua execution
pub struct KernelRedirect
{
/// Message source e.g print()
pub source: String,
/// Message content
pub content: String,
}
pub struct KernelContext<'a, 'b, 'c> {
pub location: Token,
pub state: &'a ParserState<'a, 'b>,
pub document: &'c dyn Document<'c>,
pub redirects: Vec<KernelRedirect>,
}
impl<'a, 'b, 'c> KernelContext<'a, 'b, 'c> {
pub fn new(location: Token, state: &'a ParserState<'a, 'b>, document: &'c dyn Document<'c>) -> Self {
Self { location, state, document, redirects: vec![] }
}
}
thread_local! {
pub static CTX: RefCell<Option<KernelContext<'static, 'static, 'static>>> = const { RefCell::new(None) };
pub static CTX: RefCell<Option<&'static mut KernelContext<'static, 'static, 'static>>> = const { RefCell::new(None) };
}
#[derive(Debug)]
@ -42,6 +60,18 @@ impl Kernel {
lua.globals().set("nml", nml_table).unwrap();
}
lua.globals().set("print", lua.create_function(|_, msg: String| {
CTX.with_borrow_mut(|ctx| {
ctx.as_mut().map(|ctx| {
ctx.redirects.push(KernelRedirect {
source: "print".into(),
content: msg,
});
});
});
Ok(())
}).unwrap()).unwrap();
Self { lua }
}
@ -49,10 +79,11 @@ impl Kernel {
///
/// This is the only way lua code should be run, because exported
/// functions may require the context in order to operate
pub fn run_with_context<T, F>(&self, context: KernelContext, f: F) -> T
pub fn run_with_context<T, F>(&self, context: &mut KernelContext, f: F) -> T
where
F: FnOnce(&Lua) -> T,
{
// Redirects
CTX.set(Some(unsafe { std::mem::transmute(context) }));
let ret = f(&self.lua);
CTX.set(None);

View file

@ -5,6 +5,7 @@ use crate::document::document::Document;
use crate::document::element::DocumentEnd;
use crate::document::langdocument::LangDocument;
use crate::elements::text::Text;
use crate::lsp::hints::HintsData;
use crate::lsp::semantic::SemanticsData;
use super::parser::ParseMode;
@ -81,16 +82,20 @@ impl<'b> Parser for LangParser<'b> {
) -> (Box<dyn Document<'doc> + 'doc>, ParserState<'p, 'a>) {
let doc = LangDocument::new(source.clone(), parent);
// Insert semantics into state
if let (Some(_), Some(semantics)) = (
// Insert lsp data into state
if let (Some(_), Some(lsp)) = (
source.clone().downcast_rc::<SourceFile>().ok(),
state.shared.semantics.as_ref(),
state.shared.lsp.as_ref(),
) {
let mut b = semantics.borrow_mut();
if !b.sems.contains_key(&source) {
b.sems
let mut b = lsp.borrow_mut();
if !b.semantic_data.contains_key(&source) {
b.semantic_data
.insert(source.clone(), SemanticsData::new(source.clone()));
}
if !b.inlay_hints.contains_key(&source) {
b.inlay_hints
.insert(source.clone(), HintsData::new(source.clone()));
}
}
let content = source.content();

View file

@ -17,7 +17,7 @@ use crate::document::element::ContainerElement;
use crate::document::element::ElemKind;
use crate::document::element::Element;
use crate::elements::paragraph::Paragraph;
use crate::lsp::semantic::SemanticsHolder;
use crate::lsp::data::LSPData;
use crate::lua::kernel::Kernel;
use crate::lua::kernel::KernelHolder;
use ariadne::Color;
@ -66,8 +66,8 @@ pub struct SharedState {
/// The custom styles
pub custom_styles: RefCell<CustomStyleHolder>,
/// The semantics
pub semantics: Option<RefCell<SemanticsHolder>>,
/// The lsp data
pub lsp: Option<RefCell<LSPData>>,
}
impl SharedState {
@ -79,7 +79,7 @@ impl SharedState {
styles: RefCell::new(StyleHolder::default()),
layouts: RefCell::new(LayoutHolder::default()),
custom_styles: RefCell::new(CustomStyleHolder::default()),
semantics: enable_semantics.then_some(RefCell::new(SemanticsHolder::new())),
lsp: enable_semantics.then_some(RefCell::new(LSPData::new())),
};
// Register default kernel

View file

@ -27,6 +27,7 @@ struct Backend {
document_map: DashMap<String, String>,
semantic_token_map: DashMap<String, Vec<SemanticToken>>,
diagnostic_map: DashMap<String, Vec<Diagnostic>>,
hints_map: DashMap<String, Vec<InlayHint>>,
}
#[derive(Debug)]
@ -47,10 +48,13 @@ impl Backend {
params.text.clone(),
None,
));
// Diagnostics
self.diagnostic_map.clear();
let parser = LangParser::new(false, Box::new(
|_colors, reports| Report::reports_to_diagnostics(&self.diagnostic_map, reports)
));
// Parse
let (_doc, state) = parser.parse(
ParserState::new_with_semantics(&parser, None),
source.clone(),
@ -58,9 +62,10 @@ impl Backend {
ParseMode::default(),
);
if let Some(sems) = state.shared.semantics.as_ref() {
let borrow = sems.borrow();
for (source, sem) in &borrow.sems {
// Semantics
if let Some(lsp) = state.shared.lsp.as_ref() {
let borrow = lsp.borrow();
for (source, sem) in &borrow.semantic_data {
if let Some(path) = source
.clone()
.downcast_rc::<SourceFile>()
@ -72,6 +77,22 @@ impl Backend {
}
}
}
// Hints
if let Some(lsp) = state.shared.lsp.as_ref() {
let borrow = lsp.borrow();
for (source, hints) in &borrow.inlay_hints {
if let Some(path) = source
.clone()
.downcast_rc::<SourceFile>()
.ok()
.map(|source| source.path().to_owned())
{
self.hints_map
.insert(path, hints.hints.replace(vec![]));
}
}
}
}
}
@ -127,6 +148,7 @@ impl LanguageServer for Backend {
work_done_progress_options: WorkDoneProgressOptions::default(),
})
),
inlay_hint_provider: Some(OneOf::Left(true)),
..ServerCapabilities::default()
},
server_info: Some(ServerInfo {
@ -181,12 +203,12 @@ impl LanguageServer for Backend {
&self,
params: SemanticTokensParams,
) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensResult>> {
let uri = params.text_document.uri.to_string();
let uri = params.text_document.uri;
self.client
.log_message(MessageType::LOG, "semantic_token_full")
.await;
if let Some(semantic_tokens) = self.semantic_token_map.get(&uri) {
if let Some(semantic_tokens) = self.semantic_token_map.get(uri.as_str()) {
let data = semantic_tokens
.iter()
.filter_map(|token| Some(token.clone()))
@ -218,6 +240,17 @@ impl LanguageServer for Backend {
)
)
}
async fn inlay_hint(&self, params: InlayHintParams) -> tower_lsp::jsonrpc::Result<Option<Vec<InlayHint>>>
{
if let Some(hints) = self.hints_map.get(params.text_document.uri.as_str())
{
let (_, data) = hints.pair();
return Ok(Some(data.to_owned()));
}
Ok(None)
}
}
#[tokio::main]
@ -230,6 +263,7 @@ async fn main() {
document_map: DashMap::new(),
semantic_token_map: DashMap::new(),
diagnostic_map: DashMap::new(),
hints_map: DashMap::new(),
});
Server::new(stdin, stdout, socket).serve(service).await;
}