Inlay hints

parent 430c48a61a
commit 23f9066f75
23 changed files with 298 additions and 96 deletions
@@ -369,16 +369,9 @@ impl RegexRule for CodeRule {
 		let code_lang = match matches.get(2) {
 			None => "Plain Text".to_string(),
 			Some(lang) => {
-				let code_lang = lang.as_str().trim_start().trim_end().to_string();
+				let mut code_lang = lang.as_str().trim_start().trim_end().to_string();
 				if code_lang.is_empty() {
-					report_err!(
-						&mut reports,
-						token.source(),
-						"Missing Code Language".into(),
-						span(lang.range(), "No language specified".into())
-					);
-
-					return reports;
+					code_lang = "Plain Text".into();
 				}
 				if Code::get_syntaxes()
 					.find_syntax_by_name(code_lang.as_str())
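The hunk above makes an empty language tag fall back to "Plain Text" instead of aborting with a "Missing Code Language" report. A standalone sketch of that fallback (simplified, not the commit's exact code):

fn resolve_code_lang(lang: Option<&str>) -> String {
	match lang {
		None => "Plain Text".to_string(),
		Some(lang) => {
			// A whitespace-only tag is treated the same as no tag at all.
			let mut code_lang = lang.trim().to_string();
			if code_lang.is_empty() {
				code_lang = "Plain Text".into();
			}
			code_lang
		}
	}
}

fn main() {
	assert_eq!(resolve_code_lang(None), "Plain Text");
	assert_eq!(resolve_code_lang(Some("   ")), "Plain Text");
	assert_eq!(resolve_code_lang(Some(" rust ")), "rust");
}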
@@ -511,7 +504,7 @@ impl RegexRule for CodeRule {
 		}

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let range = matches
 				.get(0)

@@ -91,7 +91,7 @@ impl RegexRule for CommentRule {
 		);

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let comment = matches.get(1).unwrap().range();
 			sems.add(comment.start - 2..comment.end, tokens.comment);

@@ -8,6 +8,7 @@ use std::rc::Rc;
 use std::sync::Arc;

 use ariadne::Fmt;
+use lsp::semantic::Semantics;
 use mlua::Error::BadArgument;
 use mlua::Function;
 use mlua::Lua;

@@ -51,14 +52,14 @@ impl CustomStyle for LuaCustomStyle {
 		let kernel: Ref<'_, Kernel> =
 			Ref::map(state.shared.kernels.borrow(), |b| b.get("main").unwrap());
 		//let kernel = RefMut::map(parser_state.shared.kernels.borrow(), |ker| ker.get("main").unwrap());
-		let ctx = KernelContext {
-			location: location.clone(),
+		let mut ctx = KernelContext::new(
+			location.clone(),
 			state,
 			document,
-		};
+		);

 		let mut reports = vec![];
-		kernel.run_with_context(ctx, |lua| {
+		kernel.run_with_context(&mut ctx, |lua| {
 			let chunk = lua.load(self.start.as_str());
 			if let Err(err) = chunk.eval::<()>() {
 				report_err!(

@@ -85,14 +86,14 @@ impl CustomStyle for LuaCustomStyle {
 	) -> Vec<Report> {
 		let kernel: Ref<'_, Kernel> =
 			Ref::map(state.shared.kernels.borrow(), |b| b.get("main").unwrap());
-		let ctx = KernelContext {
-			location: location.clone(),
+		let mut ctx = KernelContext::new(
+			location.clone(),
 			state,
 			document,
-		};
+		);

 		let mut reports = vec![];
-		kernel.run_with_context(ctx, |lua| {
+		kernel.run_with_context(&mut ctx, |lua| {
 			let chunk = lua.load(self.end.as_str());
 			if let Err(err) = chunk.eval::<()>() {
 				report_err!(
@@ -336,6 +337,12 @@ impl Rule for CustomStyleRule {
 			style.on_start(token.clone(), state, document)
 		};

+		if let Some((sems, tokens)) =
+			Semantics::from_source(token.source(), &state.shared.lsp)
+		{
+			sems.add(token.range.clone(), tokens.customstyle_marker);
+		}
+
 		(cursor.at(token.end()), unsafe {
 			std::mem::transmute(reports)
 		})

@@ -344,7 +344,7 @@ impl RegexRule for GraphRule {
 		);

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let range = token.range;
 			sems.add(

@@ -180,7 +180,7 @@ impl RegexRule for ImportRule {
 		}

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			// @import
 			let import =

@@ -497,7 +497,7 @@ impl RegexRule for LayoutRule {
 		);

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let start = matches
 				.get(0)

@@ -581,7 +581,7 @@ impl RegexRule for LayoutRule {
 		};

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let start = matches
 				.get(0)

@@ -668,7 +668,7 @@ impl RegexRule for LayoutRule {
 		layout_state.stack.pop();

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let start = matches
 				.get(0)

@@ -145,7 +145,7 @@ impl RegexRule for LinkRule {
 		}

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			sems.add(
 				display.range().start - 1..display.range().start,

@@ -215,7 +215,7 @@ impl RegexRule for LinkRule {
 		);

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			sems.add(
 				matches.get(1).unwrap().end()..matches.get(1).unwrap().end() + 1,
@@ -336,7 +336,7 @@ impl Rule for ListRule {
 		);

 		if let Some((sems, tokens)) =
-			Semantics::from_source(cursor.source.clone(), &state.shared.semantics)
+			Semantics::from_source(cursor.source.clone(), &state.shared.lsp)
 		{
 			sems.add(captures.get(1).unwrap().range(), tokens.list_bullet);
 			if let Some(props) = captures.get(2).map(|m| m.range()) {

@@ -210,7 +210,7 @@ impl RegexRule for RawRule {
 		);

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let range = matches.get(0).unwrap().range();
 			sems.add(range.start..range.start + 2, tokens.raw_sep);

@@ -327,7 +327,7 @@ impl RegexRule for ReferenceRule {
 		}

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let link = matches.get(1).unwrap().range();
 			sems.add(link.start - 2..link.start - 1, tokens.reference_operator);

@@ -357,7 +357,7 @@ impl RegexRule for ReferenceRule {
 		);

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let link = matches.get(1).unwrap().range();
 			sems.add(link.start - 2..link.start - 1, tokens.reference_operator);

@@ -368,7 +368,7 @@ impl RegexRule for ReferenceRule {
 		}

 		if let (Some((sems, tokens)), Some(props)) = (
-			Semantics::from_source(token.source(), &state.shared.semantics),
+			Semantics::from_source(token.source(), &state.shared.lsp),
 			matches.get(2).map(|m| m.range()),
 		) {
 			sems.add(props.start - 1..props.start, tokens.reference_props_sep);

@@ -14,6 +14,7 @@ use crate::parser::source::VirtualSource;
 use crate::parser::util;
 use crate::parser::util::escape_source;
 use ariadne::Fmt;
+use lsp::hints::Hints;
 use mlua::Lua;
 use regex::Captures;
 use regex::Regex;

@@ -141,14 +142,13 @@ impl RegexRule for ScriptRule {
 				"Invalid Kernel Code".into(),
 				span(script_range, "Kernel code is empty".into())
 			);
-			return reports;
 		}

 		let execute = |lua: &Lua| {
 			let chunk = lua.load(source.content()).set_name(kernel_name);

 			if index == 0
-			// Exec
+			// Exec @<>@
 			{
 				if let Err(e) = chunk.exec() {
 					report_err!(

@@ -160,10 +160,9 @@ impl RegexRule for ScriptRule {
 							format!("Kernel execution failed:\n{}", e)
 						)
 					);
-					return reports;
 				}
 			} else
-			// Eval
+			// Eval %<>%
 			{
 				// Validate kind
 				let kind = match matches.get(2) {

@@ -177,7 +176,7 @@ impl RegexRule for ScriptRule {
 							"Invalid Kernel Code Kind".into(),
 							span(kind.range(), msg)
 						);
-						return reports;
+						return;
 					}
 				},
 			};
|
@ -246,18 +245,18 @@ impl RegexRule for ScriptRule {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
reports
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let ctx = KernelContext {
|
let mut ctx = KernelContext::new(
|
||||||
location: Token::new(0..source.content().len(), source.clone()),
|
Token::new(0..source.content().len(), source.clone()),
|
||||||
state,
|
state,
|
||||||
document,
|
document,
|
||||||
};
|
);
|
||||||
|
|
||||||
|
kernel.run_with_context(&mut ctx, execute);
|
||||||
|
|
||||||
if let Some((sems, tokens)) =
|
if let Some((sems, tokens)) =
|
||||||
Semantics::from_source(token.source(), &state.shared.semantics)
|
Semantics::from_source(token.source(), &state.shared.lsp)
|
||||||
{
|
{
|
||||||
let range = matches
|
let range = matches
|
||||||
.get(0)
|
.get(0)
|
||||||
|
@@ -288,7 +287,21 @@ impl RegexRule for ScriptRule {
 			}
 			sems.add(range.end - 2..range.end, tokens.script_sep);
 		}
-		kernel.run_with_context(ctx, execute)
+
+		if let Some(hints) = Hints::from_source(token.source(), &state.shared.lsp)
+		{
+			let mut label = String::new();
+			ctx.redirects.iter()
+				.for_each(|redir| {
+					label += format!("{}: {} ", redir.source, redir.content).as_str();
+				});
+			if !label.is_empty()
+			{
+				label.pop();
+				hints.add(matches.get(0).unwrap().end(), label);
+			}
+		}
+		reports
 	}
 }

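So the hint label is assembled from one "source: content" pair per captured call, with the trailing space popped. A standalone sketch of that label construction (hypothetical free-standing names; the commit builds it inline above):

struct Redirect {
	source: String,
	content: String,
}

fn hint_label(redirects: &[Redirect]) -> Option<String> {
	let mut label = String::new();
	redirects.iter().for_each(|redir| {
		label += format!("{}: {} ", redir.source, redir.content).as_str();
	});
	if label.is_empty() {
		return None;
	}
	label.pop(); // drop the trailing space
	Some(label)
}

fn main() {
	// Two captured print() calls from one script block...
	let redirects = vec![
		Redirect { source: "print".into(), content: "hello".into() },
		Redirect { source: "print".into(), content: "world".into() },
	];
	// ...render as a single inlay hint after the block's end marker.
	assert_eq!(hint_label(&redirects).as_deref(), Some("print: hello print: world"));
}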
@@ -336,7 +336,7 @@ impl RegexRule for SectionRule {
 		);

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			sems.add(matches.get(1).unwrap().range(), tokens.section_heading);
 			if let Some(reference) = matches.get(2) {

@@ -196,7 +196,7 @@ impl RegexRule for StyleRule {
 		);

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			sems.add(token.start()..token.end(), tokens.style_marker);
 		}

@@ -425,7 +425,7 @@ impl RegexRule for TexRule {
 		);

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let range = token.range;
 			sems.add(

@@ -254,7 +254,7 @@ impl RegexRule for VariableRule {
 		}

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let name = matches.get(2).unwrap().range();
 			if let Some(kind) = matches.get(1).map(|m| m.range()) {

@@ -426,7 +426,7 @@ impl RegexRule for VariableSubstitutionRule {
 		variable.parse(state, token.clone(), document);

 		if let Some((sems, tokens)) =
-			Semantics::from_source(token.source(), &state.shared.semantics)
+			Semantics::from_source(token.source(), &state.shared.lsp)
 		{
 			let name = matches.get(1).unwrap().range();
 			sems.add(name.start - 1..name.start, tokens.variable_sub_sep);
src/lsp/data.rs (new file, +25)
@@ -0,0 +1,25 @@
+use std::collections::HashMap;
+use std::rc::Rc;
+
+use crate::parser::source::Source;
+
+use super::hints::HintsData;
+use super::semantic::SemanticsData;
+use super::semantic::Tokens;
+
+#[derive(Debug)]
+pub struct LSPData {
+	pub semantic_tokens: Tokens,
+	pub semantic_data: HashMap<Rc<dyn Source>, SemanticsData>,
+	pub inlay_hints: HashMap<Rc<dyn Source>, HintsData>,
+}
+
+impl LSPData {
+	pub fn new() -> Self {
+		Self {
+			semantic_tokens: Tokens::new(),
+			semantic_data: HashMap::new(),
+			inlay_hints: HashMap::new(),
+		}
+	}
+}
src/lsp/hints.rs (new file, +102)
@@ -0,0 +1,102 @@
+use std::{cell::{Ref, RefCell}, rc::Rc};
+
+use tower_lsp::lsp_types::InlayHint;
+
+use crate::parser::source::{LineCursor, Source, SourceFile, VirtualSource};
+
+use super::data::LSPData;
+
+/// Per file hints
+#[derive(Debug)]
+pub struct HintsData
+{
+	/// The current cursor
+	cursor: RefCell<LineCursor>,
+
+	/// The hints
+	pub hints: RefCell<Vec<InlayHint>>,
+}
+
+impl HintsData {
+	pub fn new(source: Rc<dyn Source>) -> Self {
+		Self {
+			cursor: RefCell::new(LineCursor::new(source)),
+			hints: RefCell::new(vec![]),
+		}
+	}
+}
+
+#[derive(Debug)]
+pub struct Hints<'a> {
+	pub(self) hints: Ref<'a, HintsData>,
+	// The source used when resolving the parent source
+	pub(self) original_source: Rc<dyn Source>,
+	/// The resolved parent source
+	pub(self) source: Rc<dyn Source>,
+}
+
+impl<'a> Hints<'a> {
+	fn from_source_impl(
+		source: Rc<dyn Source>,
+		lsp: &'a Option<RefCell<LSPData>>,
+		original_source: Rc<dyn Source>,
+	) -> Option<Self> {
+		if source.name().starts_with(":LUA:") && source.downcast_ref::<VirtualSource>().is_some() {
+			return None;
+		}
+
+		if let Some(location) = source
+			.clone()
+			.downcast_rc::<VirtualSource>()
+			.ok()
+			.as_ref()
+			.map(|parent| parent.location())
+			.unwrap_or(None)
+		{
+			return Self::from_source_impl(location.source(), lsp, original_source);
+		} else if let Ok(source) = source.clone().downcast_rc::<SourceFile>() {
+			return Ref::filter_map(
+				lsp.as_ref().unwrap().borrow(),
+				|lsp: &LSPData| {
+					lsp.inlay_hints.get(&(source.clone() as Rc<dyn Source>))
+				},
+			)
+			.ok()
+			.map(|hints| {
+				Self {
+					hints,
+					source,
+					original_source,
+				}
+			});
+		}
+		None
+	}
+
+	pub fn from_source(
+		source: Rc<dyn Source>,
+		lsp: &'a Option<RefCell<LSPData>>,
+	) -> Option<Self> {
+		if lsp.is_none() {
+			return None;
+		}
+		Self::from_source_impl(source.clone(), lsp, source)
+	}
+
+	pub fn add(&self, position: usize, label: String)
+	{
+		let mut cursor = self.hints.cursor.borrow_mut();
+		cursor.move_to(position);
+
+		self.hints.hints.borrow_mut().push(InlayHint {
+			position: tower_lsp::lsp_types::Position { line: cursor.line as u32, character: cursor.line_pos as u32 },
+			label: tower_lsp::lsp_types::InlayHintLabel::String(label),
+			kind: Some(tower_lsp::lsp_types::InlayHintKind::PARAMETER),
+			text_edits: None,
+			tooltip: None,
+			padding_left: None,
+			padding_right: None,
+			data: None,
+		})
+	}
+}
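Hints::add converts a byte offset into the line/character pair that tower_lsp's Position expects, via LineCursor::move_to; keeping the cursor in a RefCell lets successive adds advance incrementally instead of rescanning from the start of the file. Conceptually the mapping works like this standalone sketch (simplified and byte-based; some LSP clients additionally expect UTF-16 column units):

fn offset_to_position(content: &str, offset: usize) -> (u32, u32) {
	let before = &content[..offset];
	// Line index = number of newlines before the offset.
	let line = before.matches('\n').count() as u32;
	// Column = distance from the start of the current line.
	let line_start = before.rfind('\n').map(|i| i + 1).unwrap_or(0);
	(line, (offset - line_start) as u32)
}

fn main() {
	let src = "first line\nsecond line";
	assert_eq!(offset_to_position(src, 0), (0, 0));
	assert_eq!(offset_to_position(src, 11), (1, 0)); // start of "second line"
	assert_eq!(offset_to_position(src, 18), (1, 7));
}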
@@ -1 +1,3 @@
 pub mod semantic;
+pub mod data;
+pub mod hints;

@@ -1,6 +1,5 @@
 use std::cell::Ref;
 use std::cell::RefCell;
-use std::collections::HashMap;
 use std::ops::Range;
 use std::rc::Rc;

@@ -14,6 +13,8 @@ use crate::parser::source::SourceFile;
 use crate::parser::source::SourcePosition;
 use crate::parser::source::VirtualSource;

+use super::data::LSPData;
+
 pub const TOKEN_TYPE: &[SemanticTokenType] = &[
 	SemanticTokenType::NAMESPACE,
 	SemanticTokenType::TYPE,

@@ -102,6 +103,8 @@ pub struct Tokens {

 	pub style_marker: (u32, u32),

+	pub customstyle_marker: (u32, u32),
+
 	pub import_import: (u32, u32),
 	pub import_as_sep: (u32, u32),
 	pub import_as: (u32, u32),

@@ -179,6 +182,8 @@ impl Tokens {

 			style_marker: token!("operator"),

+			customstyle_marker: token!("operator"),
+
 			import_import: token!("macro"),
 			import_as_sep: token!("operator"),
 			import_as: token!("operator"),

@@ -264,7 +269,7 @@ impl SemanticsData {
 #[derive(Debug)]
 pub struct Semantics<'a> {
 	pub(self) sems: Ref<'a, SemanticsData>,
-	// TODO
+	// The source used when resolving the parent source
 	pub(self) original_source: Rc<dyn Source>,
 	/// The resolved parent source
 	pub(self) source: Rc<dyn Source>,

@@ -273,7 +278,7 @@ pub struct Semantics<'a> {
 impl<'a> Semantics<'a> {
 	fn from_source_impl(
 		source: Rc<dyn Source>,
-		semantics: &'a Option<RefCell<SemanticsHolder>>,
+		lsp: &'a Option<RefCell<LSPData>>,
 		original_source: Rc<dyn Source>,
 	) -> Option<(Self, Ref<'a, Tokens>)> {
 		if source.name().starts_with(":LUA:") && source.downcast_ref::<VirtualSource>().is_some() {

@@ -288,12 +293,12 @@ impl<'a> Semantics<'a> {
 			.map(|parent| parent.location())
 			.unwrap_or(None)
 		{
-			return Self::from_source_impl(location.source(), semantics, original_source);
+			return Self::from_source_impl(location.source(), lsp, original_source);
 		} else if let Ok(source) = source.clone().downcast_rc::<SourceFile>() {
 			return Ref::filter_map(
-				semantics.as_ref().unwrap().borrow(),
-				|semantics: &SemanticsHolder| {
-					semantics.sems.get(&(source.clone() as Rc<dyn Source>))
+				lsp.as_ref().unwrap().borrow(),
+				|lsp: &LSPData| {
+					lsp.semantic_data.get(&(source.clone() as Rc<dyn Source>))
 				},
 			)
 			.ok()

@@ -305,8 +310,8 @@ impl<'a> Semantics<'a> {
 					original_source,
 				},
 				Ref::map(
-					semantics.as_ref().unwrap().borrow(),
-					|semantics: &SemanticsHolder| &semantics.tokens,
+					lsp.as_ref().unwrap().borrow(),
+					|lsp: &LSPData| &lsp.semantic_tokens,
 				),
 			)
 		});

@@ -316,12 +321,12 @@ impl<'a> Semantics<'a> {

 	pub fn from_source(
 		source: Rc<dyn Source>,
-		semantics: &'a Option<RefCell<SemanticsHolder>>,
+		lsp: &'a Option<RefCell<LSPData>>,
 	) -> Option<(Self, Ref<'a, Tokens>)> {
-		if semantics.is_none() {
+		if lsp.is_none() {
 			return None;
 		}
-		Self::from_source_impl(source.clone(), semantics, source)
+		Self::from_source_impl(source.clone(), lsp, source)
 	}

 	pub fn add(&self, range: Range<usize>, token: (u32, u32)) {
@@ -365,42 +370,27 @@ impl<'a> Semantics<'a> {
 	}
 }

-#[derive(Debug)]
-pub struct SemanticsHolder {
-	pub tokens: Tokens,
-	pub sems: HashMap<Rc<dyn Source>, SemanticsData>,
-}
-
-impl SemanticsHolder {
-	pub fn new() -> Self {
-		Self {
-			tokens: Tokens::new(),
-			sems: HashMap::new(),
-		}
-	}
-}
-
 #[cfg(test)]
 pub mod tests {
 	#[macro_export]
 	macro_rules! validate_semantics {
 		($state:expr, $source:expr, $idx:expr,) => {};
 		($state:expr, $source:expr, $idx:expr, $token_name:ident { $($field:ident == $value:expr),* }; $($tail:tt)*) => {{
-			let token = $state.shared.semantics
+			let token = $state.shared.lsp
 				.as_ref()
 				.unwrap()
 				.borrow()
-				.sems
+				.semantic_data
 				.get(&($source as std::rc::Rc<dyn crate::parser::source::Source>))
 				.unwrap()
 				.tokens
 				.borrow()
 				[$idx];
-			let token_type = $state.shared.semantics
+			let token_type = $state.shared.lsp
 				.as_ref()
 				.unwrap()
 				.borrow()
-				.tokens
+				.semantic_tokens
 				.$token_name;

 			let found_token = (token.token_type, token.token_modifiers_bitset);
@@ -8,14 +8,32 @@ use crate::parser::parser::Parser;
 use crate::parser::parser::ParserState;
 use crate::parser::source::Token;

+/// Redirected data from lua execution
+pub struct KernelRedirect
+{
+	/// Message source e.g print()
+	pub source: String,
+	/// Message content
+	pub content: String,
+}
+
 pub struct KernelContext<'a, 'b, 'c> {
 	pub location: Token,
 	pub state: &'a ParserState<'a, 'b>,
 	pub document: &'c dyn Document<'c>,
+	pub redirects: Vec<KernelRedirect>,
 }

+impl<'a, 'b, 'c> KernelContext<'a, 'b, 'c> {
+	pub fn new(location: Token, state: &'a ParserState<'a, 'b>, document: &'c dyn Document<'c>) -> Self {
+		Self { location, state, document, redirects: vec![] }
+	}
+}
+
 thread_local! {
-	pub static CTX: RefCell<Option<KernelContext<'static, 'static, 'static>>> = const { RefCell::new(None) };
+	pub static CTX: RefCell<Option<&'static mut KernelContext<'static, 'static, 'static>>> = const { RefCell::new(None) };
 }

 #[derive(Debug)]

@@ -42,6 +60,18 @@ impl Kernel {
 			lua.globals().set("nml", nml_table).unwrap();
 		}

+		lua.globals().set("print", lua.create_function(|_, msg: String| {
+			CTX.with_borrow_mut(|ctx| {
+				ctx.as_mut().map(|ctx| {
+					ctx.redirects.push(KernelRedirect {
+						source: "print".into(),
+						content: msg,
+					});
+				});
+			});
+			Ok(())
+		}).unwrap()).unwrap();
+
 		Self { lua }
 	}

@@ -49,10 +79,11 @@ impl Kernel {
 	///
 	/// This is the only way lua code shoule be ran, because exported
 	/// functions may require the context in order to operate
-	pub fn run_with_context<T, F>(&self, context: KernelContext, f: F) -> T
+	pub fn run_with_context<T, F>(&self, context: &mut KernelContext, f: F) -> T
 	where
 		F: FnOnce(&Lua) -> T,
 	{
+		// Redirects
 		CTX.set(Some(unsafe { std::mem::transmute(context) }));
 		let ret = f(&self.lua);
 		CTX.set(None);
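These pieces fit together as follows: run_with_context stores a lifetime-erased &mut KernelContext in the CTX thread-local, the overridden print pushes one KernelRedirect per call while the Lua chunk runs, and once f returns the caller reads ctx.redirects back (which is how ScriptRule builds its hint label). A reduced sketch of that pattern, with a raw pointer standing in for the commit's transmute (illustrative only, no mlua):

use std::cell::RefCell;

struct Redirect { source: String, content: String }
struct Context { redirects: Vec<Redirect> }

thread_local! {
	static CTX: RefCell<Option<*mut Context>> = const { RefCell::new(None) };
}

// Stand-in for the lua print override: records instead of printing.
fn redirected_print(msg: String) {
	CTX.with_borrow_mut(|ctx| {
		if let Some(ptr) = *ctx {
			let ctx = unsafe { &mut *ptr };
			ctx.redirects.push(Redirect { source: "print".into(), content: msg });
		}
	});
}

fn run_with_context<T>(context: &mut Context, f: impl FnOnce() -> T) -> T {
	CTX.set(Some(context as *mut Context));
	let ret = f();
	CTX.set(None); // clear before the borrow ends so the pointer never dangles
	ret
}

fn main() {
	let mut ctx = Context { redirects: vec![] };
	run_with_context(&mut ctx, || redirected_print("hello".into()));
	assert_eq!(ctx.redirects[0].source, "print");
	assert_eq!(ctx.redirects[0].content, "hello");
}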
@@ -5,6 +5,7 @@ use crate::document::document::Document;
 use crate::document::element::DocumentEnd;
 use crate::document::langdocument::LangDocument;
 use crate::elements::text::Text;
+use crate::lsp::hints::HintsData;
 use crate::lsp::semantic::SemanticsData;

 use super::parser::ParseMode;

@@ -81,16 +82,20 @@ impl<'b> Parser for LangParser<'b> {
 	) -> (Box<dyn Document<'doc> + 'doc>, ParserState<'p, 'a>) {
 		let doc = LangDocument::new(source.clone(), parent);

-		// Insert semantics into state
-		if let (Some(_), Some(semantics)) = (
+		// Insert lsp data into state
+		if let (Some(_), Some(lsp)) = (
 			source.clone().downcast_rc::<SourceFile>().ok(),
-			state.shared.semantics.as_ref(),
+			state.shared.lsp.as_ref(),
 		) {
-			let mut b = semantics.borrow_mut();
-			if !b.sems.contains_key(&source) {
-				b.sems
+			let mut b = lsp.borrow_mut();
+			if !b.semantic_data.contains_key(&source) {
+				b.semantic_data
 					.insert(source.clone(), SemanticsData::new(source.clone()));
 			}
+			if !b.inlay_hints.contains_key(&source) {
+				b.inlay_hints
+					.insert(source.clone(), HintsData::new(source.clone()));
+			}
 		}

 		let content = source.content();
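One possible simplification, not in the commit: each contains_key/insert pair above is the manual form of the HashMap entry API, which performs the lookup once. A minimal sketch with a String key standing in for Rc<dyn Source>:

use std::collections::HashMap;

fn main() {
	let mut semantic_data: HashMap<String, Vec<u32>> = HashMap::new();
	let source = "main.nml".to_string();
	// Equivalent to: if !map.contains_key(&source) { map.insert(source, ...) }
	semantic_data.entry(source).or_insert_with(Vec::new);
}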
@@ -17,7 +17,7 @@ use crate::document::element::ContainerElement;
 use crate::document::element::ElemKind;
 use crate::document::element::Element;
 use crate::elements::paragraph::Paragraph;
-use crate::lsp::semantic::SemanticsHolder;
+use crate::lsp::data::LSPData;
 use crate::lua::kernel::Kernel;
 use crate::lua::kernel::KernelHolder;
 use ariadne::Color;

@@ -66,8 +66,8 @@ pub struct SharedState {
 	/// The custom styles
 	pub custom_styles: RefCell<CustomStyleHolder>,

-	/// The semantics
-	pub semantics: Option<RefCell<SemanticsHolder>>,
+	/// The lsp data
+	pub lsp: Option<RefCell<LSPData>>,
 }

 impl SharedState {

@@ -79,7 +79,7 @@ impl SharedState {
 			styles: RefCell::new(StyleHolder::default()),
 			layouts: RefCell::new(LayoutHolder::default()),
 			custom_styles: RefCell::new(CustomStyleHolder::default()),
-			semantics: enable_semantics.then_some(RefCell::new(SemanticsHolder::new())),
+			lsp: enable_semantics.then_some(RefCell::new(LSPData::new())),
 		};

 		// Register default kernel
@@ -27,6 +27,7 @@ struct Backend {
 	document_map: DashMap<String, String>,
 	semantic_token_map: DashMap<String, Vec<SemanticToken>>,
 	diagnostic_map: DashMap<String, Vec<Diagnostic>>,
+	hints_map: DashMap<String, Vec<InlayHint>>,
 }

 #[derive(Debug)]

@@ -47,10 +48,13 @@ impl Backend {
 			params.text.clone(),
 			None,
 		));
+
+		// Diagnostics
 		self.diagnostic_map.clear();
 		let parser = LangParser::new(false, Box::new(
 			|_colors, reports| Report::reports_to_diagnostics(&self.diagnostic_map, reports)
 		));
+		// Parse
 		let (_doc, state) = parser.parse(
 			ParserState::new_with_semantics(&parser, None),
 			source.clone(),

@@ -58,9 +62,10 @@ impl Backend {
 			ParseMode::default(),
 		);

-		if let Some(sems) = state.shared.semantics.as_ref() {
-			let borrow = sems.borrow();
-			for (source, sem) in &borrow.sems {
+		// Semantics
+		if let Some(lsp) = state.shared.lsp.as_ref() {
+			let borrow = lsp.borrow();
+			for (source, sem) in &borrow.semantic_data {
 				if let Some(path) = source
 					.clone()
 					.downcast_rc::<SourceFile>()

@@ -72,6 +77,22 @@ impl Backend {
 				}
 			}
 		}
+
+		// Hints
+		if let Some(lsp) = state.shared.lsp.as_ref() {
+			let borrow = lsp.borrow();
+			for (source, hints) in &borrow.inlay_hints {
+				if let Some(path) = source
+					.clone()
+					.downcast_rc::<SourceFile>()
+					.ok()
+					.map(|source| source.path().to_owned())
+				{
+					self.hints_map
+						.insert(path, hints.hints.replace(vec![]));
+				}
+			}
+		}
 	}
 }

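Note that hints.hints.replace(vec![]) swaps an empty Vec into the RefCell and hands back the hints accumulated during this parse, so each reparse publishes a fresh set instead of appending to stale ones. A minimal illustration of that drain semantics:

use std::cell::RefCell;

fn main() {
	let hints: RefCell<Vec<&str>> = RefCell::new(vec!["print: hello"]);
	// replace() returns the previous contents and leaves an empty Vec behind.
	let drained = hints.replace(vec![]);
	assert_eq!(drained, vec!["print: hello"]);
	assert!(hints.borrow().is_empty());
}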
@@ -127,6 +148,7 @@ impl LanguageServer for Backend {
 					work_done_progress_options: WorkDoneProgressOptions::default(),
 				})
 			),
+			inlay_hint_provider: Some(OneOf::Left(true)),
 			..ServerCapabilities::default()
 		},
 		server_info: Some(ServerInfo {

@@ -181,12 +203,12 @@ impl LanguageServer for Backend {
 		&self,
 		params: SemanticTokensParams,
 	) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensResult>> {
-		let uri = params.text_document.uri.to_string();
+		let uri = params.text_document.uri;
 		self.client
 			.log_message(MessageType::LOG, "semantic_token_full")
 			.await;

-		if let Some(semantic_tokens) = self.semantic_token_map.get(&uri) {
+		if let Some(semantic_tokens) = self.semantic_token_map.get(uri.as_str()) {
 			let data = semantic_tokens
 				.iter()
 				.filter_map(|token| Some(token.clone()))

@@ -218,6 +240,17 @@ impl LanguageServer for Backend {
 			)
 		)
 	}
+
+	async fn inlay_hint(&self, params: InlayHintParams) -> tower_lsp::jsonrpc::Result<Option<Vec<InlayHint>>>
+	{
+		if let Some(hints) = self.hints_map.get(params.text_document.uri.as_str())
+		{
+			let (_, data) = hints.pair();
+
+			return Ok(Some(data.to_owned()));
+		}
+		Ok(None)
+	}
 }

 #[tokio::main]
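The handler above ignores params.range and returns every cached hint for the document; clients generally discard hints outside the viewport, but the response could also be trimmed server-side. A hypothetical refinement, not part of the commit (relies on lsp_types' Position deriving Ord):

use tower_lsp::lsp_types::{InlayHint, Range};

// Keep only the hints that fall inside the requested range.
fn hints_in_range(hints: &[InlayHint], range: &Range) -> Vec<InlayHint> {
	hints
		.iter()
		// (line, character) pairs compare lexicographically.
		.filter(|hint| hint.position >= range.start && hint.position <= range.end)
		.cloned()
		.collect()
}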
@@ -230,6 +263,7 @@ async fn main() {
 		document_map: DashMap::new(),
 		semantic_token_map: DashMap::new(),
 		diagnostic_map: DashMap::new(),
+		hints_map: DashMap::new(),
 	});
 	Server::new(stdin, stdout, socket).serve(service).await;
 }