Fix blockquotes

ef3d0c3e 2024-10-25 14:53:31 +02:00
parent 23f9066f75
commit 9fcccdd137
4 changed files with 111 additions and 12 deletions


@@ -6,6 +6,7 @@ use std::rc::Rc;
 use blockquote_style::AuthorPos::After;
 use blockquote_style::AuthorPos::Before;
 use blockquote_style::BlockquoteStyle;
+use lsp::semantic::Semantics;
 use regex::Match;
 use regex::Regex;
 use runtime_format::FormatArgs;
@@ -196,8 +197,8 @@ impl BlockquoteRule {
         );
         Self {
-            start_re: Regex::new(r"(?:^|\n)>(?:\[((?:\\.|[^\\\\])*?)\])?\s*?(.*)").unwrap(),
-            continue_re: Regex::new(r"(?:^|\n)>\s*?(.*)").unwrap(),
+            start_re: Regex::new(r"(?:^|\n)>(?:\[((?:\\.|[^\\\\])*?)\])?[^\S\r\n]*(.*)").unwrap(),
+            continue_re: Regex::new(r"(?:^|\n)>[^\S\r\n]*(.*)").unwrap(),
             properties: PropertyParser { properties: props },
         }
     }
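
The whitespace change above appears to be the heart of the fix: `[^\S\r\n]` is the usual double-negation trick for "any whitespace except a line break", and unlike the old lazy `\s*?` it greedily consumes the spaces and tabs after `>`, so they never land in the content capture. That is also why the explicit `trim_start().trim_end()` is dropped from the continuation loop further down in this diff. A quick sketch of the difference (illustration only, not code from the commit):

```rust
use regex::Regex;

fn main() {
    // Old pattern: the lazy `\s*?` matches nothing, so leading whitespace
    // ends up inside the capture and had to be trimmed afterwards.
    let old_re = Regex::new(r"(?:^|\n)>\s*?(.*)").unwrap();
    // New pattern: `[^\S\r\n]*` greedily eats spaces/tabs but can never match
    // a line break, so the capture starts directly at the quoted text.
    let new_re = Regex::new(r"(?:^|\n)>[^\S\r\n]*(.*)").unwrap();

    let line = ">\t  quoted text";
    assert_eq!(old_re.captures(line).unwrap().get(1).unwrap().as_str(), "\t  quoted text");
    assert_eq!(new_re.captures(line).unwrap().get(1).unwrap().as_str(), "quoted text");
}
```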
@@ -282,27 +283,55 @@ impl Rule for BlockquoteRule {
             }
         }
+        if let Some((sems, tokens)) =
+            Semantics::from_source(cursor.source.clone(), &state.shared.lsp)
+        {
+            let range = captures.get(0).unwrap().range();
+            let start = if content.as_bytes()[range.start] == b'\n' { range.start+1 } else { range.start };
+            sems.add(start..start+1, tokens.blockquote_marker);
+            if let Some(props) = captures.get(1).map(|m| m.range()) {
+                sems.add(props.start - 1..props.start, tokens.blockquote_props_sep);
+                sems.add(props.clone(), tokens.blockquote_props);
+                sems.add(props.end..props.end + 1, tokens.blockquote_props_sep);
+            }
+        }
         // Content
-        let entry_start = captures.get(0).unwrap().start();
+        let entry_start = captures.get(2).unwrap().start();
         let mut entry_content = captures.get(2).unwrap().as_str().to_string();
+        let mut offsets = vec![];
         while let Some(captures) = self.continue_re.captures_at(content, end_cursor.pos) {
             if captures.get(0).unwrap().start() != end_cursor.pos {
                 break;
             }
             // Advance cursor
             end_cursor = end_cursor.at(captures.get(0).unwrap().end());
+            // Offset
+            let last = offsets.last().map_or(0, |(_, last)| *last);
+            offsets.push((
+                entry_content.len(),
+                last + (captures.get(1).unwrap().start() - captures.get(0).unwrap().start() - 1) as isize
+            ));
-            let trimmed = captures.get(1).unwrap().as_str().trim_start().trim_end();
             entry_content += "\n";
-            entry_content += trimmed;
+            entry_content += captures.get(1).unwrap().as_str();
+            if let Some((sems, tokens)) =
+                Semantics::from_source(cursor.source.clone(), &state.shared.lsp)
+            {
+                let range = captures.get(0).unwrap().range();
+                let start = if content.as_bytes()[range.start] == b'\n' { range.start+1 } else { range.start };
+                sems.add_to_queue(start..start+1, tokens.blockquote_marker);
+            }
         }
         // Parse entry content
         let token = Token::new(entry_start..end_cursor.pos, end_cursor.source.clone());
-        let entry_src = Rc::new(VirtualSource::new(
+        let entry_src = Rc::new(VirtualSource::new_offsets(
             token.clone(),
             "Blockquote Entry".to_string(),
             entry_content,
+            offsets
         ));
         // Parse content
         let parsed_doc = state.with_state(|new_state| {
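
Because continuation lines are now appended verbatim instead of being trimmed, the rule records, for every line, how many characters were lost to the stripped `>` prefix and hands those pairs to `VirtualSource::new_offsets`, so that positions inside the virtual blockquote content can be related back to the original document. The sketch below is only my reading of that bookkeeping, with made-up helper names and values; the real mapping lives in `VirtualSource`, which is not shown in this diff:

```rust
use std::ops::Range;

// Hypothetical helper, not from the repository: given the (virtual position,
// cumulative shift) pairs pushed by the blockquote rule, move a range in the
// stripped-together entry content back toward where it sat in the source.
fn to_original(offsets: &[(usize, isize)], range: Range<usize>) -> Range<usize> {
    // Shift of the last recorded entry at or before `pos` (zero before the first one).
    let shift_at = |pos: usize| -> isize {
        offsets
            .iter()
            .take_while(|(at, _)| *at <= pos)
            .last()
            .map_or(0, |(_, shift)| *shift)
    };
    let start = (range.start as isize + shift_at(range.start)) as usize;
    let end = (range.end as isize + shift_at(range.end)) as usize;
    start..end
}

fn main() {
    // Hypothetical values: two continuation lines, each of which lost the two
    // characters of its "> " prefix when the virtual content was assembled.
    let offsets = vec![(11usize, 2isize), (23, 4)];
    assert_eq!(to_original(&offsets, 0..5), 0..5);      // first line, nothing stripped yet
    assert_eq!(to_original(&offsets, 12..17), 14..19);  // inside the first continuation line
}
```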


@@ -352,11 +352,11 @@ impl RegexRule for GraphRule {
                 tokens.graph_sep,
             );
             if let Some(props) = matches.get(1).map(|m| m.range()) {
-                sems.add(props.start - 1..props.start, tokens.tex_props_sep);
-                sems.add(props.clone(), tokens.tex_props);
-                sems.add(props.end..props.end + 1, tokens.tex_props_sep);
+                sems.add(props.start - 1..props.start, tokens.graph_props_sep);
+                sems.add(props.clone(), tokens.graph_props);
+                sems.add(props.end..props.end + 1, tokens.graph_props_sep);
             }
-            sems.add(matches.get(2).unwrap().range(), tokens.tex_content);
+            sems.add(matches.get(2).unwrap().range(), tokens.graph_content);
             sems.add(
                 range.end - 8..range.end,
                 tokens.graph_sep,

@@ -1,5 +1,6 @@
 use std::cell::Ref;
 use std::cell::RefCell;
+use std::collections::VecDeque;
 use std::ops::Range;
 use std::rc::Rc;
@@ -144,6 +145,10 @@ pub struct Tokens {
     pub list_props_sep: (u32, u32),
     pub list_props: (u32, u32),
+    pub blockquote_marker: (u32, u32),
+    pub blockquote_props_sep: (u32, u32),
+    pub blockquote_props: (u32, u32),
     pub raw_sep: (u32, u32),
     pub raw_props_sep: (u32, u32),
     pub raw_props: (u32, u32),
@@ -223,6 +228,10 @@ impl Tokens {
             list_props_sep: token!("operator"),
             list_props: token!("enum"),
+            blockquote_marker: token!("macro"),
+            blockquote_props_sep: token!("operator"),
+            blockquote_props: token!("enum"),
             raw_sep: token!("operator"),
             raw_props_sep: token!("operator"),
             raw_props: token!("enum"),
@@ -253,6 +262,9 @@ pub struct SemanticsData {
     /// The current cursor
     cursor: RefCell<LineCursor>,
+    /// Semantic tokens that can't be added directly
+    pub semantic_queue: RefCell<VecDeque<(Range<usize>, (u32, u32))>>,
     /// Semantic tokens
     pub tokens: RefCell<Vec<SemanticToken>>,
 }
@@ -261,6 +273,7 @@ impl SemanticsData {
     pub fn new(source: Rc<dyn Source>) -> Self {
         Self {
             cursor: RefCell::new(LineCursor::new(source)),
+            semantic_queue: RefCell::new(VecDeque::new()),
             tokens: RefCell::new(vec![]),
         }
     }
@@ -329,8 +342,41 @@ impl<'a> Semantics<'a> {
         Self::from_source_impl(source.clone(), lsp, source)
     }
-    pub fn add(&self, range: Range<usize>, token: (u32, u32)) {
-        let range = self.original_source.original_range(range).1;
+    /// Method that should be called at the end of parsing
+    ///
+    /// This function will process the end of the semantic queue
+    pub fn on_document_end(lsp: &'a Option<RefCell<LSPData>>, source: Rc<dyn Source>)
+    {
+        if source.content().is_empty()
+        {
+            return;
+        }
+        let pos = source.original_position(source.content().len() - 1).1;
+        if let Some((sems, _)) = Self::from_source(source, lsp)
+        {
+            sems.process_queue(pos);
+        }
+    }
+    /// Processes the semantic queue up to a certain position
+    fn process_queue(&self, pos: usize)
+    {
+        let mut queue = self.sems.semantic_queue.borrow_mut();
+        while !queue.is_empty()
+        {
+            let (range, token) = queue.front().unwrap();
+            if range.start > pos
+            {
+                break;
+            }
+            self.add_impl(range.to_owned(), token.to_owned());
+            queue.pop_front();
+        }
+    }
+    fn add_impl(&self, range: Range<usize>, token: (u32, u32))
+    {
         let mut tokens = self.sems.tokens.borrow_mut();
         let mut cursor = self.sems.cursor.borrow_mut();
         let mut current = cursor.clone();
@@ -368,6 +414,26 @@ impl<'a> Semantics<'a> {
             cursor.move_to(pos + len);
         }
     }
+    /// Add a semantic token to be processed instantly
+    pub fn add(&self, range: Range<usize>, token: (u32, u32)) {
+        let range = self.original_source.original_range(range).1;
+        self.process_queue(range.start);
+        self.add_impl(range, token);
+    }
+    /// Add a semantic token to be processed in a future call to `add()`
+    pub fn add_to_queue(&self, range: Range<usize>, token: (u32, u32))
+    {
+        let range = self.original_source.original_range(range).1;
+        let mut queue = self.sems.semantic_queue.borrow_mut();
+        match queue.binary_search_by_key(&range.start, |(range, _)| range.start)
+        {
+            Ok(pos) | Err(pos) => {
+                queue.insert(pos, (range, token))
+            },
+        }
+    }
 }
 #[cfg(test)]

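The new `semantic_queue` exists because the blockquote rule discovers continuation markers while scanning ahead of text it has not parsed yet, and the cursor-based token encoding (`add_impl` moves a `LineCursor` forward) presumably needs tokens in document order. `add_to_queue` therefore buffers out-of-order tokens sorted by start offset, and they are flushed either by a later `add()` or by `on_document_end`. A condensed, standalone sketch of that ordering logic (names and the `emit` callback are invented for illustration):

```rust
use std::collections::VecDeque;
use std::ops::Range;

// Stand-in for the queue added to `SemanticsData`; `emit` plays the role of
// `add_impl`. Everything here is a sketch, not the repository's actual API.
struct SemanticQueue(VecDeque<(Range<usize>, (u32, u32))>);

impl SemanticQueue {
    /// Mirrors `add_to_queue`: insert so the queue stays sorted by start offset.
    fn push(&mut self, range: Range<usize>, token: (u32, u32)) {
        let pos = match self.0.binary_search_by_key(&range.start, |(r, _)| r.start) {
            Ok(pos) | Err(pos) => pos,
        };
        self.0.insert(pos, (range, token));
    }

    /// Mirrors `process_queue`: flush every queued token starting at or before `pos`.
    fn flush(&mut self, pos: usize, emit: &mut impl FnMut(Range<usize>, (u32, u32))) {
        while self.0.front().map_or(false, |(r, _)| r.start <= pos) {
            let (range, token) = self.0.pop_front().unwrap();
            emit(range, token);
        }
    }
}

fn main() {
    let mut queue = SemanticQueue(VecDeque::new());
    // Markers can be queued out of order while the rule scans ahead...
    queue.push(40..41, (0, 0));
    queue.push(12..13, (0, 0));
    // ...but they are handed over sorted, and only once parsing reaches them.
    queue.flush(20, &mut |range, _| println!("emit {:?}", range)); // 12..13 only
    queue.flush(60, &mut |range, _| println!("emit {:?}", range)); // 40..41
}
```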

@@ -6,6 +6,7 @@ use crate::document::element::DocumentEnd;
 use crate::document::langdocument::LangDocument;
 use crate::elements::text::Text;
 use crate::lsp::hints::HintsData;
+use crate::lsp::semantic::Semantics;
 use crate::lsp::semantic::SemanticsData;
 use super::parser::ParseMode;
@@ -144,6 +145,9 @@ impl<'b> Parser for LangParser<'b> {
             }
         }
+        // Process the end of the semantics queue
+        Semantics::on_document_end(&state.shared.lsp, source.clone());
         // Rule States
         self.handle_reports(state.shared.rule_state.borrow_mut().on_scope_end(
             &state,