Clippy
parent fa68b68bf6
commit 6eae5cd79b
37 changed files with 178 additions and 238 deletions
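Most of the hunks below are mechanical rewrites suggested by the linter rather than behavioral changes. For orientation only, here is a minimal, hypothetical Rust sketch of the rewrite shapes that recur throughout the diff; the lint names in the comments are the usual Clippy names and are an assumption, not something stated in the commit.

// Hypothetical sketch of the recurring Clippy-style rewrites applied in this commit.
use std::collections::HashMap;

// redundant_field_names (assumed): `Label { text: text }` becomes `Label { text }`.
struct Label { text: String }
fn make_label(text: String) -> Label { Label { text } }

// needless_return (assumed): a trailing `return expr;` becomes a bare `expr`.
fn double(x: i32) -> i32 { x * 2 } // previously written as `return x * 2;`

// map_clone / bind_instead_of_map (assumed): `.map(|v| v.clone())` and
// `.and_then(|v| Some(v))` collapse into `.cloned()` / `.map(...)`.
fn lookup(map: &HashMap<String, String>, key: &str) -> Option<String> {
    map.get(key).cloned()
}

// redundant_static_lifetimes (assumed): `&'static str` on a `static` item is written `&str`.
static EXAMPLE_KEY: &str = "style.example";

fn main() {
    let mut m = HashMap::new();
    m.insert("k".to_string(), "v".to_string());
    let label = make_label("hello".to_string());
    println!("{} {} {:?} {}", label.text, double(2), lookup(&m, "k"), EXAMPLE_KEY);
}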
src/cache/cache.rs (vendored)
@@ -62,10 +62,10 @@ pub trait Cached {
if let Some(value) = value {
// Found in cache
-return Ok(value);
+Ok(value)
} else {
// Compute a value
-let value = match f(&self) {
+let value = match f(self) {
Ok(val) => val,
Err(e) => return Err(CachedError::GenErr(e)),
};
@@ -48,7 +48,7 @@ impl<'a> Compiler<'a> {
.borrow_mut()
.last_mut()
.map(|id| *id += 1);
-return Ref::map(self.sections_counter.borrow(), |b| &*b);
+return Ref::map(self.sections_counter.borrow(), |b| b);
}
// Close
@@ -61,7 +61,7 @@ impl<'a> Compiler<'a> {
self.sections_counter.borrow_mut().push(1);
}
-Ref::map(self.sections_counter.borrow(), |b| &*b)
+Ref::map(self.sections_counter.borrow(), |b| b)
}
/// Sanitizes text for a [`Target`]
@@ -86,7 +86,7 @@ impl<'a> Compiler<'a> {
///
/// # Parameters
/// - [`reference`] The reference to get or insert
-pub fn reference_id<'b>(&self, document: &'b dyn Document, reference: ElemReference) -> usize {
+pub fn reference_id(&self, document: &dyn Document, reference: ElemReference) -> usize {
let mut borrow = self.reference_count.borrow_mut();
let reference = document.get_from_reference(&reference).unwrap();
let refkey = reference.refcount_key();
@@ -133,7 +133,6 @@ impl<'a> Compiler<'a> {
) -> Option<Rc<dyn Variable>> {
document
.get_variable(var_name)
-.and_then(|var| Some(var))
.or_else(|| {
println!(
"Missing variable `{var_name}` in {}",
@@ -125,7 +125,7 @@ pub fn create_navigation(
let title = doc_borrow
.get_variable("nav.title")
.or(doc_borrow.get_variable("doc.title"));
-let previous = doc_borrow.get_variable("nav.previous").map(|s| s.clone());
+let previous = doc_borrow.get_variable("nav.previous").cloned();
let path = doc_borrow.get_variable("compiler.output");
let (title, path) = match (title, path) {
@@ -205,7 +205,7 @@ pub fn create_navigation(
// Sort entries
fn sort_entries(nav: &mut NavEntry) {
nav.entries
-.sort_unstable_by(|l, r| NavEntry::sort_entry(l, r));
+.sort_unstable_by(NavEntry::sort_entry);
for (_, child) in &mut nav.children {
sort_entries(child);
@@ -38,7 +38,7 @@ impl PostProcess {
return Err(format!("Cannot use an unspecific reference for reference named: `{name}`. Found in document `{}` but also in `{}`. Specify the source of the reference to resolve the conflict.", previous_doc.borrow().input, doc.borrow().input));
}
-found_ref = Some((found.clone(), &doc));
+found_ref = Some((found.clone(), doc));
}
}
}
@@ -67,9 +67,7 @@ impl PostProcess {
}
if let Some((found_ref, found_doc)) = &found_ref {
let found_borrow = found_doc.borrow();
-let found_path = found_borrow.get_variable("compiler.output").ok_or(format!(
-"Unable to get the output. Aborting postprocessing."
-))?;
+let found_path = found_borrow.get_variable("compiler.output").ok_or("Unable to get the output. Aborting postprocessing.".to_string())?;
let insert_content = format!("{found_path}#{found_ref}");
content.insert_str(pos + offset, insert_content.as_str());
offset += insert_content.len();
@@ -74,7 +74,7 @@ pub fn process(
let con = db_path
.as_ref()
-.map_or(Connection::open_in_memory(), |path| Connection::open(path))
+.map_or(Connection::open_in_memory(), Connection::open)
.map_err(|err| format!("Unable to open connection to the database: {err}"))?;
CompiledDocument::init_cache(&con)
.map_err(|err| format!("Failed to initialize cached document table: {err}"))?;
@@ -137,7 +137,7 @@ pub fn process(
let body = postprocess
.as_ref()
.unwrap()
-.apply(target, &compiled, &doc)?;
+.apply(target, &compiled, doc)?;
doc.borrow_mut().body = body;
// Insert into cache
@@ -157,6 +157,7 @@ pub fn process(
/// Processes sources from in-memory strings
/// This function is indented for testing
+#[cfg(test)]
pub fn process_from_memory(target: Target, sources: Vec<String>) -> Result<Vec<(RefCell<CompiledDocument>, Option<PostProcess>)>, String> {
let mut compiled = vec![];
@@ -187,7 +188,7 @@ pub fn process_from_memory(target: Target, sources: Vec<String>) -> Result<Vec<(
let body = postprocess
.as_ref()
.unwrap()
-.apply(target, &compiled, &doc)?;
+.apply(target, &compiled, doc)?;
doc.borrow_mut().body = body;
}
@@ -75,7 +75,7 @@ impl Scope {
// Variables
self.variables
-.extend(other.variables.drain().map(|(name, var)| (name, var)));
+.extend(other.variables.drain());
}
false => {
// References
@@ -165,15 +165,15 @@ pub trait Document<'a>: core::fmt::Debug {
fn get_variable(&self, name: &str) -> Option<Rc<dyn Variable>> {
match self.scope().borrow().variables.get(name) {
Some(variable) => {
-return Some(variable.clone());
+Some(variable.clone())
}
// Continue search recursively
None => match self.parent() {
-Some(parent) => return parent.get_variable(name),
+Some(parent) => parent.get_variable(name),
// Not found
-None => return None,
+None => None,
},
}
}
@@ -189,27 +189,23 @@ pub trait Document<'a>: core::fmt::Debug {
scope: &RefCell<Scope>,
merge_as: Option<&String>,
) {
-match merge_as {
-Some(merge_as) => self.scope().borrow_mut().merge(
-&mut *scope.borrow_mut(),
-merge_as,
-self.content().borrow().len(),
-),
-_ => {}
-}
+if let Some(merge_as) = merge_as { self.scope().borrow_mut().merge(
+&mut scope.borrow_mut(),
+merge_as,
+self.content().borrow().len(),
+) }
// Content
self.content()
.borrow_mut()
-.extend((content.borrow_mut()).drain(..).map(|value| value));
+.extend((content.borrow_mut()).drain(..));
}
fn get_reference(&self, refname: &str) -> Option<ElemReference> {
self.scope()
.borrow()
.referenceable
-.get(refname)
-.and_then(|reference| Some(*reference))
+.get(refname).copied()
}
fn get_from_reference(
@@ -20,8 +20,8 @@ pub struct LangDocument<'a> {
impl<'a> LangDocument<'a> {
pub fn new(source: Rc<dyn Source>, parent: Option<&'a dyn Document<'a>>) -> Self {
Self {
-source: source,
-parent: parent,
+source,
+parent,
content: RefCell::new(Vec::new()),
scope: RefCell::new(Scope::new()),
}
@@ -32,7 +32,7 @@ impl<'a> Document<'a> for LangDocument<'a> {
fn source(&self) -> Rc<dyn Source> { self.source.clone() }
fn parent(&self) -> Option<&'a dyn Document<'a>> {
-self.parent.and_then(|p| Some(p as &dyn Document<'a>))
+self.parent.map(|p| p as &dyn Document<'a>)
}
fn content(&self) -> &RefCell<Vec<Box<dyn Element>>> { &self.content }
@@ -92,13 +92,13 @@ impl Variable for PathVariable {
fn name(&self) -> &str { self.name.as_str() }
fn from_string(&mut self, str: &str) -> Option<String> {
-self.path = PathBuf::from(std::fs::canonicalize(str).unwrap());
+self.path = std::fs::canonicalize(str).unwrap();
None
}
fn to_string(&self) -> String { self.path.to_str().unwrap().to_string() }
-fn parse<'a>(&self, state: &ParserState, location: Token, document: &'a dyn Document) {
+fn parse(&self, state: &ParserState, location: Token, document: &dyn Document) {
let source = Rc::new(VirtualSource::new(
location,
self.name().to_string(),
@@ -85,7 +85,7 @@ impl Element for Blockquote {
if self.cite.is_some() || self.author.is_some() {
result += r#"<p class="blockquote-author">"#;
-let fmt_pair = FmtPair(compiler.target(), &self);
+let fmt_pair = FmtPair(compiler.target(), self);
match (self.author.is_some(), self.cite.is_some()) {
(true, true) => {
let args =
@@ -215,10 +215,7 @@ impl Rule for BlockquoteRule {
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
self.start_re
-.find_at(cursor.source.content(), cursor.pos)
-.map_or(None, |m| {
-Some((m.start(), Box::new([false; 0]) as Box<dyn Any>))
-})
+.find_at(cursor.source.content(), cursor.pos).map(|m| (m.start(), Box::new([false; 0]) as Box<dyn Any>))
}
fn on_match<'a>(
@@ -331,7 +328,7 @@ impl Rule for BlockquoteRule {
);
break;
}
-Ok(mut paragraph) => std::mem::replace(&mut paragraph.content, vec![]),
+Ok(mut paragraph) => std::mem::take(&mut paragraph.content),
};
// Get style
@@ -376,7 +373,7 @@ mod blockquote_style {
use crate::impl_elementstyle;
-pub static STYLE_KEY: &'static str = "style.blockquote";
+pub static STYLE_KEY: &str = "style.blockquote";
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
pub enum AuthorPos {
@@ -123,8 +123,8 @@ impl Code {
}
result +=
-format!("<div class=\"code-block-content\"><table cellspacing=\"0\">").as_str();
-for (line_id, line) in self.code.split(|c| c == '\n').enumerate() {
+"<div class=\"code-block-content\"><table cellspacing=\"0\">".to_string().as_str();
+for (line_id, line) in self.code.split('\n').enumerate() {
result += "<tr><td class=\"code-block-gutter\">";
// Line number
@@ -137,7 +137,7 @@ impl Code {
Err(e) => {
return Err(format!(
"Error highlighting line `{line}`: {}",
-e.to_string()
+e
))
}
Ok(regions) => {
@@ -146,7 +146,7 @@ impl Code {
syntect::html::IncludeBackground::No,
) {
Err(e) => {
-return Err(format!("Error highlighting code: {}", e.to_string()))
+return Err(format!("Error highlighting code: {}", e))
}
Ok(highlighted) => {
result += if highlighted.is_empty() {
@@ -165,14 +165,14 @@ impl Code {
} else if self.block == CodeKind::MiniBlock {
result += "<div class=\"code-block\"><div class=\"code-block-content\"><table cellspacing=\"0\">";
-for line in self.code.split(|c| c == '\n') {
+for line in self.code.split('\n') {
result += "<tr><td class=\"code-block-line\"><pre>";
// Code
match h.highlight_line(line, Code::get_syntaxes()) {
Err(e) => {
return Err(format!(
"Error highlighting line `{line}`: {}",
-e.to_string()
+e
))
}
Ok(regions) => {
@@ -181,7 +181,7 @@ impl Code {
syntect::html::IncludeBackground::No,
) {
Err(e) => {
-return Err(format!("Error highlighting code: {}", e.to_string()))
+return Err(format!("Error highlighting code: {}", e))
}
Ok(highlighted) => {
result += if highlighted.is_empty() {
@@ -203,7 +203,7 @@ impl Code {
return Err(format!(
"Error highlighting line `{}`: {}",
self.code,
-e.to_string()
+e
))
}
Ok(regions) => {
@@ -212,7 +212,7 @@ impl Code {
syntect::html::IncludeBackground::No,
) {
Err(e) => {
-return Err(format!("Error highlighting code: {}", e.to_string()))
+return Err(format!("Error highlighting code: {}", e))
}
Ok(highlighted) => result += highlighted.as_str(),
}
@@ -244,11 +244,10 @@ impl Cached for Code {
fn key(&self) -> <Self as Cached>::Key {
let mut hasher = Sha512::new();
hasher.input((self.block as usize).to_be_bytes().as_slice());
-hasher.input((self.line_offset as usize).to_be_bytes().as_slice());
-self.theme
-.as_ref()
-.map(|theme| hasher.input(theme.as_bytes()));
-self.name.as_ref().map(|name| hasher.input(name.as_bytes()));
+hasher.input(self.line_offset.to_be_bytes().as_slice());
+if let Some(theme) = self.theme
+.as_ref() { hasher.input(theme.as_bytes()) }
+if let Some(name) = self.name.as_ref() { hasher.input(name.as_bytes()) }
hasher.input(self.language.as_bytes());
hasher.input(self.code.as_bytes());
@@ -335,11 +334,11 @@ impl RegexRule for CodeRule {
fn regexes(&self) -> &[regex::Regex] { &self.re }
-fn on_regex_match<'a>(
+fn on_regex_match(
&self,
index: usize,
state: &ParserState,
-document: &'a dyn Document,
+document: &dyn Document,
token: Token,
matches: Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
@@ -432,7 +431,7 @@ impl RegexRule for CodeRule {
} else {
util::process_escaped('\\', "``", matches.get(3).unwrap().as_str())
};
-if code_content.bytes().last() == Some('\n' as u8)
+if code_content.bytes().last() == Some(b'\n')
// Remove newline
{
code_content.pop();
@@ -453,8 +452,7 @@ impl RegexRule for CodeRule {
}
let theme = document
-.get_variable("code.theme")
-.and_then(|var| Some(var.to_string()));
+.get_variable("code.theme").map(|var| var.to_string());
if index == 0
// Block
@@ -553,8 +551,7 @@ impl RegexRule for CodeRule {
ctx.as_ref().map(|ctx| {
let theme = ctx
.document
-.get_variable("code.theme")
-.and_then(|var| Some(var.to_string()));
+.get_variable("code.theme").map(|var| var.to_string());
ctx.state.push(
ctx.document,
@@ -584,8 +581,7 @@ impl RegexRule for CodeRule {
ctx.as_ref().map(|ctx| {
let theme = ctx
.document
-.get_variable("code.theme")
-.and_then(|var| Some(var.to_string()));
+.get_variable("code.theme").map(|var| var.to_string());
ctx.state.push(
ctx.document,
@@ -622,8 +618,7 @@ impl RegexRule for CodeRule {
ctx.as_ref().map(|ctx| {
let theme = ctx
.document
-.get_variable("code.theme")
-.and_then(|var| Some(var.to_string()));
+.get_variable("code.theme").map(|var| var.to_string());
ctx.state.push(
ctx.document,
@@ -50,11 +50,11 @@ impl RegexRule for CommentRule {
fn regexes(&self) -> &[Regex] { &self.re }
-fn on_regex_match<'a>(
+fn on_regex_match(
&self,
_: usize,
state: &ParserState,
-document: &'a dyn Document,
+document: &dyn Document,
token: Token,
matches: Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
@@ -89,7 +89,7 @@ impl RegexRule for CommentRule {
}),
);
-return reports;
+reports
}
}
@@ -140,13 +140,10 @@ impl RuleState for CustomStyleState {
let paragraph = document.last_element::<Paragraph>().unwrap();
let paragraph_end = paragraph
.content
-.last()
-.and_then(|last| {
-Some((
+.last().map(|last| (
last.location().source(),
last.location().end() - 1..last.location().end(),
))
-})
.unwrap();
reports.push(
@@ -164,7 +161,7 @@ impl RuleState for CustomStyleState {
.with_label(
Label::new(paragraph_end)
.with_order(1)
-.with_message(format!("Paragraph ends here"))
+.with_message("Paragraph ends here".to_string())
.with_color(state.parser.colors().error),
)
.with_note("Styles cannot span multiple documents (i.e @import)")
@@ -172,11 +169,11 @@ impl RuleState for CustomStyleState {
);
});
-return reports;
+reports
}
}
-static STATE_NAME: &'static str = "elements.custom_style";
+static STATE_NAME: &str = "elements.custom_style";
#[auto_registry::auto_registry(registry = "rules", path = "crate::elements::customstyle")]
pub struct CustomStyleRule;
@@ -63,10 +63,7 @@ impl Rule for ElemStyleRule {
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
self.start_re
-.find_at(cursor.source.content(), cursor.pos)
-.map_or(None, |m| {
-Some((m.start(), Box::new([false; 0]) as Box<dyn Any>))
-})
+.find_at(cursor.source.content(), cursor.pos).map(|m| (m.start(), Box::new([false; 0]) as Box<dyn Any>))
}
fn on_match<'a>(
@@ -93,7 +90,7 @@ impl Rule for ElemStyleRule {
.with_message("Empty Style Key")
.with_label(
Label::new((cursor.source.clone(), key.range()))
-.with_message(format!("Expected a non-empty style key",))
+.with_message("Expected a non-empty style key".to_string())
.with_color(state.parser.colors().error),
)
.finish(),
@@ -135,9 +132,7 @@ impl Rule for ElemStyleRule {
.with_message("Invalid Style Value")
.with_label(
Label::new((cursor.source.clone(), matches.get(0).unwrap().range()))
-.with_message(format!(
-"Unable to parse json string after style key",
-))
+.with_message("Unable to parse json string after style key".to_string())
.with_color(state.parser.colors().error),
)
.finish(),
@@ -179,6 +179,6 @@ impl RegexRule for ImportRule {
);
}
-return result;
+result
}
}
@@ -280,7 +280,7 @@ impl RuleState for LayoutState {
);
}
-return reports;
+reports
}
}
@@ -375,7 +375,7 @@ impl LayoutRule {
}
}
-static STATE_NAME: &'static str = "elements.layout";
+static STATE_NAME: &str = "elements.layout";
impl RegexRule for LayoutRule {
fn name(&self) -> &'static str { "Layout" }
@@ -649,7 +649,7 @@ impl RegexRule for LayoutRule {
}),
);
-return reports;
+reports
}
// TODO: Add method to create new layouts
@@ -746,7 +746,7 @@ impl RegexRule for LayoutRule {
to: Some("push".to_string()),
pos: 1,
name: Some("token".to_string()),
-cause: Arc::new(mlua::Error::external(format!("Unable set next layout: No active layout found"))),
+cause: Arc::new(mlua::Error::external("Unable set next layout: No active layout found".to_string())),
});
return;
}
@@ -794,7 +794,7 @@ impl RegexRule for LayoutRule {
to: Some("push".to_string()),
pos: 1,
name: Some("token".to_string()),
-cause: Arc::new(mlua::Error::external(format!("Unable set layout end: No active layout found"))),
+cause: Arc::new(mlua::Error::external("Unable set layout end: No active layout found".to_string())),
});
return;
}
@@ -153,7 +153,7 @@ impl RegexRule for LinkRule {
);
return reports;
}
-Ok(mut paragraph) => std::mem::replace(&mut paragraph.content, vec![]),
+Ok(mut paragraph) => std::mem::take(&mut paragraph.content),
}
}
_ => panic!("Empty link name"),
@@ -176,7 +176,7 @@ impl RegexRule for LinkRule {
}
let text_content = util::process_text(document, url.as_str());
-if text_content.as_str().is_empty() {
+if text_content.is_empty() {
reports.push(
Report::build(ReportKind::Error, token.source(), url.start())
.with_message("Empty link url")
@@ -207,7 +207,7 @@ impl RegexRule for LinkRule {
}),
);
-return reports;
+reports
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
@@ -238,7 +238,7 @@ impl RegexRule for LinkRule {
return;
}
Ok(mut paragraph) => {
-std::mem::replace(&mut paragraph.content, vec![])
+std::mem::take(&mut paragraph.content)
}
};
@@ -244,7 +244,7 @@ impl ListRule {
}
});
-return parsed;
+parsed
}
}
@@ -254,10 +254,7 @@ impl Rule for ListRule {
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
self.start_re
-.find_at(cursor.source.content(), cursor.pos)
-.map_or(None, |m| {
-Some((m.start(), Box::new([false; 0]) as Box<dyn Any>))
-})
+.find_at(cursor.source.content(), cursor.pos).map(|m| (m.start(), Box::new([false; 0]) as Box<dyn Any>))
}
fn on_match<'a>(
@@ -329,7 +326,7 @@ impl Rule for ListRule {
.get(2)
.unwrap()
.as_str()
-.find(|c| c == '*' || c == '-')
+.find(['*', '-'])
== Some(0)
{
break;
@@ -393,7 +390,7 @@ impl Rule for ListRule {
);
break;
}
-Ok(mut paragraph) => std::mem::replace(&mut paragraph.content, vec![]),
+Ok(mut paragraph) => std::mem::take(&mut paragraph.content),
};
if let Some(previous_depth) = document
@@ -158,14 +158,11 @@ impl Element for Medium {
let caption = self
.caption
-.as_ref()
-.and_then(|cap| {
-Some(format!(
+.as_ref().map(|cap| format!(
" {}",
Compiler::sanitize(compiler.target(), cap.as_str())
))
-})
-.unwrap_or(String::new());
+.unwrap_or_default();
result.push_str(
format!(r#"<p class="medium-refname">({refcount}){caption}</p>"#).as_str(),
@@ -436,15 +433,13 @@ impl RegexRule for MediaRule {
.get("width", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
-.ok()
-.and_then(|(_, s)| Some(s));
+.ok().map(|(_, s)| s);
let caption = properties
.get("caption", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
-.ok()
-.and_then(|(_, value)| Some(value));
+.ok().map(|(_, value)| value);
let description = match matches.get(4) {
Some(content) => {
@@ -110,8 +110,7 @@ impl Rule for ParagraphRule {
fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
self.re
-.find_at(cursor.source.content(), cursor.pos)
-.and_then(|m| Some((m.start(), Box::new([false; 0]) as Box<dyn Any>)))
+.find_at(cursor.source.content(), cursor.pos).map(|m| (m.start(), Box::new([false; 0]) as Box<dyn Any>))
}
fn on_match(
@@ -2,7 +2,6 @@ use std::collections::HashMap;
use std::ops::Range;
use std::rc::Rc;
use ariadne::Fmt;
use ariadne::Label;
use ariadne::Report;
use ariadne::ReportKind;
@@ -245,8 +244,7 @@ impl RegexRule for ReferenceRule {
.get("caption", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
-.ok()
-.and_then(|(_, s)| Some(s));
+.ok().map(|(_, s)| s);
if let Some(refdoc) = refdoc {
if refdoc.is_empty() {
@@ -172,7 +172,7 @@ impl RegexRule for ScriptRule {
Label::new((source.clone(), 0..source.content().len()))
.with_message(format!(
"Kernel execution failed:\n{}",
-e.to_string()
+e
))
.with_color(state.parser.colors().error),
)
@@ -215,7 +215,7 @@ impl RegexRule for ScriptRule {
Label::new((source.clone(), 0..source.content().len()))
.with_message(format!(
"Kernel evaluation failed:\n{}",
-e.to_string()
+e
))
.with_color(state.parser.colors().error),
)
@@ -263,7 +263,7 @@ impl RegexRule for ScriptRule {
Label::new((source.clone(), 0..source.content().len()))
.with_message(format!(
"Kernel evaluation failed:\n{}",
-e.to_string()
+e
))
.with_color(state.parser.colors().error),
)
@@ -222,7 +222,7 @@ impl RegexRule for SectionRule {
.with_message(format!("`{}` previously defined here",
refname.as_str().fg(state.parser.colors().highlight)))
.with_color(state.parser.colors().warning))
-.with_note(format!("Previous reference was overwritten"))
+.with_note("Previous reference was overwritten".to_string())
.finish());
}
Some(refname.as_str().to_string())
@@ -247,7 +247,7 @@ impl RegexRule for SectionRule {
"+".fg(state.parser.colors().info),
kind.as_str().fg(state.parser.colors().highlight)))
.with_color(state.parser.colors().error))
-.with_help(format!("Leave empty for a numbered listed section"))
+.with_help("Leave empty for a numbered listed section".to_string())
.finish());
return result;
}
@@ -321,7 +321,7 @@ impl RegexRule for SectionRule {
}),
);
-return result;
+result
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
@@ -331,7 +331,7 @@ impl RegexRule for SectionRule {
"push".to_string(),
lua.create_function(
|_, (title, depth, kind, reference): (String, usize, Option<String>, Option<String>)| {
-let kind = match kind.as_ref().map(|s| s.as_str()).unwrap_or("") {
+let kind = match kind.as_deref().unwrap_or("") {
"*+" | "+*" => section_kind::NO_NUMBER | section_kind::NO_TOC,
"*" => section_kind::NO_NUMBER,
"+" => section_kind::NO_TOC,
@@ -341,9 +341,7 @@ impl RegexRule for SectionRule {
to: Some("push".to_string()),
pos: 3,
name: Some("kind".to_string()),
-cause: Arc::new(mlua::Error::external(format!(
-"Unknown section kind specified"
-))),
+cause: Arc::new(mlua::Error::external("Unknown section kind specified".to_string())),
})
}
};
@@ -394,7 +392,7 @@ mod section_style {
use crate::impl_elementstyle;
-pub static STYLE_KEY: &'static str = "style.section";
+pub static STYLE_KEY: &str = "style.section";
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
pub enum SectionLinkPos {
@@ -99,13 +99,10 @@ impl RuleState for StyleState {
let paragraph = document.last_element::<Paragraph>().unwrap();
let paragraph_end = paragraph
.content
-.last()
-.and_then(|last| {
-Some((
+.last().map(|last| (
last.location().source(),
last.location().end() - 1..last.location().end(),
))
-})
.unwrap();
reports.push(
@@ -123,7 +120,7 @@ impl RuleState for StyleState {
.with_label(
Label::new(paragraph_end)
.with_order(1)
-.with_message(format!("Paragraph ends here"))
+.with_message("Paragraph ends here".to_string())
.with_color(state.parser.colors().error),
)
.with_note("Styles cannot span multiple documents (i.e @import)")
@@ -131,7 +128,7 @@ impl RuleState for StyleState {
);
});
-return reports;
+reports
}
}
@@ -157,7 +154,7 @@ impl StyleRule {
}
}
-static STATE_NAME: &'static str = "elements.style";
+static STATE_NAME: &str = "elements.style";
impl RegexRule for StyleRule {
fn name(&self) -> &'static str { "Style" }
@@ -199,14 +196,14 @@ impl RegexRule for StyleRule {
Box::new(Style::new(
token.clone(),
index,
-!style_state.toggled[index].is_some(),
+style_state.toggled[index].is_none(),
)),
);
} else {
panic!("Invalid state at `{STATE_NAME}`");
}
-return vec![];
+vec![]
}
fn register_bindings<'lua>(&self, lua: &'lua mlua::Lua) -> Vec<(String, Function<'lua>)> {
@@ -225,9 +222,7 @@ impl RegexRule for StyleRule {
to: Some("toggle".to_string()),
pos: 1,
name: Some("style".to_string()),
-cause: Arc::new(mlua::Error::external(format!(
-"Unknown style specified"
-))),
+cause: Arc::new(mlua::Error::external("Unknown style specified".to_string())),
})
}
};
@@ -260,7 +255,7 @@ impl RegexRule for StyleRule {
Box::new(Style::new(
ctx.location.clone(),
kind,
-!style_state.toggled[kind].is_some(),
+style_state.toggled[kind].is_none(),
)),
);
} else {
@@ -114,10 +114,7 @@ impl FormattedTex {
}
let mut result = String::new();
-match process.stdout.unwrap().read_to_string(&mut result) {
-Err(e) => panic!("Unable to read `latex2svg` stdout: {}", e),
-Ok(_) => {}
-}
+if let Err(e) = process.stdout.unwrap().read_to_string(&mut result) { panic!("Unable to read `latex2svg` stdout: {}", e) }
println!("Done!");
Ok(result)
@@ -410,16 +407,14 @@ impl RegexRule for TexRule {
.get("caption", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
-.ok()
-.and_then(|(_, value)| Some(value));
+.ok().map(|(_, value)| value);
// Environ
let tex_env = properties
.get("env", |_, value| -> Result<String, ()> {
Ok(value.clone())
})
-.ok()
-.and_then(|(_, value)| Some(value))
+.ok().map(|(_, value)| value)
.unwrap();
state.push(
@@ -26,8 +26,8 @@ pub struct Text {
impl Text {
pub fn new(location: Token, content: String) -> Text {
Text {
-location: location,
-content: content,
+location,
+content,
}
}
}
@@ -67,7 +67,7 @@ impl VariableRule {
Ok(path) => Ok(Rc::new(PathVariable::new(location, name, path))),
Err(e) => Err(format!("Unable to canonicalize path `{}`: {}",
value.fg(colors.highlight),
-e.to_string()))
+e))
}
}
_ => panic!("Unhandled variable kind"),
@@ -83,7 +83,7 @@ impl VariableRule {
if name.contains("%") {
return Err(format!("Name cannot contain '{}'", "%".fg(colors.info)));
}
-return Ok(name);
+Ok(name)
}
pub fn validate_value(original_value: &str) -> Result<String, String> {
@@ -122,11 +122,11 @@ impl RegexRule for VariableRule {
fn regexes(&self) -> &[Regex] { &self.re }
-fn on_regex_match<'a>(
+fn on_regex_match(
&self,
_: usize,
state: &ParserState,
-document: &'a dyn Document,
+document: &dyn Document,
token: Token,
matches: regex::Captures,
) -> Vec<Report<'_, (Rc<dyn Source>, Range<usize>)>> {
@@ -180,7 +180,7 @@ impl RegexRule for VariableRule {
};
let var_name = match matches.get(2) {
-Some(name) => match VariableRule::validate_name(&state.parser.colors(), name.as_str()) {
+Some(name) => match VariableRule::validate_name(state.parser.colors(), name.as_str()) {
Ok(var_name) => var_name,
Err(msg) => {
result.push(
@@ -228,7 +228,7 @@ impl RegexRule for VariableRule {
};
match self.make_variable(
-&state.parser.colors(),
+state.parser.colors(),
token.clone(),
var_kind,
var_name.to_string(),
@@ -256,7 +256,7 @@ impl RegexRule for VariableRule {
}
}
-return result;
+result
}
fn register_bindings<'lua>(&self, lua: &'lua Lua) -> Vec<(String, Function<'lua>)> {
@@ -335,7 +335,7 @@ impl RegexRule for VariableSubstitutionRule {
.with_message("Empty variable name")
.with_label(
Label::new((token.source(), matches.get(0).unwrap().range()))
-.with_message(format!("Missing variable name for substitution"))
+.with_message("Missing variable name for substitution".to_string())
.with_color(state.parser.colors().error),
)
.finish(),
@@ -350,7 +350,7 @@ impl RegexRule for VariableSubstitutionRule {
.with_message("Invalid variable name")
.with_label(
Label::new((token.source(), name.range()))
-.with_message(format!("Variable names contains leading spaces"))
+.with_message("Variable names contains leading spaces".to_string())
.with_color(state.parser.colors().error),
)
.with_help("Remove leading spaces")
@@ -366,9 +366,7 @@ impl RegexRule for VariableSubstitutionRule {
.with_message("Invalid variable name")
.with_label(
Label::new((token.source(), name.range()))
-.with_message(format!(
-"Variable names contains trailing spaces"
-))
+.with_message("Variable names contains trailing spaces".to_string())
.with_color(state.parser.colors().error),
)
.with_help("Remove trailing spaces")
@@ -378,23 +376,20 @@ impl RegexRule for VariableSubstitutionRule {
return result;
}
// Invalid name
-match VariableRule::validate_name(&state.parser.colors(), name.as_str()) {
-Err(msg) => {
-result.push(
-Report::build(ReportKind::Error, token.source(), name.start())
-.with_message("Invalid variable name")
-.with_label(
-Label::new((token.source(), name.range()))
-.with_message(msg)
-.with_color(state.parser.colors().error),
-)
-.finish(),
-);
+if let Err(msg) = VariableRule::validate_name(state.parser.colors(), name.as_str()) {
+result.push(
+Report::build(ReportKind::Error, token.source(), name.start())
+.with_message("Invalid variable name")
+.with_label(
+Label::new((token.source(), name.range()))
+.with_message(msg)
+.with_color(state.parser.colors().error),
+)
+.finish(),
+);
-return result;
-}
-_ => {}
-}
+return result;
+}
// Get variable
match document.get_variable(name.as_str()) {
@@ -422,6 +417,6 @@ impl RegexRule for VariableSubstitutionRule {
variable.parse(state, token, document);
-return result;
+result
}
}
@@ -15,7 +15,7 @@ pub struct KernelContext<'a, 'b, 'c> {
}
thread_local! {
-pub static CTX: RefCell<Option<KernelContext<'static, 'static, 'static>>> = RefCell::new(None);
+pub static CTX: RefCell<Option<KernelContext<'static, 'static, 'static>>> = const { RefCell::new(None) };
}
#[derive(Debug)]
@@ -249,7 +249,7 @@ fn main() -> ExitCode {
}
};
-let nav = navigation.compile(Target::HTML, &doc);
+let nav = navigation.compile(Target::HTML, doc);
let file = std::fs::File::create(output.clone() + "/" + out_path.as_str()).unwrap();
let mut writer = BufWriter::new(file);
@@ -285,5 +285,5 @@ fn main() -> ExitCode {
}
}
-return ExitCode::SUCCESS;
+ExitCode::SUCCESS
}
@@ -45,8 +45,7 @@ pub struct CustomStyleHolder {
impl CustomStyleHolder {
pub fn get(&self, style_name: &str) -> Option<Rc<dyn CustomStyle>> {
self.custom_styles
-.get(style_name)
-.map(|style| style.clone())
+.get(style_name).cloned()
}
pub fn insert(&mut self, style: Rc<dyn CustomStyle>) {
@@ -120,7 +120,7 @@ impl Parser for LangParser {
))),
);
-return (Box::new(doc), state);
+(Box::new(doc), state)
}
fn parse_into<'p, 'a, 'doc>(
@@ -164,7 +164,7 @@ impl Parser for LangParser {
}
}
-return state;
+state
// State
//self.handle_reports(source.clone(),
// self.state_mut().on_scope_end(&self, &document, super::state::Scope::DOCUMENT));
@@ -36,7 +36,7 @@ pub struct LayoutHolder {
impl LayoutHolder {
pub fn get(&self, layout_name: &str) -> Option<Rc<dyn LayoutType>> {
-self.layouts.get(layout_name).map(|layout| layout.clone())
+self.layouts.get(layout_name).cloned()
}
pub fn insert(&mut self, layout: Rc<dyn LayoutType>) {
@@ -89,8 +89,8 @@ impl SharedState {
// Default styles & layouts
parser.rules().iter().for_each(|rule| {
-rule.register_styles(&mut *s.styles.borrow_mut());
-rule.register_layouts(&mut *s.layouts.borrow_mut());
+rule.register_styles(&mut s.styles.borrow_mut());
+rule.register_layouts(&mut s.layouts.borrow_mut());
});
s
@@ -198,7 +198,7 @@ impl<'a, 'b> ParserState<'a, 'b> {
let mut escaped = false;
'inner: loop {
let g = graphemes.next_back();
-if !g.is_some() || g.unwrap() != "\\" {
+if g.is_none() || g.unwrap() != "\\" {
break 'inner;
}
@@ -236,10 +236,10 @@ impl<'a, 'b> ParserState<'a, 'b> {
return (cursor.at(content.len()), None);
}
-return (
+(
cursor.at(next_pos),
Some((winner, matches_borrow[winner].1.take().unwrap())),
-);
+)
}
/// Add an [`Element`] to the [`Document`]
@@ -262,7 +262,7 @@ impl<'a, 'b> ParserState<'a, 'b> {
if doc.last_element::<Paragraph>().is_some_and(|_| true) {
self.parser
.handle_reports(self.shared.rule_state.borrow_mut().on_scope_end(
-&self,
+self,
doc,
super::state::Scope::PARAGRAPH,
));
@@ -368,10 +368,10 @@ pub trait Parser {
///
/// This method must not be called if a [`ParserState`] for this parser exists.
fn add_rule(&mut self, rule: Box<dyn Rule>) -> Result<(), String> {
-if let Some(_) = self
+if self
.rules()
.iter()
-.find(|other_rule| other_rule.name() == rule.name())
+.any(|other_rule| other_rule.name() == rule.name())
{
return Err(format!(
"Attempted to introduce duplicate rule: `{}`",
@@ -391,14 +391,11 @@ pub trait Parser {
let mut sources: HashSet<Rc<dyn Source>> = HashSet::new();
fn recurse_source(sources: &mut HashSet<Rc<dyn Source>>, source: Rc<dyn Source>) {
sources.insert(source.clone());
-match source.location() {
-Some(parent) => {
-let parent_source = parent.source();
-if sources.get(&parent_source).is_none() {
-recurse_source(sources, parent_source);
-}
+if let Some(parent) = source.location() {
+let parent_source = parent.source();
+if sources.get(&parent_source).is_none() {
+recurse_source(sources, parent_source);
+}
-None => {}
-}
}
@@ -423,10 +420,8 @@ pub trait Parser {
if let Some(_s) = source.downcast_ref::<VirtualSource>() {
let start = location.start()
-+ (location.source().content().as_bytes()[location.start()]
-== '\n' as u8)
-.then_some(1)
-.unwrap_or(0);
++ if location.source().content().as_bytes()[location.start()]
+== b'\n' { 1 } else { 0 };
report.labels.push(
Label::new((location.source(), start..location.end()))
.with_message("In evaluation of")
@@ -53,7 +53,7 @@ pub fn get_rule_registry() -> Vec<Box<dyn Rule>> {
}
}
let mut map = make_rules();
-let mut sorted_keys = map.iter().map(|(key, _)| *key).collect::<Vec<_>>();
+let mut sorted_keys = map.keys().copied().collect::<Vec<_>>();
sorted_keys.sort_by(|l, r| cmp(&map, l, r));
let mut owned = Vec::with_capacity(sorted_keys.len());
@@ -136,18 +136,18 @@ impl<T: RegexRule + 'static> Rule for T {
self.regexes().iter().enumerate().for_each(|(id, re)| {
if let Some(m) = re.find_at(content.as_str(), cursor.pos) {
found = found
-.and_then(|(f_pos, f_id)| {
+.map(|(f_pos, f_id)| {
if f_pos > m.start() {
-Some((m.start(), id))
+(m.start(), id)
} else {
-Some((f_pos, f_id))
+(f_pos, f_id)
}
})
.or(Some((m.start(), id)));
}
});
-return found.map(|(pos, id)| (pos, Box::new(id) as Box<dyn Any>));
+found.map(|(pos, id)| (pos, Box::new(id) as Box<dyn Any>))
}
fn on_match<'a>(
@@ -24,7 +24,6 @@ impl core::fmt::Display for dyn Source {
}
impl core::fmt::Debug for dyn Source {
// TODO
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Source{{{}}}", self.name())
}
@@ -55,10 +54,10 @@ impl SourceFile {
pub fn new(path: String, location: Option<Token>) -> Result<Self, String> {
match fs::read_to_string(&path) {
Err(_) => {
-return Err(String::from(format!(
+Err(format!(
"Unable to read file content: `{}`",
path
-)))
+))
}
Ok(content) => Ok(Self {
location,
@@ -70,9 +69,9 @@ impl SourceFile {
pub fn with_content(path: String, content: String, location: Option<Token>) -> Self {
Self {
-location: location,
-path: path,
-content: content,
+location,
+path,
+content,
}
}
}
@@ -162,7 +161,7 @@ impl Token {
}
pub fn source(&self) -> Rc<dyn Source> {
-return self.source.clone();
+self.source.clone()
}
/// Construct Token from a range
@@ -176,10 +175,10 @@ impl Token {
}
pub fn start(&self) -> usize {
-return self.range.start;
+self.range.start
}
pub fn end(&self) -> usize {
-return self.range.end;
+self.range.end
}
}
@@ -64,7 +64,7 @@ impl RuleStateHolder {
}
pub fn get(&self, state_name: &str) -> Option<Rc<RefCell<dyn RuleState>>> {
-self.states.get(state_name).map(|state| state.clone())
+self.states.get(state_name).cloned()
}
pub fn on_scope_end(
@@ -88,6 +88,6 @@ impl RuleStateHolder {
}
});
-return reports;
+reports
}
}
@@ -38,7 +38,7 @@ impl StyleHolder {
/// NOTE: Will panic if a style is not defined for a given element
/// If you need to process user input, use [`is_registered`]
pub fn current(&self, style_key: &str) -> Rc<dyn ElementStyle> {
-self.styles.get(style_key).map(|rc| rc.clone()).unwrap()
+self.styles.get(style_key).cloned().unwrap()
}
/// Sets the [`style`]
@@ -50,17 +50,17 @@ impl StyleHolder {
#[macro_export]
macro_rules! impl_elementstyle {
($t:ty, $key:expr) => {
-impl crate::parser::style::ElementStyle for $t {
+impl $crate::parser::style::ElementStyle for $t {
fn key(&self) -> &'static str { $key }
fn from_json(
&self,
json: &str,
-) -> Result<std::rc::Rc<dyn crate::parser::style::ElementStyle>, String> {
+) -> Result<std::rc::Rc<dyn $crate::parser::style::ElementStyle>, String> {
serde_json::from_str::<$t>(json)
.map_err(|e| e.to_string())
.map(|obj| {
-std::rc::Rc::new(obj) as std::rc::Rc<dyn crate::parser::style::ElementStyle>
+std::rc::Rc::new(obj) as std::rc::Rc<dyn $crate::parser::style::ElementStyle>
})
}
@@ -68,9 +68,9 @@ macro_rules! impl_elementstyle {
&self,
lua: &mlua::Lua,
value: mlua::Value,
-) -> Result<std::rc::Rc<dyn crate::parser::style::ElementStyle>, mlua::Error> {
+) -> Result<std::rc::Rc<dyn $crate::parser::style::ElementStyle>, mlua::Error> {
mlua::LuaSerdeExt::from_value::<$t>(lua, value).map(|obj| {
-std::rc::Rc::new(obj) as std::rc::Rc<dyn crate::parser::style::ElementStyle>
+std::rc::Rc::new(obj) as std::rc::Rc<dyn $crate::parser::style::ElementStyle>
})
}
}
@@ -36,7 +36,7 @@ pub fn process_text(document: &dyn Document, content: &str) -> String {
.last_element::<Paragraph>()
.and_then(|par| {
par.find_back(|e| e.kind() != ElemKind::Invisible)
-.and_then(|e| Some(e.kind() == ElemKind::Inline))
+.map(|e| e.kind() == ElemKind::Inline)
})
.unwrap_or(false)
{
@@ -79,12 +79,12 @@ pub fn process_text(document: &dyn Document, content: &str) -> String {
}
}
-return (out + g, Some(g));
+(out + g, Some(g))
})
.0
.to_string();
-return processed;
+processed
}
/// Processed a string and escapes a single token out of it
@@ -111,7 +111,7 @@ pub fn process_escaped<S: AsRef<str>>(escape: char, token: &'static str, content
escaped += 1;
} else if escaped % 2 == 1 && token_it.peek().map_or(false, |p| *p == c) {
let _ = token_it.next();
-if token_it.peek() == None {
+if token_it.peek().is_none() {
(0..(escaped / 2)).for_each(|_| processed.push(escape));
escaped = 0;
token_it = token.chars().peekable();
@@ -333,9 +333,8 @@ impl PropertyParser {
escaped = 0;
in_name = true;
-if let Err(e) = try_insert(&name, &value) {
-return Err(e);
-}
+try_insert(&name, &value)?;
name.clear();
value.clear();
} else {
@@ -361,9 +360,7 @@ impl PropertyParser {
return Err("Expected non empty property list.".to_string());
}
-if let Err(e) = try_insert(&name, &value) {
-return Err(e);
-}
+try_insert(&name, &value)?;
if let Err(e) = self.properties.iter().try_for_each(|(key, prop)| {
if !properties.properties.contains_key(key) {