Fixed blockquote
parent 46d579247b
commit 07970d2745
2 changed files with 132 additions and 120 deletions
@@ -103,6 +103,8 @@ impl NavEntries {
             std::cmp::Ordering::Greater
         } else if rp.as_str() == left_title {
             std::cmp::Ordering::Less
+        } else if rp.as_str() == lp.as_str() {
+            left_title.cmp(right_title)
         } else {
             Self::sort_entry(entrymap, lp.as_str(), rp.as_str())
         }

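The added branch gives sibling entries a deterministic order: when both entries resolve to the same parent path, the comparator now falls back to comparing their titles instead of recursing into `Self::sort_entry`. A minimal sketch of the effect, using plain `(parent, title)` string pairs as stand-ins for the real `NavEntries` data (the `order_siblings` helper is hypothetical, for illustration only):

```rust
use std::cmp::Ordering;

// Illustration only: (parent, title) pairs stand in for the real nav entries.
fn order_siblings(left: (&str, &str), right: (&str, &str)) -> Ordering {
    let ((lp, left_title), (rp, right_title)) = (left, right);
    if rp == lp {
        // The new tie-break: same parent on both sides, so order by title.
        left_title.cmp(right_title)
    } else {
        // Placeholder for the recursive parent comparison done by sort_entry.
        lp.cmp(rp)
    }
}

fn main() {
    let mut entries = vec![("guide", "Zeta"), ("guide", "Alpha"), ("api", "Beta")];
    entries.sort_by(|l, r| order_siblings(*l, *r));
    // Entries under the same parent now come out in title order.
    assert_eq!(entries, [("api", "Beta"), ("guide", "Alpha"), ("guide", "Zeta")]);
}
```
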
@@ -76,7 +76,12 @@ impl Element for Blockquote {

     fn element_name(&self) -> &'static str { "Blockquote" }

-    fn compile(&self, compiler: &Compiler, document: &dyn Document, cursor: usize) -> Result<String, String> {
+    fn compile(
+        &self,
+        compiler: &Compiler,
+        document: &dyn Document,
+        cursor: usize,
+    ) -> Result<String, String> {
         match compiler.target() {
             HTML => {
                 let mut result = r#"<div class="blockquote-content">"#.to_string();

@@ -124,7 +129,9 @@ impl Element for Blockquote {

                 result += "<p>";
                 for elem in &self.content {
-                    result += elem.compile(compiler, document, cursor+result.len())?.as_str();
+                    result += elem
+                        .compile(compiler, document, cursor + result.len())?
+                        .as_str();
                 }
                 result += "</p></blockquote>";
                 if self.style.author_pos == After {

@@ -215,7 +222,8 @@ impl Rule for BlockquoteRule {

     fn next_match(&self, _state: &ParserState, cursor: &Cursor) -> Option<(usize, Box<dyn Any>)> {
         self.start_re
-            .find_at(cursor.source.content(), cursor.pos).map(|m| (m.start(), Box::new([false; 0]) as Box<dyn Any>))
+            .find_at(cursor.source.content(), cursor.pos)
+            .map(|m| (m.start(), Box::new([false; 0]) as Box<dyn Any>))
     }

     fn on_match<'a>(

@@ -229,10 +237,9 @@ impl Rule for BlockquoteRule {

         let content = cursor.source.content();
         let mut end_cursor = cursor.clone();
-        loop {
         if let Some(captures) = self.start_re.captures_at(content, end_cursor.pos) {
             if captures.get(0).unwrap().start() != end_cursor.pos {
-                break;
+                return (end_cursor, reports);
             }
             // Advance cursor
             end_cursor = end_cursor.at(captures.get(0).unwrap().end());

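This is the substantive part of the fix: the outer `loop` that wrapped the whole match body is removed, so `on_match` now handles a single quote block per invocation and every early-exit path that previously used `break` (here and in the hunks below) now returns `(end_cursor, reports)` directly. A following quote block is then picked up as a fresh rule match, which is what the updated test at the bottom asserts with its second `Blockquote`. A rough, heavily abridged control-flow sketch, with placeholder types (not the parser's real `Cursor` or report type):

```rust
// Control-flow sketch only; the real `on_match` also parses properties,
// consumes continuation lines, and pushes the finished Blockquote.
#[derive(Clone, Copy)]
struct Cursor {
    pos: usize,
}

fn on_match(cursor: Cursor, match_start: Option<usize>) -> (Cursor, Vec<String>) {
    let reports: Vec<String> = Vec::new();
    let mut end_cursor = cursor;

    // Old shape: `loop { if let Some(..) = .. { .. } else { break; } }`,
    // with error paths using `break` to reach a shared `(end_cursor, reports)`.
    // New shape: no loop, and every early-exit path returns directly.
    match match_start {
        Some(start) if start == end_cursor.pos => {
            // ... consume one blockquote and advance the cursor ...
            end_cursor.pos = start + 1;
        }
        _ => return (end_cursor, reports),
    }

    (end_cursor, reports)
}

fn main() {
    // No quote starting exactly at the cursor: return unchanged.
    let (end, reports) = on_match(Cursor { pos: 0 }, Some(5));
    assert_eq!(end.pos, 0);
    assert!(reports.is_empty());
}
```
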
@@ -258,7 +265,7 @@ impl Rule for BlockquoteRule {
                         )
                         .finish(),
                     );
-                    break;
+                    return (end_cursor, reports);
                 }
                 Ok(props) => (author, cite, url) = props,
             }

@@ -269,6 +276,9 @@ impl Rule for BlockquoteRule {
             let mut entry_content = captures.get(2).unwrap().as_str().to_string();
             let mut spacing: Option<(Range<usize>, &str)> = None;
             while let Some(captures) = self.continue_re.captures_at(content, end_cursor.pos) {
+                if captures.get(0).unwrap().start() != end_cursor.pos {
+                    break;
+                }
                 // Advance cursor
                 end_cursor = end_cursor.at(captures.get(0).unwrap().end());

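The new guard matters because, assuming `continue_re` is a regex-crate `Regex` (which the `captures_at`/`find_at` calls suggest), `captures_at` returns the first match *at or after* the given offset rather than one anchored exactly there. Without the `start()` check, a later `>` line could be pulled into the current blockquote across unrelated text. A small standalone illustration, with a simplified stand-in pattern rather than the parser's actual regex:

```rust
// Simplified stand-in for `continue_re`; uses the `regex` crate.
use regex::Regex;

fn main() {
    let re = Regex::new(r"(?m)^>[^\n]*").unwrap();
    let content = "AFTER\n> Another quote\n";

    // Searching from offset 0 still finds the "> ..." line further down.
    let m = re.captures_at(content, 0).unwrap();
    let start = m.get(0).unwrap().start();

    // The match does not begin at the cursor position, so the new check
    // breaks out of the continuation loop instead of swallowing that line.
    assert_ne!(start, 0);
    assert_eq!(&content[start..m.get(0).unwrap().end()], "> Another quote");
}
```
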
@@ -326,7 +336,7 @@ impl Rule for BlockquoteRule {
                     )
                     .finish(),
                 );
-                break;
+                return (end_cursor, reports);
             }
             Ok(mut paragraph) => std::mem::take(&mut paragraph.content),
         };

@@ -343,10 +353,7 @@ impl Rule for BlockquoteRule {
             state.push(
                 document,
                 Box::new(Blockquote {
-                    location: Token::new(
-                        entry_start..end_cursor.pos,
-                        end_cursor.source.clone(),
-                    ),
+                    location: Token::new(entry_start..end_cursor.pos, end_cursor.source.clone()),
                     content: parsed_content,
                     author,
                     cite,

@@ -354,9 +361,6 @@ impl Rule for BlockquoteRule {
                     style,
                 }),
             );
-        } else {
-            break;
-        }
         }

         (end_cursor, reports)

@@ -426,6 +430,8 @@ BEFORE
 > contin**ued here
 > **
 AFTER
+> Another quote
+END
 "#
             .to_string(),
             None,

@@ -446,6 +452,10 @@ AFTER
                 Style;
             };
             Paragraph { Text{ content == "AFTER" }; };
+            Blockquote {
+                Text { content == "Another quote" };
+            };
+            Paragraph { Text{ content == "END" }; };
         );
     }