Auto merge of #4029 - phansch:update_pulldown_cmark, r=oli-obk

Update pulldown_cmark to 0.5

We no longer have to use our own wrapper around `Parser` and can use the new
`OffsetIter` instead.

changelog: none
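
For reference, here is a minimal sketch (not part of this commit) of the pulldown-cmark 0.5 API the change relies on: `Parser::into_offset_iter()` returns an `OffsetIter` that yields each `Event` together with the byte `Range<usize>` it was parsed from, which is what the removed wrapper previously reconstructed via `get_offset()`. The input string and the printing are illustrative only:

```rust
// Illustrative sketch, assuming pulldown-cmark 0.5; the input string is made up.
use pulldown_cmark::{Event, Parser};

fn main() {
    let doc = "Some *emphasized* text.";
    // `into_offset_iter()` pairs every event with its byte range in `doc`,
    // so no hand-rolled wrapper around `Parser` is needed to track offsets.
    for (event, range) in Parser::new(doc).into_offset_iter() {
        if let Event::Text(text) = event {
            println!("{:?} at bytes {}..{}", text, range.start, range.end);
        }
    }
}
```

In the lint itself this iterator is then run through `itertools::Itertools::coalesce` to merge consecutive `Text` events while keeping the first event's range, as shown in the `doc.rs` hunks below.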
bors 2019-04-25 09:16:46 +00:00
commit 8c0e038f6f
2 changed files with 24 additions and 41 deletions

clippy_lints/Cargo.toml

@@ -28,7 +28,7 @@ serde = "1.0"
 serde_derive = "1.0"
 toml = "0.4"
 unicode-normalization = "0.1"
-pulldown-cmark = "0.2"
+pulldown-cmark = "0.5.0"
 url = "1.7.0"
 if_chain = "0.1.3"
 smallvec = { version = "0.6.5", features = ["union"] }

clippy_lints/src/doc.rs

@@ -4,6 +4,7 @@ use pulldown_cmark;
 use rustc::lint::{EarlyContext, EarlyLintPass, LintArray, LintPass};
 use rustc::{declare_tool_lint, impl_lint_pass};
 use rustc_data_structures::fx::FxHashSet;
+use std::ops::Range;
 use syntax::ast;
 use syntax::source_map::{BytePos, Span};
 use syntax_pos::Pos;
@@ -57,25 +58,6 @@ impl EarlyLintPass for DocMarkdown {
     }
 }
 
-struct Parser<'a> {
-    parser: pulldown_cmark::Parser<'a>,
-}
-
-impl<'a> Parser<'a> {
-    fn new(parser: pulldown_cmark::Parser<'a>) -> Self {
-        Self { parser }
-    }
-}
-
-impl<'a> Iterator for Parser<'a> {
-    type Item = (usize, pulldown_cmark::Event<'a>);
-
-    fn next(&mut self) -> Option<Self::Item> {
-        let offset = self.parser.get_offset();
-
-        self.parser.next().map(|event| (offset, event))
-    }
-}
 /// Cleanup documentation decoration (`///` and such).
 ///
 /// We can't use `syntax::attr::AttributeMethods::with_desugared_doc` or
@@ -159,30 +141,31 @@ pub fn check_attrs<'a>(cx: &EarlyContext<'_>, valid_idents: &FxHashSet<String>,
     }
 
     if !doc.is_empty() {
-        let parser = Parser::new(pulldown_cmark::Parser::new(&doc));
-        let parser = parser.coalesce(|x, y| {
+        let parser = pulldown_cmark::Parser::new(&doc).into_offset_iter();
+        // Iterate over all `Events` and combine consecutive events into one
+        let events = parser.coalesce(|previous, current| {
             use pulldown_cmark::Event::*;
 
-            let x_offset = x.0;
-            let y_offset = y.0;
+            let previous_range = previous.1;
+            let current_range = current.1;
 
-            match (x.1, y.1) {
-                (Text(x), Text(y)) => {
-                    let mut x = x.into_owned();
-                    x.push_str(&y);
-                    Ok((x_offset, Text(x.into())))
+            match (previous.0, current.0) {
+                (Text(previous), Text(current)) => {
+                    let mut previous = previous.to_string();
+                    previous.push_str(&current);
+                    Ok((Text(previous.into()), previous_range))
                 },
-                (x, y) => Err(((x_offset, x), (y_offset, y))),
+                (previous, current) => Err(((previous, previous_range), (current, current_range))),
             }
         });
-        check_doc(cx, valid_idents, parser, &spans);
+        check_doc(cx, valid_idents, events, &spans);
     }
 }
 
-fn check_doc<'a, Events: Iterator<Item = (usize, pulldown_cmark::Event<'a>)>>(
+fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize>)>>(
     cx: &EarlyContext<'_>,
     valid_idents: &FxHashSet<String>,
-    docs: Events,
+    events: Events,
     spans: &[(usize, Span)],
 ) {
     use pulldown_cmark::Event::*;
@@ -191,15 +174,15 @@ fn check_doc<'a, Events: Iterator<Item = (usize, pulldown_cmark::Event<'a>)>>(
     let mut in_code = false;
     let mut in_link = None;
 
-    for (offset, event) in docs {
+    for (event, range) in events {
         match event {
-            Start(CodeBlock(_)) | Start(Code) => in_code = true,
-            End(CodeBlock(_)) | End(Code) => in_code = false,
-            Start(Link(link, _)) => in_link = Some(link),
-            End(Link(_, _)) => in_link = None,
+            Start(CodeBlock(_)) => in_code = true,
+            End(CodeBlock(_)) => in_code = false,
+            Start(Link(_, url, _)) => in_link = Some(url),
+            End(Link(..)) => in_link = None,
             Start(_tag) | End(_tag) => (), // We don't care about other tags
             Html(_html) | InlineHtml(_html) => (), // HTML is weird, just ignore it
-            SoftBreak | HardBreak => (),
+            SoftBreak | HardBreak | TaskListMarker(_) | Code(_) => (),
             FootnoteReference(text) | Text(text) => {
                 if Some(&text) == in_link.as_ref() {
                     // Probably a link of the form `<http://example.com>`
@@ -209,7 +192,7 @@ fn check_doc<'a, Events: Iterator<Item = (usize, pulldown_cmark::Event<'a>)>>(
                 }
 
                 if !in_code {
-                    let index = match spans.binary_search_by(|c| c.0.cmp(&offset)) {
+                    let index = match spans.binary_search_by(|c| c.0.cmp(&range.start)) {
                         Ok(o) => o,
                         Err(e) => e - 1,
                     };
@@ -217,7 +200,7 @@ fn check_doc<'a, Events: Iterator<Item = (usize, pulldown_cmark::Event<'a>)>>(
                     let (begin, span) = spans[index];
 
                     // Adjust for the beginning of the current `Event`
-                    let span = span.with_lo(span.lo() + BytePos::from_usize(offset - begin));
+                    let span = span.with_lo(span.lo() + BytePos::from_usize(range.start - begin));
                     check_text(cx, valid_idents, &text, span);
                 }