Auto merge of #41121 - frewsxcv:rollup, r=frewsxcv
Rollup of 8 pull requests - Successful merges: #40878, #40976, #41089, #41090, #41108, #41111, #41112, #41114 - Failed merges:
Commit: 2277f4bdcc
.gitmodules (2 changes)

@@ -25,4 +25,4 @@
 	url = https://github.com/rust-lang-nursery/reference.git
 [submodule "book"]
 	path = src/doc/book
-	url = https://github.com/rust-lang/book
+	url = https://github.com/rust-lang/book.git
@@ -15,11 +15,14 @@ set -ex
 export CFLAGS="-fPIC -Wa,-mrelax-relocations=no"
 export CXXFLAGS="-Wa,-mrelax-relocations=no"

-MUSL=musl-1.1.14
+MUSL=musl-1.1.16
 curl https://www.musl-libc.org/releases/$MUSL.tar.gz | tar xzf -
 cd $MUSL
-CFLAGS="$CFLAGS -m32" ./configure --prefix=/musl-i686 --disable-shared --target=i686
-make -j10
+CC=gcc \
+CFLAGS="$CFLAGS -m32" \
+./configure --prefix=/musl-i686 --disable-shared \
+--target=i686
+make AR=ar RANLIB=ranlib -j10
 make install
 cd ..

@@ -15,7 +15,7 @@ set -ex
 export CFLAGS="-fPIC -Wa,-mrelax-relocations=no"
 export CXXFLAGS="-Wa,-mrelax-relocations=no"

-MUSL=musl-1.1.14
+MUSL=musl-1.1.16
 curl https://www.musl-libc.org/releases/$MUSL.tar.gz | tar xzf -
 cd $MUSL
 ./configure --prefix=/musl-x86_64 --disable-shared
@@ -1346,7 +1346,7 @@ impl<T: PartialEq> Vec<T> {
     /// # Examples
     ///
     /// ```
-    ///# #![feature(vec_remove_item)]
+    /// # #![feature(vec_remove_item)]
     /// let mut vec = vec![1, 2, 3, 1];
     ///
    /// vec.remove_item(&1);
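For context, a sketch of what the fixed doctest exercises when run as a standalone program on a nightly toolchain of that era (the `# `-prefixed doctest line is hidden in rendered docs but still compiled); `vec_remove_item` was an unstable feature at the time, so the gate is required:

#![feature(vec_remove_item)]

fn main() {
    let mut vec = vec![1, 2, 3, 1];
    // remove_item removes the first element equal to the argument and returns it.
    let removed = vec.remove_item(&1);
    assert_eq!(removed, Some(1));
    assert_eq!(vec, vec![2, 3, 1]);
}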
@@ -153,8 +153,9 @@ unsafe impl<T> Sync for AtomicPtr<T> {}
 /// Rust's memory orderings are [the same as
 /// LLVM's](http://llvm.org/docs/LangRef.html#memory-model-for-concurrent-operations).
 ///
-/// For more information see the [nomicon][1].
-/// [1]: ../../../nomicon/atomics.html
+/// For more information see the [nomicon].
+///
+/// [nomicon]: ../../../nomicon/atomics.html
 #[stable(feature = "rust1", since = "1.0.0")]
 #[derive(Copy, Clone, Debug)]
 pub enum Ordering {
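A minimal usage sketch of the `Ordering` enum that this doc comment describes, using the stable standard-library atomics API (not part of this diff):

use std::sync::atomic::{AtomicUsize, Ordering};

fn main() {
    let counter = AtomicUsize::new(0);
    // SeqCst is the strongest ordering; Relaxed would only guarantee atomicity.
    counter.fetch_add(1, Ordering::SeqCst);
    assert_eq!(counter.load(Ordering::SeqCst), 1);
}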
@@ -394,6 +394,10 @@ impl Definitions {
         }
     }

+    pub fn node_to_hir_id(&self, node_id: ast::NodeId) -> hir::HirId {
+        self.node_to_hir_id[node_id]
+    }
+
     /// Add a definition with a parent definition.
     pub fn create_def_with_parent(&mut self,
                                   parent: Option<DefIndex>,
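The new accessor is a plain indexed lookup. A rough, self-contained sketch of its shape, assuming `node_to_hir_id` is some `NodeId`-indexed table; the concrete field type is not shown in this hunk, and the names below are simplified stand-ins:

#[derive(Clone, Copy, Debug, PartialEq)]
struct NodeId(usize);
#[derive(Clone, Copy, Debug, PartialEq)]
struct HirId(u32);

struct Definitions {
    // One HirId per NodeId, filled in during lowering (hypothetical layout).
    node_to_hir_id: Vec<HirId>,
}

impl Definitions {
    fn node_to_hir_id(&self, node_id: NodeId) -> HirId {
        // Indexing panics if the NodeId was never mapped, mirroring the
        // indexing expression in the real method.
        self.node_to_hir_id[node_id.0]
    }
}

fn main() {
    let defs = Definitions { node_to_hir_id: vec![HirId(0), HirId(7)] };
    assert_eq!(defs.node_to_hir_id(NodeId(1)), HirId(7));
}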
src/librustc/ich/hcx.rs (new file, 300 lines)
@@ -0,0 +1,300 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use hir;
use hir::def_id::DefId;
use ich::{self, CachingCodemapView, DefPathHashes};
use session::config::DebugInfoLevel::NoDebugInfo;
use ty;

use std::hash as std_hash;

use syntax::ast;
use syntax::attr;
use syntax::ext::hygiene::SyntaxContext;
use syntax::symbol::Symbol;
use syntax_pos::Span;

use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
                                           StableHasherResult};
use rustc_data_structures::accumulate_vec::AccumulateVec;

/// This is the context state available during incr. comp. hashing. It contains
/// enough information to transform DefIds and HirIds into stable DefPaths (i.e.
/// a reference to the TyCtxt) and it holds a few caches for speeding up various
/// things (e.g. each DefId/DefPath is only hashed once).
pub struct StableHashingContext<'a, 'tcx: 'a> {
    tcx: ty::TyCtxt<'a, 'tcx, 'tcx>,
    def_path_hashes: DefPathHashes<'a, 'tcx>,
    codemap: CachingCodemapView<'tcx>,
    hash_spans: bool,
    hash_bodies: bool,
    overflow_checks_enabled: bool,
    node_id_hashing_mode: NodeIdHashingMode,
    // A sorted array of symbol keys for fast lookup.
    ignored_attr_names: Vec<Symbol>,
}

#[derive(PartialEq, Eq, Clone, Copy)]
pub enum NodeIdHashingMode {
    Ignore,
    HashDefPath,
    HashTraitsInScope,
}

impl<'a, 'tcx: 'a> StableHashingContext<'a, 'tcx> {

    pub fn new(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>) -> Self {
        let hash_spans_initial = tcx.sess.opts.debuginfo != NoDebugInfo;
        let check_overflow_initial = tcx.sess.overflow_checks();

        let mut ignored_attr_names: Vec<_> = ich::IGNORED_ATTRIBUTES
            .iter()
            .map(|&s| Symbol::intern(s))
            .collect();

        ignored_attr_names.sort();

        StableHashingContext {
            tcx: tcx,
            def_path_hashes: DefPathHashes::new(tcx),
            codemap: CachingCodemapView::new(tcx),
            hash_spans: hash_spans_initial,
            hash_bodies: true,
            overflow_checks_enabled: check_overflow_initial,
            node_id_hashing_mode: NodeIdHashingMode::HashDefPath,
            ignored_attr_names: ignored_attr_names,
        }
    }

    #[inline]
    pub fn while_hashing_hir_bodies<F: FnOnce(&mut Self)>(&mut self,
                                                          hash_bodies: bool,
                                                          f: F) {
        let prev_hash_bodies = self.hash_bodies;
        self.hash_bodies = hash_bodies;
        f(self);
        self.hash_bodies = prev_hash_bodies;
    }

    #[inline]
    pub fn while_hashing_spans<F: FnOnce(&mut Self)>(&mut self,
                                                     hash_spans: bool,
                                                     f: F) {
        let prev_hash_spans = self.hash_spans;
        self.hash_spans = hash_spans;
        f(self);
        self.hash_spans = prev_hash_spans;
    }

    #[inline]
    pub fn with_node_id_hashing_mode<F: FnOnce(&mut Self)>(&mut self,
                                                           mode: NodeIdHashingMode,
                                                           f: F) {
        let prev = self.node_id_hashing_mode;
        self.node_id_hashing_mode = mode;
        f(self);
        self.node_id_hashing_mode = prev;
    }

    #[inline]
    pub fn tcx(&self) -> ty::TyCtxt<'a, 'tcx, 'tcx> {
        self.tcx
    }

    #[inline]
    pub fn def_path_hash(&mut self, def_id: DefId) -> u64 {
        self.def_path_hashes.hash(def_id)
    }

    #[inline]
    pub fn hash_spans(&self) -> bool {
        self.hash_spans
    }

    #[inline]
    pub fn hash_bodies(&self) -> bool {
        self.hash_bodies
    }

    #[inline]
    pub fn codemap(&mut self) -> &mut CachingCodemapView<'tcx> {
        &mut self.codemap
    }

    #[inline]
    pub fn is_ignored_attr(&self, name: Symbol) -> bool {
        self.ignored_attr_names.binary_search(&name).is_ok()
    }

    pub fn hash_hir_item_like<F: FnOnce(&mut Self)>(&mut self,
                                                    item_attrs: &[ast::Attribute],
                                                    f: F) {
        let prev_overflow_checks = self.overflow_checks_enabled;
        if attr::contains_name(item_attrs, "rustc_inherit_overflow_checks") {
            self.overflow_checks_enabled = true;
        }
        let prev_hash_node_ids = self.node_id_hashing_mode;
        self.node_id_hashing_mode = NodeIdHashingMode::Ignore;

        f(self);

        self.node_id_hashing_mode = prev_hash_node_ids;
        self.overflow_checks_enabled = prev_overflow_checks;
    }

    #[inline]
    pub fn binop_can_panic_at_runtime(&self, binop: hir::BinOp_) -> bool
    {
        match binop {
            hir::BiAdd |
            hir::BiSub |
            hir::BiMul => self.overflow_checks_enabled,

            hir::BiDiv |
            hir::BiRem => true,

            hir::BiAnd |
            hir::BiOr |
            hir::BiBitXor |
            hir::BiBitAnd |
            hir::BiBitOr |
            hir::BiShl |
            hir::BiShr |
            hir::BiEq |
            hir::BiLt |
            hir::BiLe |
            hir::BiNe |
            hir::BiGe |
            hir::BiGt => false
        }
    }

    #[inline]
    pub fn unop_can_panic_at_runtime(&self, unop: hir::UnOp) -> bool
    {
        match unop {
            hir::UnDeref |
            hir::UnNot => false,
            hir::UnNeg => self.overflow_checks_enabled,
        }
    }
}


impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ast::NodeId {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'a, 'tcx>,
                                          hasher: &mut StableHasher<W>) {
        match hcx.node_id_hashing_mode {
            NodeIdHashingMode::Ignore => {
                // Most NodeIds in the HIR can be ignored, but if there is a
                // corresponding entry in the `trait_map` we need to hash that.
                // Make sure we don't ignore too much by checking that there is
                // no entry in a debug_assert!().
                debug_assert!(hcx.tcx.trait_map.get(self).is_none());
            }
            NodeIdHashingMode::HashDefPath => {
                hcx.tcx.hir.definitions().node_to_hir_id(*self).hash_stable(hcx, hasher);
            }
            NodeIdHashingMode::HashTraitsInScope => {
                if let Some(traits) = hcx.tcx.trait_map.get(self) {
                    // The ordering of the candidates is not fixed. So we hash
                    // the def-ids and then sort them and hash the collection.
                    let mut candidates: AccumulateVec<[_; 8]> =
                        traits.iter()
                              .map(|&hir::TraitCandidate { def_id, import_id: _ }| {
                                  hcx.def_path_hash(def_id)
                              })
                              .collect();
                    if traits.len() > 1 {
                        candidates.sort();
                    }
                    candidates.hash_stable(hcx, hasher);
                }
            }
        }
    }
}

impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for Span {

    // Hash a span in a stable way. We can't directly hash the span's BytePos
    // fields (that would be similar to hashing pointers, since those are just
    // offsets into the CodeMap). Instead, we hash the (file name, line, column)
    // triple, which stays the same even if the containing FileMap has moved
    // within the CodeMap.
    // Also note that we are hashing byte offsets for the column, not unicode
    // codepoint offsets. For the purpose of the hash that's sufficient.
    // Also, hashing filenames is expensive so we avoid doing it twice when the
    // span starts and ends in the same file, which is almost always the case.
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'a, 'tcx>,
                                          hasher: &mut StableHasher<W>) {
        use syntax_pos::Pos;

        if !hcx.hash_spans {
            return
        }

        // If this is not an empty or invalid span, we want to hash the last
        // position that belongs to it, as opposed to hashing the first
        // position past it.
        let span_hi = if self.hi > self.lo {
            // We might end up in the middle of a multibyte character here,
            // but that's OK, since we are not trying to decode anything at
            // this position.
            self.hi - ::syntax_pos::BytePos(1)
        } else {
            self.hi
        };

        {
            let loc1 = hcx.codemap().byte_pos_to_line_and_col(self.lo);
            let loc1 = loc1.as_ref()
                           .map(|&(ref fm, line, col)| (&fm.name[..], line, col.to_usize()))
                           .unwrap_or(("???", 0, 0));

            let loc2 = hcx.codemap().byte_pos_to_line_and_col(span_hi);
            let loc2 = loc2.as_ref()
                           .map(|&(ref fm, line, col)| (&fm.name[..], line, col.to_usize()))
                           .unwrap_or(("???", 0, 0));

            if loc1.0 == loc2.0 {
                std_hash::Hash::hash(&0u8, hasher);

                std_hash::Hash::hash(loc1.0, hasher);
                std_hash::Hash::hash(&loc1.1, hasher);
                std_hash::Hash::hash(&loc1.2, hasher);

                // Do not hash the file name twice
                std_hash::Hash::hash(&loc2.1, hasher);
                std_hash::Hash::hash(&loc2.2, hasher);
            } else {
                std_hash::Hash::hash(&1u8, hasher);

                std_hash::Hash::hash(loc1.0, hasher);
                std_hash::Hash::hash(&loc1.1, hasher);
                std_hash::Hash::hash(&loc1.2, hasher);

                std_hash::Hash::hash(loc2.0, hasher);
                std_hash::Hash::hash(&loc2.1, hasher);
                std_hash::Hash::hash(&loc2.2, hasher);
            }
        }

        if self.ctxt == SyntaxContext::empty() {
            0u8.hash_stable(hcx, hasher);
        } else {
            1u8.hash_stable(hcx, hasher);
            self.source_callsite().hash_stable(hcx, hasher);
        }
    }
}
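All of the `while_hashing_*` and `with_node_id_hashing_mode` helpers above share one save/override/restore pattern around a closure. A minimal standalone sketch of that pattern (generic Rust, not rustc internals):

struct Ctx {
    hash_spans: bool,
}

impl Ctx {
    // Temporarily override a flag for the duration of `f`, then restore it.
    fn while_hashing_spans<F: FnOnce(&mut Self)>(&mut self, hash_spans: bool, f: F) {
        let prev = self.hash_spans;
        self.hash_spans = hash_spans;
        f(self);
        self.hash_spans = prev;
    }
}

fn main() {
    let mut ctx = Ctx { hash_spans: true };
    ctx.while_hashing_spans(false, |ctx| {
        assert!(!ctx.hash_spans); // spans are ignored only inside the closure
    });
    assert!(ctx.hash_spans); // the previous value is restored afterwards
}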
src/librustc/ich/impls_const_math.rs (new file, 71 lines)
@@ -0,0 +1,71 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! This module contains `HashStable` implementations for various data types
//! from `rustc_const_math` in no particular order.

impl_stable_hash_for!(enum ::rustc_const_math::ConstFloat {
    F32(val),
    F64(val)
});

impl_stable_hash_for!(enum ::rustc_const_math::ConstInt {
    I8(val),
    I16(val),
    I32(val),
    I64(val),
    I128(val),
    Isize(val),
    U8(val),
    U16(val),
    U32(val),
    U64(val),
    U128(val),
    Usize(val)
});

impl_stable_hash_for!(enum ::rustc_const_math::ConstIsize {
    Is16(i16),
    Is32(i32),
    Is64(i64)
});

impl_stable_hash_for!(enum ::rustc_const_math::ConstUsize {
    Us16(i16),
    Us32(i32),
    Us64(i64)
});

impl_stable_hash_for!(enum ::rustc_const_math::ConstMathErr {
    NotInRange,
    CmpBetweenUnequalTypes,
    UnequalTypes(op),
    Overflow(op),
    ShiftNegative,
    DivisionByZero,
    RemainderByZero,
    UnsignedNegation,
    ULitOutOfRange(int_ty),
    LitOutOfRange(int_ty)
});

impl_stable_hash_for!(enum ::rustc_const_math::Op {
    Add,
    Sub,
    Mul,
    Div,
    Rem,
    Shr,
    Shl,
    Neg,
    BitAnd,
    BitOr,
    BitXor
});
src/librustc/ich/impls_hir.rs (new file, 1104 lines)
(File diff suppressed because it is too large.)
src/librustc/ich/impls_mir.rs (new file, 407 lines)
@@ -0,0 +1,407 @@
|
||||
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! This module contains `HashStable` implementations for various MIR data
|
||||
//! types in no particular order.
|
||||
|
||||
use ich::StableHashingContext;
|
||||
use mir;
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
|
||||
StableHasherResult};
|
||||
use std::mem;
|
||||
|
||||
|
||||
impl_stable_hash_for!(struct mir::SourceInfo { span, scope });
|
||||
impl_stable_hash_for!(enum mir::Mutability { Mut, Not });
|
||||
impl_stable_hash_for!(enum mir::BorrowKind { Shared, Unique, Mut });
|
||||
impl_stable_hash_for!(enum mir::LocalKind { Var, Temp, Arg, ReturnPointer });
|
||||
impl_stable_hash_for!(struct mir::LocalDecl<'tcx> { mutability, ty, name, source_info });
|
||||
impl_stable_hash_for!(struct mir::UpvarDecl { debug_name, by_ref });
|
||||
impl_stable_hash_for!(struct mir::BasicBlockData<'tcx> { statements, terminator, is_cleanup });
|
||||
impl_stable_hash_for!(struct mir::Terminator<'tcx> { source_info, kind });
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::Local {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
self.index().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::BasicBlock {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
self.index().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::Field {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
self.index().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::VisibilityScope {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
self.index().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::Promoted {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
self.index().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::TerminatorKind<'tcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
|
||||
match *self {
|
||||
mir::TerminatorKind::Goto { ref target } => {
|
||||
target.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::TerminatorKind::SwitchInt { ref discr,
|
||||
switch_ty,
|
||||
ref values,
|
||||
ref targets } => {
|
||||
discr.hash_stable(hcx, hasher);
|
||||
switch_ty.hash_stable(hcx, hasher);
|
||||
values.hash_stable(hcx, hasher);
|
||||
targets.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::TerminatorKind::Resume |
|
||||
mir::TerminatorKind::Return |
|
||||
mir::TerminatorKind::Unreachable => {}
|
||||
mir::TerminatorKind::Drop { ref location, target, unwind } => {
|
||||
location.hash_stable(hcx, hasher);
|
||||
target.hash_stable(hcx, hasher);
|
||||
unwind.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::TerminatorKind::DropAndReplace { ref location,
|
||||
ref value,
|
||||
target,
|
||||
unwind, } => {
|
||||
location.hash_stable(hcx, hasher);
|
||||
value.hash_stable(hcx, hasher);
|
||||
target.hash_stable(hcx, hasher);
|
||||
unwind.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::TerminatorKind::Call { ref func,
|
||||
ref args,
|
||||
ref destination,
|
||||
cleanup } => {
|
||||
func.hash_stable(hcx, hasher);
|
||||
args.hash_stable(hcx, hasher);
|
||||
destination.hash_stable(hcx, hasher);
|
||||
cleanup.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::TerminatorKind::Assert { ref cond,
|
||||
expected,
|
||||
ref msg,
|
||||
target,
|
||||
cleanup } => {
|
||||
cond.hash_stable(hcx, hasher);
|
||||
expected.hash_stable(hcx, hasher);
|
||||
msg.hash_stable(hcx, hasher);
|
||||
target.hash_stable(hcx, hasher);
|
||||
cleanup.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::AssertMessage<'tcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
|
||||
match *self {
|
||||
mir::AssertMessage::BoundsCheck { ref len, ref index } => {
|
||||
len.hash_stable(hcx, hasher);
|
||||
index.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::AssertMessage::Math(ref const_math_err) => {
|
||||
const_math_err.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct mir::Statement<'tcx> { source_info, kind });
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::StatementKind<'tcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
|
||||
match *self {
|
||||
mir::StatementKind::Assign(ref lvalue, ref rvalue) => {
|
||||
lvalue.hash_stable(hcx, hasher);
|
||||
rvalue.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::StatementKind::SetDiscriminant { ref lvalue, variant_index } => {
|
||||
lvalue.hash_stable(hcx, hasher);
|
||||
variant_index.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::StatementKind::StorageLive(ref lvalue) |
|
||||
mir::StatementKind::StorageDead(ref lvalue) => {
|
||||
lvalue.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::StatementKind::Nop => {}
|
||||
mir::StatementKind::InlineAsm { ref asm, ref outputs, ref inputs } => {
|
||||
asm.hash_stable(hcx, hasher);
|
||||
outputs.hash_stable(hcx, hasher);
|
||||
inputs.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::Lvalue<'tcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
mir::Lvalue::Local(ref local) => {
|
||||
local.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::Lvalue::Static(ref statik) => {
|
||||
statik.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::Lvalue::Projection(ref lvalue_projection) => {
|
||||
lvalue_projection.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx, B, V> HashStable<StableHashingContext<'a, 'tcx>> for mir::Projection<'tcx, B, V>
|
||||
where B: HashStable<StableHashingContext<'a, 'tcx>>,
|
||||
V: HashStable<StableHashingContext<'a, 'tcx>>
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let mir::Projection {
|
||||
ref base,
|
||||
ref elem,
|
||||
} = *self;
|
||||
|
||||
base.hash_stable(hcx, hasher);
|
||||
elem.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx, V> HashStable<StableHashingContext<'a, 'tcx>> for mir::ProjectionElem<'tcx, V>
|
||||
where V: HashStable<StableHashingContext<'a, 'tcx>>
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
mir::ProjectionElem::Deref => {}
|
||||
mir::ProjectionElem::Field(field, ty) => {
|
||||
field.hash_stable(hcx, hasher);
|
||||
ty.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::ProjectionElem::Index(ref value) => {
|
||||
value.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
|
||||
offset.hash_stable(hcx, hasher);
|
||||
min_length.hash_stable(hcx, hasher);
|
||||
from_end.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::ProjectionElem::Subslice { from, to } => {
|
||||
from.hash_stable(hcx, hasher);
|
||||
to.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::ProjectionElem::Downcast(adt_def, variant) => {
|
||||
adt_def.hash_stable(hcx, hasher);
|
||||
variant.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct mir::VisibilityScopeData { span, parent_scope });
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::Operand<'tcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
|
||||
match *self {
|
||||
mir::Operand::Consume(ref lvalue) => {
|
||||
lvalue.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::Operand::Constant(ref constant) => {
|
||||
constant.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::Rvalue<'tcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
|
||||
match *self {
|
||||
mir::Rvalue::Use(ref operand) => {
|
||||
operand.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::Rvalue::Repeat(ref operand, ref val) => {
|
||||
operand.hash_stable(hcx, hasher);
|
||||
val.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::Rvalue::Ref(region, borrow_kind, ref lvalue) => {
|
||||
region.hash_stable(hcx, hasher);
|
||||
borrow_kind.hash_stable(hcx, hasher);
|
||||
lvalue.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::Rvalue::Len(ref lvalue) => {
|
||||
lvalue.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::Rvalue::Cast(cast_kind, ref operand, ty) => {
|
||||
cast_kind.hash_stable(hcx, hasher);
|
||||
operand.hash_stable(hcx, hasher);
|
||||
ty.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::Rvalue::BinaryOp(op, ref operand1, ref operand2) |
|
||||
mir::Rvalue::CheckedBinaryOp(op, ref operand1, ref operand2) => {
|
||||
op.hash_stable(hcx, hasher);
|
||||
operand1.hash_stable(hcx, hasher);
|
||||
operand2.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::Rvalue::UnaryOp(op, ref operand) => {
|
||||
op.hash_stable(hcx, hasher);
|
||||
operand.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::Rvalue::Discriminant(ref lvalue) => {
|
||||
lvalue.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::Rvalue::Box(ty) => {
|
||||
ty.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::Rvalue::Aggregate(ref kind, ref operands) => {
|
||||
kind.hash_stable(hcx, hasher);
|
||||
operands.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(enum mir::CastKind {
|
||||
Misc,
|
||||
ReifyFnPointer,
|
||||
ClosureFnPointer,
|
||||
UnsafeFnPointer,
|
||||
Unsize
|
||||
});
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::AggregateKind<'tcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
mir::AggregateKind::Tuple => {}
|
||||
mir::AggregateKind::Array(t) => {
|
||||
t.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::AggregateKind::Adt(adt_def, idx, substs, active_field) => {
|
||||
adt_def.hash_stable(hcx, hasher);
|
||||
idx.hash_stable(hcx, hasher);
|
||||
substs.hash_stable(hcx, hasher);
|
||||
active_field.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::AggregateKind::Closure(def_id, ref substs) => {
|
||||
def_id.hash_stable(hcx, hasher);
|
||||
substs.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(enum mir::BinOp {
|
||||
Add,
|
||||
Sub,
|
||||
Mul,
|
||||
Div,
|
||||
Rem,
|
||||
BitXor,
|
||||
BitAnd,
|
||||
BitOr,
|
||||
Shl,
|
||||
Shr,
|
||||
Eq,
|
||||
Lt,
|
||||
Le,
|
||||
Ne,
|
||||
Ge,
|
||||
Gt
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(enum mir::UnOp {
|
||||
Not,
|
||||
Neg
|
||||
});
|
||||
|
||||
|
||||
impl_stable_hash_for!(struct mir::Constant<'tcx> { span, ty, literal });
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::Literal<'tcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
mir::Literal::Item { def_id, substs } => {
|
||||
def_id.hash_stable(hcx, hasher);
|
||||
substs.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::Literal::Value { ref value } => {
|
||||
value.hash_stable(hcx, hasher);
|
||||
}
|
||||
mir::Literal::Promoted { index } => {
|
||||
index.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct mir::Location { block, statement_index });
|
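The manual impls in this file all follow the same recipe: hash the enum discriminant first, then hash each variant's payload. A standalone sketch of that recipe, using std's `Hash`/`Hasher` and a made-up two-variant enum for illustration; note that `DefaultHasher` is not stable across runs or compiler versions, which is exactly why rustc uses its own `StableHasher` instead:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::mem;

enum Operand {
    Consume(String),
    Constant(u64),
}

fn hash_operand<H: Hasher>(op: &Operand, hasher: &mut H) {
    // Hash the discriminant first so that variants with identical payload
    // bytes can never collide with each other.
    mem::discriminant(op).hash(hasher);
    match *op {
        Operand::Consume(ref place) => place.hash(hasher),
        Operand::Constant(value) => value.hash(hasher),
    }
}

fn main() {
    let mut hasher = DefaultHasher::new();
    hash_operand(&Operand::Constant(7), &mut hasher);
    println!("hash = {:x}", hasher.finish());
}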
src/librustc/ich/impls_syntax.rs (new file, 301 lines)
@@ -0,0 +1,301 @@
|
||||
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! This module contains `HashStable` implementations for various data types
|
||||
//! from libsyntax in no particular order.
|
||||
|
||||
use ich::StableHashingContext;
|
||||
|
||||
use std::hash as std_hash;
|
||||
use std::mem;
|
||||
|
||||
use syntax::ast;
|
||||
use syntax::parse::token;
|
||||
use syntax::tokenstream;
|
||||
use syntax_pos::Span;
|
||||
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
|
||||
StableHasherResult};
|
||||
use rustc_data_structures::accumulate_vec::AccumulateVec;
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ::syntax::symbol::InternedString {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let s: &str = &**self;
|
||||
s.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ast::Name {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
self.as_str().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(enum ::syntax::ast::AsmDialect {
|
||||
Att,
|
||||
Intel
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(enum ::syntax::ext::base::MacroKind {
|
||||
Bang,
|
||||
Attr,
|
||||
Derive
|
||||
});
|
||||
|
||||
|
||||
impl_stable_hash_for!(enum ::syntax::abi::Abi {
|
||||
Cdecl,
|
||||
Stdcall,
|
||||
Fastcall,
|
||||
Vectorcall,
|
||||
Aapcs,
|
||||
Win64,
|
||||
SysV64,
|
||||
PtxKernel,
|
||||
Msp430Interrupt,
|
||||
X86Interrupt,
|
||||
Rust,
|
||||
C,
|
||||
System,
|
||||
RustIntrinsic,
|
||||
RustCall,
|
||||
PlatformIntrinsic,
|
||||
Unadjusted
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(struct ::syntax::attr::Deprecation { since, note });
|
||||
impl_stable_hash_for!(struct ::syntax::attr::Stability { level, feature, rustc_depr });
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ::syntax::attr::StabilityLevel {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
::syntax::attr::StabilityLevel::Unstable { ref reason, ref issue } => {
|
||||
reason.hash_stable(hcx, hasher);
|
||||
issue.hash_stable(hcx, hasher);
|
||||
}
|
||||
::syntax::attr::StabilityLevel::Stable { ref since } => {
|
||||
since.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct ::syntax::attr::RustcDeprecation { since, reason });
|
||||
|
||||
|
||||
impl_stable_hash_for!(enum ::syntax::attr::IntType {
|
||||
SignedInt(int_ty),
|
||||
UnsignedInt(uint_ty)
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(enum ::syntax::ast::LitIntType {
|
||||
Signed(int_ty),
|
||||
Unsigned(int_ty),
|
||||
Unsuffixed
|
||||
});
|
||||
|
||||
impl_stable_hash_for_spanned!(::syntax::ast::LitKind);
|
||||
impl_stable_hash_for!(enum ::syntax::ast::LitKind {
|
||||
Str(value, style),
|
||||
ByteStr(value),
|
||||
Byte(value),
|
||||
Char(value),
|
||||
Int(value, lit_int_type),
|
||||
Float(value, float_ty),
|
||||
FloatUnsuffixed(value),
|
||||
Bool(value)
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(enum ::syntax::ast::IntTy { Is, I8, I16, I32, I64, I128 });
|
||||
impl_stable_hash_for!(enum ::syntax::ast::UintTy { Us, U8, U16, U32, U64, U128 });
|
||||
impl_stable_hash_for!(enum ::syntax::ast::FloatTy { F32, F64 });
|
||||
impl_stable_hash_for!(enum ::syntax::ast::Unsafety { Unsafe, Normal });
|
||||
impl_stable_hash_for!(enum ::syntax::ast::Constness { Const, NotConst });
|
||||
impl_stable_hash_for!(enum ::syntax::ast::Defaultness { Default, Final });
|
||||
impl_stable_hash_for!(struct ::syntax::ast::Lifetime { id, span, name });
|
||||
impl_stable_hash_for!(enum ::syntax::ast::StrStyle { Cooked, Raw(pounds) });
|
||||
impl_stable_hash_for!(enum ::syntax::ast::AttrStyle { Outer, Inner });
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for [ast::Attribute] {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
// Some attributes are always ignored during hashing.
|
||||
let filtered: AccumulateVec<[&ast::Attribute; 8]> = self
|
||||
.iter()
|
||||
.filter(|attr| {
|
||||
!attr.is_sugared_doc &&
|
||||
attr.name().map(|name| !hcx.is_ignored_attr(name)).unwrap_or(true)
|
||||
})
|
||||
.collect();
|
||||
|
||||
filtered.len().hash_stable(hcx, hasher);
|
||||
for attr in filtered {
|
||||
attr.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ast::Attribute {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
// Make sure that these have been filtered out.
|
||||
debug_assert!(self.name().map(|name| !hcx.is_ignored_attr(name)).unwrap_or(true));
|
||||
debug_assert!(!self.is_sugared_doc);
|
||||
|
||||
let ast::Attribute {
|
||||
id: _,
|
||||
style,
|
||||
ref path,
|
||||
ref tokens,
|
||||
is_sugared_doc: _,
|
||||
span,
|
||||
} = *self;
|
||||
|
||||
style.hash_stable(hcx, hasher);
|
||||
path.segments.len().hash_stable(hcx, hasher);
|
||||
for segment in &path.segments {
|
||||
segment.identifier.name.hash_stable(hcx, hasher);
|
||||
}
|
||||
for tt in tokens.trees() {
|
||||
tt.hash_stable(hcx, hasher);
|
||||
}
|
||||
span.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for tokenstream::TokenTree {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
tokenstream::TokenTree::Token(span, ref token) => {
|
||||
span.hash_stable(hcx, hasher);
|
||||
hash_token(token, hcx, hasher, span);
|
||||
}
|
||||
tokenstream::TokenTree::Delimited(span, ref delimited) => {
|
||||
span.hash_stable(hcx, hasher);
|
||||
std_hash::Hash::hash(&delimited.delim, hasher);
|
||||
for sub_tt in delimited.stream().trees() {
|
||||
sub_tt.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for tokenstream::TokenStream {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
for sub_tt in self.trees() {
|
||||
sub_tt.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn hash_token<'a, 'tcx, W: StableHasherResult>(token: &token::Token,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>,
|
||||
error_reporting_span: Span) {
|
||||
mem::discriminant(token).hash_stable(hcx, hasher);
|
||||
match *token {
|
||||
token::Token::Eq |
|
||||
token::Token::Lt |
|
||||
token::Token::Le |
|
||||
token::Token::EqEq |
|
||||
token::Token::Ne |
|
||||
token::Token::Ge |
|
||||
token::Token::Gt |
|
||||
token::Token::AndAnd |
|
||||
token::Token::OrOr |
|
||||
token::Token::Not |
|
||||
token::Token::Tilde |
|
||||
token::Token::At |
|
||||
token::Token::Dot |
|
||||
token::Token::DotDot |
|
||||
token::Token::DotDotDot |
|
||||
token::Token::Comma |
|
||||
token::Token::Semi |
|
||||
token::Token::Colon |
|
||||
token::Token::ModSep |
|
||||
token::Token::RArrow |
|
||||
token::Token::LArrow |
|
||||
token::Token::FatArrow |
|
||||
token::Token::Pound |
|
||||
token::Token::Dollar |
|
||||
token::Token::Question |
|
||||
token::Token::Underscore |
|
||||
token::Token::Whitespace |
|
||||
token::Token::Comment |
|
||||
token::Token::Eof => {}
|
||||
|
||||
token::Token::BinOp(bin_op_token) |
|
||||
token::Token::BinOpEq(bin_op_token) => {
|
||||
std_hash::Hash::hash(&bin_op_token, hasher);
|
||||
}
|
||||
|
||||
token::Token::OpenDelim(delim_token) |
|
||||
token::Token::CloseDelim(delim_token) => {
|
||||
std_hash::Hash::hash(&delim_token, hasher);
|
||||
}
|
||||
token::Token::Literal(ref lit, ref opt_name) => {
|
||||
mem::discriminant(lit).hash_stable(hcx, hasher);
|
||||
match *lit {
|
||||
token::Lit::Byte(val) |
|
||||
token::Lit::Char(val) |
|
||||
token::Lit::Integer(val) |
|
||||
token::Lit::Float(val) |
|
||||
token::Lit::Str_(val) |
|
||||
token::Lit::ByteStr(val) => val.hash_stable(hcx, hasher),
|
||||
token::Lit::StrRaw(val, n) |
|
||||
token::Lit::ByteStrRaw(val, n) => {
|
||||
val.hash_stable(hcx, hasher);
|
||||
n.hash_stable(hcx, hasher);
|
||||
}
|
||||
};
|
||||
opt_name.hash_stable(hcx, hasher);
|
||||
}
|
||||
|
||||
token::Token::Ident(ident) |
|
||||
token::Token::Lifetime(ident) |
|
||||
token::Token::SubstNt(ident) => ident.name.hash_stable(hcx, hasher),
|
||||
|
||||
token::Token::Interpolated(ref non_terminal) => {
|
||||
// FIXME(mw): This could be implemented properly. It's just a
|
||||
// lot of work, since we would need to hash the AST
|
||||
// in a stable way, in addition to the HIR.
|
||||
// Since this is hardly used anywhere, just emit a
|
||||
// warning for now.
|
||||
if hcx.tcx().sess.opts.debugging_opts.incremental.is_some() {
|
||||
let msg = format!("Quasi-quoting might make incremental \
|
||||
compilation very inefficient: {:?}",
|
||||
non_terminal);
|
||||
hcx.tcx().sess.span_warn(error_reporting_span, &msg[..]);
|
||||
}
|
||||
|
||||
std_hash::Hash::hash(non_terminal, hasher);
|
||||
}
|
||||
|
||||
token::Token::DocComment(val) |
|
||||
token::Token::Shebang(val) => val.hash_stable(hcx, hasher),
|
||||
}
|
||||
}
|
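The `[ast::Attribute]` impl in this file filters out ignored attributes and hashes the filtered length before the elements, so the hash is unambiguous about where the sequence ends. A rough standalone sketch of that idea, again with std's hasher and plain strings standing in for attributes:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn hash_filtered_attrs<H: Hasher>(attrs: &[&str], ignored: &[&str], hasher: &mut H) {
    let filtered: Vec<_> = attrs.iter()
                                .filter(|attr| !ignored.contains(*attr))
                                .collect();
    // Prefix the count so ["a"] followed by ["b"] can never hash like ["a", "b"].
    filtered.len().hash(hasher);
    for attr in filtered {
        attr.hash(hasher);
    }
}

fn main() {
    let mut hasher = DefaultHasher::new();
    hash_filtered_attrs(&["inline", "rustc_dirty"], &["rustc_dirty"], &mut hasher);
    println!("hash = {:x}", hasher.finish());
}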
src/librustc/ich/impls_ty.rs (new file, 415 lines)
@@ -0,0 +1,415 @@
|
||||
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! This module contains `HashStable` implementations for various data types
|
||||
//! from rustc::ty in no particular order.
|
||||
|
||||
use ich::StableHashingContext;
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
|
||||
StableHasherResult};
|
||||
use std::hash as std_hash;
|
||||
use std::mem;
|
||||
use ty;
|
||||
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ty::Ty<'tcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let type_hash = hcx.tcx().type_id_hash(*self);
|
||||
type_hash.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct ty::ItemSubsts<'tcx> { substs });
|
||||
|
||||
impl<'a, 'tcx, T> HashStable<StableHashingContext<'a, 'tcx>> for ty::Slice<T>
|
||||
where T: HashStable<StableHashingContext<'a, 'tcx>> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
(&**self).hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ty::subst::Kind<'tcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
self.as_type().hash_stable(hcx, hasher);
|
||||
self.as_region().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ty::Region {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
ty::ReErased |
|
||||
ty::ReStatic |
|
||||
ty::ReEmpty => {
|
||||
// No variant fields to hash for these ...
|
||||
}
|
||||
ty::ReLateBound(db, ty::BrAnon(i)) => {
|
||||
db.depth.hash_stable(hcx, hasher);
|
||||
i.hash_stable(hcx, hasher);
|
||||
}
|
||||
ty::ReEarlyBound(ty::EarlyBoundRegion { index, name }) => {
|
||||
index.hash_stable(hcx, hasher);
|
||||
name.hash_stable(hcx, hasher);
|
||||
}
|
||||
ty::ReLateBound(..) |
|
||||
ty::ReFree(..) |
|
||||
ty::ReScope(..) |
|
||||
ty::ReVar(..) |
|
||||
ty::ReSkolemized(..) => {
|
||||
bug!("TypeIdHasher: unexpected region {:?}", *self)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ty::adjustment::AutoBorrow<'tcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
ty::adjustment::AutoBorrow::Ref(ref region, mutability) => {
|
||||
region.hash_stable(hcx, hasher);
|
||||
mutability.hash_stable(hcx, hasher);
|
||||
}
|
||||
ty::adjustment::AutoBorrow::RawPtr(mutability) => {
|
||||
mutability.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ty::adjustment::Adjust<'tcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
ty::adjustment::Adjust::NeverToAny |
|
||||
ty::adjustment::Adjust::ReifyFnPointer |
|
||||
ty::adjustment::Adjust::UnsafeFnPointer |
|
||||
ty::adjustment::Adjust::ClosureFnPointer |
|
||||
ty::adjustment::Adjust::MutToConstPointer => {}
|
||||
ty::adjustment::Adjust::DerefRef { autoderefs, ref autoref, unsize } => {
|
||||
autoderefs.hash_stable(hcx, hasher);
|
||||
autoref.hash_stable(hcx, hasher);
|
||||
unsize.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct ty::adjustment::Adjustment<'tcx> { kind, target });
|
||||
impl_stable_hash_for!(struct ty::MethodCall { expr_id, autoderef });
|
||||
impl_stable_hash_for!(struct ty::MethodCallee<'tcx> { def_id, ty, substs });
|
||||
impl_stable_hash_for!(struct ty::UpvarId { var_id, closure_expr_id });
|
||||
impl_stable_hash_for!(struct ty::UpvarBorrow<'tcx> { kind, region });
|
||||
|
||||
impl_stable_hash_for!(enum ty::BorrowKind {
|
||||
ImmBorrow,
|
||||
UniqueImmBorrow,
|
||||
MutBorrow
|
||||
});
|
||||
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ty::UpvarCapture<'tcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
ty::UpvarCapture::ByValue => {}
|
||||
ty::UpvarCapture::ByRef(ref up_var_borrow) => {
|
||||
up_var_borrow.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct ty::FnSig<'tcx> {
|
||||
inputs_and_output,
|
||||
variadic,
|
||||
unsafety,
|
||||
abi
|
||||
});
|
||||
|
||||
impl<'a, 'tcx, T> HashStable<StableHashingContext<'a, 'tcx>> for ty::Binder<T>
|
||||
where T: HashStable<StableHashingContext<'a, 'tcx>> + ty::fold::TypeFoldable<'tcx>
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
hcx.tcx().anonymize_late_bound_regions(self).0.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(enum ty::ClosureKind { Fn, FnMut, FnOnce });
|
||||
|
||||
impl_stable_hash_for!(enum ty::Visibility {
|
||||
Public,
|
||||
Restricted(def_id),
|
||||
Invisible
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(struct ty::TraitRef<'tcx> { def_id, substs });
|
||||
impl_stable_hash_for!(struct ty::TraitPredicate<'tcx> { trait_ref });
|
||||
impl_stable_hash_for!(tuple_struct ty::EquatePredicate<'tcx> { t1, t2 });
|
||||
|
||||
impl<'a, 'tcx, A, B> HashStable<StableHashingContext<'a, 'tcx>> for ty::OutlivesPredicate<A, B>
|
||||
where A: HashStable<StableHashingContext<'a, 'tcx>>,
|
||||
B: HashStable<StableHashingContext<'a, 'tcx>>,
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let ty::OutlivesPredicate(ref a, ref b) = *self;
|
||||
a.hash_stable(hcx, hasher);
|
||||
b.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct ty::ProjectionPredicate<'tcx> { projection_ty, ty });
|
||||
impl_stable_hash_for!(struct ty::ProjectionTy<'tcx> { trait_ref, item_name });
|
||||
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ty::Predicate<'tcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
ty::Predicate::Trait(ref pred) => {
|
||||
pred.hash_stable(hcx, hasher);
|
||||
}
|
||||
ty::Predicate::Equate(ref pred) => {
|
||||
pred.hash_stable(hcx, hasher);
|
||||
}
|
||||
ty::Predicate::RegionOutlives(ref pred) => {
|
||||
pred.hash_stable(hcx, hasher);
|
||||
}
|
||||
ty::Predicate::TypeOutlives(ref pred) => {
|
||||
pred.hash_stable(hcx, hasher);
|
||||
}
|
||||
ty::Predicate::Projection(ref pred) => {
|
||||
pred.hash_stable(hcx, hasher);
|
||||
}
|
||||
ty::Predicate::WellFormed(ty) => {
|
||||
ty.hash_stable(hcx, hasher);
|
||||
}
|
||||
ty::Predicate::ObjectSafe(def_id) => {
|
||||
def_id.hash_stable(hcx, hasher);
|
||||
}
|
||||
ty::Predicate::ClosureKind(def_id, closure_kind) => {
|
||||
def_id.hash_stable(hcx, hasher);
|
||||
closure_kind.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ty::AdtFlags {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
_: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
std_hash::Hash::hash(self, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct ty::VariantDef {
|
||||
did,
|
||||
name,
|
||||
discr,
|
||||
fields,
|
||||
ctor_kind
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(enum ty::VariantDiscr {
|
||||
Explicit(def_id),
|
||||
Relative(distance)
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(struct ty::FieldDef {
|
||||
did,
|
||||
name,
|
||||
vis
|
||||
});
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>>
|
||||
for ::middle::const_val::ConstVal<'tcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use middle::const_val::ConstVal;
|
||||
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
|
||||
match *self {
|
||||
ConstVal::Float(ref value) => {
|
||||
value.hash_stable(hcx, hasher);
|
||||
}
|
||||
ConstVal::Integral(ref value) => {
|
||||
value.hash_stable(hcx, hasher);
|
||||
}
|
||||
ConstVal::Str(ref value) => {
|
||||
value.hash_stable(hcx, hasher);
|
||||
}
|
||||
ConstVal::ByteStr(ref value) => {
|
||||
value.hash_stable(hcx, hasher);
|
||||
}
|
||||
ConstVal::Bool(value) => {
|
||||
value.hash_stable(hcx, hasher);
|
||||
}
|
||||
ConstVal::Function(def_id, substs) => {
|
||||
def_id.hash_stable(hcx, hasher);
|
||||
substs.hash_stable(hcx, hasher);
|
||||
}
|
||||
ConstVal::Struct(ref _name_value_map) => {
|
||||
// BTreeMap<ast::Name, ConstVal<'tcx>>),
|
||||
panic!("Ordering still unstable")
|
||||
}
|
||||
ConstVal::Tuple(ref value) => {
|
||||
value.hash_stable(hcx, hasher);
|
||||
}
|
||||
ConstVal::Array(ref value) => {
|
||||
value.hash_stable(hcx, hasher);
|
||||
}
|
||||
ConstVal::Repeat(ref value, times) => {
|
||||
value.hash_stable(hcx, hasher);
|
||||
times.hash_stable(hcx, hasher);
|
||||
}
|
||||
ConstVal::Char(value) => {
|
||||
value.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct ty::ClosureSubsts<'tcx> { substs });
|
||||
|
||||
|
||||
impl_stable_hash_for!(struct ty::GenericPredicates<'tcx> {
|
||||
parent,
|
||||
predicates
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(enum ty::Variance {
|
||||
Covariant,
|
||||
Invariant,
|
||||
Contravariant,
|
||||
Bivariant
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(enum ty::adjustment::CustomCoerceUnsized {
|
||||
Struct(index)
|
||||
});
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ty::Generics {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let ty::Generics {
|
||||
parent,
|
||||
parent_regions,
|
||||
parent_types,
|
||||
ref regions,
|
||||
ref types,
|
||||
|
||||
// Reverse map to each `TypeParameterDef`'s `index` field, from
|
||||
// `def_id.index` (`def_id.krate` is the same as the item's).
|
||||
type_param_to_index: _, // Don't hash this
|
||||
has_self,
|
||||
} = *self;
|
||||
|
||||
parent.hash_stable(hcx, hasher);
|
||||
parent_regions.hash_stable(hcx, hasher);
|
||||
parent_types.hash_stable(hcx, hasher);
|
||||
regions.hash_stable(hcx, hasher);
|
||||
types.hash_stable(hcx, hasher);
|
||||
has_self.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ty::RegionParameterDef {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let ty::RegionParameterDef {
|
||||
name,
|
||||
def_id,
|
||||
index,
|
||||
issue_32330: _,
|
||||
pure_wrt_drop
|
||||
} = *self;
|
||||
|
||||
name.hash_stable(hcx, hasher);
|
||||
def_id.hash_stable(hcx, hasher);
|
||||
index.hash_stable(hcx, hasher);
|
||||
pure_wrt_drop.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct ty::TypeParameterDef {
|
||||
name,
|
||||
def_id,
|
||||
index,
|
||||
has_default,
|
||||
object_lifetime_default,
|
||||
pure_wrt_drop
|
||||
});
|
||||
|
||||
|
||||
impl<'a, 'tcx, T> HashStable<StableHashingContext<'a, 'tcx>>
|
||||
for ::middle::resolve_lifetime::Set1<T>
|
||||
where T: HashStable<StableHashingContext<'a, 'tcx>>
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use middle::resolve_lifetime::Set1;
|
||||
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
Set1::Empty |
|
||||
Set1::Many => {
|
||||
// Nothing to do.
|
||||
}
|
||||
Set1::One(ref value) => {
|
||||
value.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(enum ::middle::resolve_lifetime::Region {
|
||||
Static,
|
||||
EarlyBound(index, decl),
|
||||
LateBound(db_index, decl),
|
||||
LateBoundAnon(db_index, anon_index),
|
||||
Free(call_site_scope_data, decl)
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(struct ::middle::region::CallSiteScopeData {
|
||||
fn_id,
|
||||
body_id
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(struct ty::DebruijnIndex {
|
||||
depth
|
||||
});
|
@@ -8,13 +8,23 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

+//! ICH - Incremental Compilation Hash
+
 pub use self::fingerprint::Fingerprint;
 pub use self::def_path_hash::DefPathHashes;
 pub use self::caching_codemap_view::CachingCodemapView;
+pub use self::hcx::{StableHashingContext, NodeIdHashingMode};

 mod fingerprint;
 mod def_path_hash;
 mod caching_codemap_view;
+mod hcx;
+
+mod impls_const_math;
+mod impls_hir;
+mod impls_mir;
+mod impls_ty;
+mod impls_syntax;

 pub const ATTR_DIRTY: &'static str = "rustc_dirty";
 pub const ATTR_CLEAN: &'static str = "rustc_clean";
@@ -22,6 +32,20 @@ pub const ATTR_DIRTY_METADATA: &'static str = "rustc_metadata_dirty";
 pub const ATTR_CLEAN_METADATA: &'static str = "rustc_metadata_clean";
 pub const ATTR_IF_THIS_CHANGED: &'static str = "rustc_if_this_changed";
 pub const ATTR_THEN_THIS_WOULD_NEED: &'static str = "rustc_then_this_would_need";
+pub const ATTR_PARTITION_REUSED: &'static str = "rustc_partition_reused";
+pub const ATTR_PARTITION_TRANSLATED: &'static str = "rustc_partition_translated";
+
+
+pub const DEP_GRAPH_ASSERT_ATTRS: &'static [&'static str] = &[
+    ATTR_IF_THIS_CHANGED,
+    ATTR_THEN_THIS_WOULD_NEED,
+    ATTR_DIRTY,
+    ATTR_CLEAN,
+    ATTR_DIRTY_METADATA,
+    ATTR_CLEAN_METADATA,
+    ATTR_PARTITION_REUSED,
+    ATTR_PARTITION_TRANSLATED,
+];

 pub const IGNORED_ATTRIBUTES: &'static [&'static str] = &[
     "cfg",
@@ -30,5 +54,7 @@ pub const IGNORED_ATTRIBUTES: &'static [&'static str] = &[
     ATTR_DIRTY,
     ATTR_CLEAN,
     ATTR_DIRTY_METADATA,
-    ATTR_CLEAN_METADATA
+    ATTR_CLEAN_METADATA,
+    ATTR_PARTITION_REUSED,
+    ATTR_PARTITION_TRANSLATED,
 ];
@@ -41,6 +41,7 @@
 #![feature(specialization)]
 #![feature(staged_api)]
 #![feature(unboxed_closures)]
+#![feature(discriminant_value)]

 extern crate arena;
 extern crate core;
@ -8,6 +8,8 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// ignore-tidy-linelength

macro_rules! enum_from_u32 {
    ($(#[$attr:meta])* pub enum $name:ident {
        $($variant:ident = $e:expr,)*
@ -59,3 +61,80 @@ macro_rules! span_bug {
        $crate::session::span_bug_fmt(file!(), line!(), $span, format_args!($($message)*))
    })
}

#[macro_export]
macro_rules! __impl_stable_hash_field {
    (DECL IGNORED) => (_);
    (DECL $name:ident) => (ref $name);
    (USE IGNORED $ctx:expr, $hasher:expr) => ({});
    (USE $name:ident, $ctx:expr, $hasher:expr) => ($name.hash_stable($ctx, $hasher));
}

#[macro_export]
macro_rules! impl_stable_hash_for {
    (enum $enum_name:path { $( $variant:ident $( ( $($arg:ident),* ) )* ),* }) => {
        impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a, 'tcx>> for $enum_name {
            #[inline]
            fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
                                                                                          __ctx: &mut $crate::ich::StableHashingContext<'a, 'tcx>,
                                                                                          __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher<W>) {
                use $enum_name::*;
                ::std::mem::discriminant(self).hash_stable(__ctx, __hasher);

                match *self {
                    $(
                        $variant $( ( $( __impl_stable_hash_field!(DECL $arg) ),* ) )* => {
                            $($( __impl_stable_hash_field!(USE $arg, __ctx, __hasher) );*)*
                        }
                    )*
                }
            }
        }
    };
    (struct $struct_name:path { $($field:ident),* }) => {
        impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a, 'tcx>> for $struct_name {
            #[inline]
            fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
                                                                                          __ctx: &mut $crate::ich::StableHashingContext<'a, 'tcx>,
                                                                                          __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher<W>) {
                let $struct_name {
                    $(ref $field),*
                } = *self;

                $( $field.hash_stable(__ctx, __hasher));*
            }
        }
    };
    (tuple_struct $struct_name:path { $($field:ident),* }) => {
        impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a, 'tcx>> for $struct_name {
            #[inline]
            fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
                                                                                          __ctx: &mut $crate::ich::StableHashingContext<'a, 'tcx>,
                                                                                          __hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher<W>) {
                let $struct_name (
                    $(ref $field),*
                ) = *self;

                $( $field.hash_stable(__ctx, __hasher));*
            }
        }
    };
}

#[macro_export]
macro_rules! impl_stable_hash_for_spanned {
    ($T:path) => (

        impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ::syntax::codemap::Spanned<$T>
        {
            #[inline]
            fn hash_stable<W: StableHasherResult>(&self,
                                                  hcx: &mut StableHashingContext<'a, 'tcx>,
                                                  hasher: &mut StableHasher<W>) {
                self.node.hash_stable(hcx, hasher);
                self.span.hash_stable(hcx, hasher);
            }
        }
    );
}

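// Illustrative sketch (not part of this patch; hypothetical `Foo`/`Bar` types):
// the macro above generates a `HashStable` impl that hashes each listed field
// in order, and for enums hashes the variant discriminant before the payload.
//
//     struct Foo { a: u32, b: bool }
//     impl_stable_hash_for!(struct Foo { a, b });
//
//     enum Bar { Unit, Pair(u32, u32) }
//     impl_stable_hash_for!(enum Bar { Unit, Pair(x, y) });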
@ -202,11 +202,14 @@ pub enum ImmutabilityBlame<'tcx> {
}

impl<'tcx> cmt_<'tcx> {
    fn resolve_field(&self, field_name: FieldName) -> (&'tcx ty::AdtDef, &'tcx ty::FieldDef)
    fn resolve_field(&self, field_name: FieldName) -> Option<(&'tcx ty::AdtDef, &'tcx ty::FieldDef)>
    {
        let adt_def = self.ty.ty_adt_def().unwrap_or_else(|| {
            bug!("interior cmt {:?} is not an ADT", self)
        });
        let adt_def = match self.ty.sty {
            ty::TyAdt(def, _) => def,
            ty::TyTuple(..) => return None,
            // closures get `Categorization::Upvar` rather than `Categorization::Interior`
            _ => bug!("interior cmt {:?} is not an ADT", self)
        };
        let variant_def = match self.cat {
            Categorization::Downcast(_, variant_did) => {
                adt_def.variant_with_id(variant_did)
@ -220,7 +223,7 @@ impl<'tcx> cmt_<'tcx> {
            NamedField(name) => variant_def.field_named(name),
            PositionalField(idx) => &variant_def.fields[idx]
        };
        (adt_def, field_def)
        Some((adt_def, field_def))
    }

    pub fn immutability_blame(&self) -> Option<ImmutabilityBlame<'tcx>> {
@ -232,8 +235,9 @@ impl<'tcx> cmt_<'tcx> {
            Categorization::Local(node_id) =>
                Some(ImmutabilityBlame::LocalDeref(node_id)),
            Categorization::Interior(ref base_cmt, InteriorField(field_name)) => {
                let (adt_def, field_def) = base_cmt.resolve_field(field_name);
                Some(ImmutabilityBlame::AdtFieldDeref(adt_def, field_def))
                base_cmt.resolve_field(field_name).map(|(adt_def, field_def)| {
                    ImmutabilityBlame::AdtFieldDeref(adt_def, field_def)
                })
            }
            Categorization::Upvar(Upvar { id, .. }) => {
                if let NoteClosureEnv(..) = self.note {
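// Illustrative sketch (not part of this patch; hypothetical helper): returning
// `Option` lets callers that may hit a tuple interior propagate "no ADT field"
// instead of panicking, in the same shape as the `immutability_blame` change:
//
//     fn field_blame<'tcx>(cmt: &cmt_<'tcx>, name: FieldName)
//                          -> Option<ImmutabilityBlame<'tcx>> {
//         cmt.resolve_field(name)
//            .map(|(adt, field)| ImmutabilityBlame::AdtFieldDeref(adt, field))
//     }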
@ -10,7 +10,9 @@

use std::cell::{Ref, RefCell};
use rustc_data_structures::indexed_vec::IndexVec;

use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
                                           StableHasherResult};
use ich::StableHashingContext;
use mir::{Mir, BasicBlock};

use rustc_serialize as serialize;
@ -33,6 +35,13 @@ impl serialize::Decodable for Cache {
    }
}

impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for Cache {
    fn hash_stable<W: StableHasherResult>(&self,
                                          _: &mut StableHashingContext<'a, 'tcx>,
                                          _: &mut StableHasher<W>) {
        // do nothing
    }
}

impl Cache {
    pub fn new() -> Self {
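// Illustrative sketch (not part of this patch; hypothetical type): a no-op
// `hash_stable`, like the `Cache` impl above, is the usual way to exclude pure
// cache state from a stable hash, so recomputing it can never change the ICH.
//
//     struct Lookup { table: Vec<u32>, memo: RefCell<Option<u32>> }
//
//     impl<CTX> HashStable<CTX> for Lookup {
//         fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX,
//                                               hasher: &mut StableHasher<W>) {
//             self.table.hash_stable(ctx, hasher);
//             // `memo` is derived data; deliberately not hashed.
//         }
//     }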
@ -243,6 +243,19 @@ impl<'tcx> Mir<'tcx> {
    }
}

impl_stable_hash_for!(struct Mir<'tcx> {
    basic_blocks,
    visibility_scopes,
    promoted,
    return_ty,
    local_decls,
    arg_count,
    upvar_decls,
    spread_arg,
    span,
    cache
});

impl<'tcx> Index<BasicBlock> for Mir<'tcx> {
    type Output = BasicBlockData<'tcx>;

@ -830,6 +843,11 @@ pub struct Static<'tcx> {
    pub ty: Ty<'tcx>,
}

impl_stable_hash_for!(struct Static<'tcx> {
    def_id,
    ty
});

/// The `Projection` data structure defines things of the form `B.x`
/// or `*B` or `B[index]`. Note that it is parameterized because it is
/// shared between `Constant` and `Lvalue`. See the aliases
@ -19,6 +19,7 @@ use dep_graph::{self, DepNode};
use hir::{map as hir_map, FreevarMap, TraitMap};
use hir::def::{Def, CtorKind, ExportMap};
use hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE};
use ich::StableHashingContext;
use middle::const_val::ConstVal;
use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem};
use middle::privacy::AccessLevels;
@ -50,6 +51,8 @@ use syntax_pos::{DUMMY_SP, Span};
use rustc_const_math::ConstInt;

use rustc_data_structures::accumulate_vec::IntoIter as AccIntoIter;
use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult,
                                           HashStable};

use hir;
use hir::itemlikevisit::ItemLikeVisitor;
@ -1379,6 +1382,25 @@ impl<'tcx> serialize::UseSpecializedEncodable for &'tcx AdtDef {

impl<'tcx> serialize::UseSpecializedDecodable for &'tcx AdtDef {}


impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for AdtDef {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'a, 'tcx>,
                                          hasher: &mut StableHasher<W>) {
        let ty::AdtDef {
            did,
            ref variants,
            ref flags,
            ref repr,
        } = *self;

        did.hash_stable(hcx, hasher);
        variants.hash_stable(hcx, hasher);
        flags.hash_stable(hcx, hasher);
        repr.hash_stable(hcx, hasher);
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum AdtKind { Struct, Union, Enum }

@ -1391,6 +1413,13 @@ pub struct ReprOptions {
    pub int: Option<attr::IntType>,
}

impl_stable_hash_for!(struct ReprOptions {
    c,
    packed,
    simd,
    int
});

impl ReprOptions {
    pub fn new(tcx: TyCtxt, did: DefId) -> ReprOptions {
        let mut ret = ReprOptions::default();
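// Illustrative note (not part of this patch): exhaustively destructuring the
// value, as the `AdtDef` impl above does, keeps the hash future-proof; adding
// a field later is a compile error until that field is hashed or explicitly
// ignored. A minimal sketch with a hypothetical type:
//
//     struct Widget { id: u64, kind: u8 }
//
//     impl<CTX> HashStable<CTX> for Widget {
//         fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX,
//                                               hasher: &mut StableHasher<W>) {
//             let Widget { id, kind } = *self;
//             id.hash_stable(ctx, hasher);
//             kind.hash_stable(ctx, hasher);
//         }
//     }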
@ -490,6 +490,17 @@ fn eval_const_expr_partial<'a, 'tcx>(cx: &ConstContext<'a, 'tcx>,
                _ => span_bug!(e.span, "typeck error"),
            })
        }
        (Char(a), Char(b)) => {
            Bool(match op.node {
                hir::BiEq => a == b,
                hir::BiNe => a != b,
                hir::BiLt => a < b,
                hir::BiLe => a <= b,
                hir::BiGe => a >= b,
                hir::BiGt => a > b,
                _ => span_bug!(e.span, "typeck error"),
            })
        }

        _ => signal!(e, MiscBinaryOp),
    }
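// Illustrative sketch (not part of this patch; hypothetical mini-evaluator):
// the new `(Char, Char)` arm mirrors the existing integer cases, mapping a
// comparison operator applied to two char constants to a boolean constant.
//
//     fn eval_char_cmp(op: &str, a: char, b: char) -> Option<bool> {
//         match op {
//             "==" => Some(a == b),
//             "!=" => Some(a != b),
//             "<"  => Some(a < b),
//             "<=" => Some(a <= b),
//             ">=" => Some(a >= b),
//             ">"  => Some(a > b),
//             _    => None,
//         }
//     }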
@ -37,6 +37,8 @@
#![feature(unsize)]
#![feature(i128_type)]
#![feature(conservative_impl_trait)]
#![feature(discriminant_value)]
#![feature(specialization)]

#![cfg_attr(unix, feature(libc))]
#![cfg_attr(test, feature(test))]
@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::hash::Hasher;
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
use std::mem;
use blake2b::Blake2bHasher;
@ -174,3 +174,193 @@ impl<W> Hasher for StableHasher<W> {
        self.write_ileb128(i as i64);
    }
}


/// Something that implements `HashStable<CTX>` can be hashed in a way that is
/// stable across multiple compilation sessions.
pub trait HashStable<CTX> {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut CTX,
                                          hasher: &mut StableHasher<W>);
}

// Implement HashStable by just calling `Hash::hash()`. This works fine for
// self-contained values that don't depend on the hashing context `CTX`.
macro_rules! impl_stable_hash_via_hash {
    ($t:ty) => (
        impl<CTX> HashStable<CTX> for $t {
            #[inline]
            fn hash_stable<W: StableHasherResult>(&self,
                                                  _: &mut CTX,
                                                  hasher: &mut StableHasher<W>) {
                ::std::hash::Hash::hash(self, hasher);
            }
        }
    );
}

impl_stable_hash_via_hash!(i8);
impl_stable_hash_via_hash!(i16);
impl_stable_hash_via_hash!(i32);
impl_stable_hash_via_hash!(i64);
impl_stable_hash_via_hash!(isize);

impl_stable_hash_via_hash!(u8);
impl_stable_hash_via_hash!(u16);
impl_stable_hash_via_hash!(u32);
impl_stable_hash_via_hash!(u64);
impl_stable_hash_via_hash!(usize);

impl_stable_hash_via_hash!(u128);
impl_stable_hash_via_hash!(i128);

impl_stable_hash_via_hash!(char);
impl_stable_hash_via_hash!(());

impl<CTX> HashStable<CTX> for f32 {
    fn hash_stable<W: StableHasherResult>(&self,
                                          ctx: &mut CTX,
                                          hasher: &mut StableHasher<W>) {
        let val: u32 = unsafe {
            ::std::mem::transmute(*self)
        };
        val.hash_stable(ctx, hasher);
    }
}

impl<CTX> HashStable<CTX> for f64 {
    fn hash_stable<W: StableHasherResult>(&self,
                                          ctx: &mut CTX,
                                          hasher: &mut StableHasher<W>) {
        let val: u64 = unsafe {
            ::std::mem::transmute(*self)
        };
        val.hash_stable(ctx, hasher);
    }
}

impl<T1: HashStable<CTX>, T2: HashStable<CTX>, CTX> HashStable<CTX> for (T1, T2) {
    fn hash_stable<W: StableHasherResult>(&self,
                                          ctx: &mut CTX,
                                          hasher: &mut StableHasher<W>) {
        self.0.hash_stable(ctx, hasher);
        self.1.hash_stable(ctx, hasher);
    }
}

impl<T: HashStable<CTX>, CTX> HashStable<CTX> for [T] {
    default fn hash_stable<W: StableHasherResult>(&self,
                                                  ctx: &mut CTX,
                                                  hasher: &mut StableHasher<W>) {
        self.len().hash_stable(ctx, hasher);
        for item in self {
            item.hash_stable(ctx, hasher);
        }
    }
}

impl<T: HashStable<CTX>, CTX> HashStable<CTX> for Vec<T> {
    #[inline]
    fn hash_stable<W: StableHasherResult>(&self,
                                          ctx: &mut CTX,
                                          hasher: &mut StableHasher<W>) {
        (&self[..]).hash_stable(ctx, hasher);
    }
}

impl<CTX> HashStable<CTX> for str {
    #[inline]
    fn hash_stable<W: StableHasherResult>(&self,
                                          _: &mut CTX,
                                          hasher: &mut StableHasher<W>) {
        self.len().hash(hasher);
        self.as_bytes().hash(hasher);
    }
}

impl<CTX> HashStable<CTX> for bool {
    #[inline]
    fn hash_stable<W: StableHasherResult>(&self,
                                          ctx: &mut CTX,
                                          hasher: &mut StableHasher<W>) {
        (if *self { 1u8 } else { 0u8 }).hash_stable(ctx, hasher);
    }
}


impl<T, CTX> HashStable<CTX> for Option<T>
    where T: HashStable<CTX>
{
    #[inline]
    fn hash_stable<W: StableHasherResult>(&self,
                                          ctx: &mut CTX,
                                          hasher: &mut StableHasher<W>) {
        if let Some(ref value) = *self {
            1u8.hash_stable(ctx, hasher);
            value.hash_stable(ctx, hasher);
        } else {
            0u8.hash_stable(ctx, hasher);
        }
    }
}

impl<'a, T, CTX> HashStable<CTX> for &'a T
    where T: HashStable<CTX>
{
    #[inline]
    fn hash_stable<W: StableHasherResult>(&self,
                                          ctx: &mut CTX,
                                          hasher: &mut StableHasher<W>) {
        (**self).hash_stable(ctx, hasher);
    }
}

impl<T, CTX> HashStable<CTX> for ::std::mem::Discriminant<T> {
    #[inline]
    fn hash_stable<W: StableHasherResult>(&self,
                                          _: &mut CTX,
                                          hasher: &mut StableHasher<W>) {
        ::std::hash::Hash::hash(self, hasher);
    }
}

impl<K, V, CTX> HashStable<CTX> for ::std::collections::BTreeMap<K, V>
    where K: Ord + HashStable<CTX>,
          V: HashStable<CTX>,
{
    fn hash_stable<W: StableHasherResult>(&self,
                                          ctx: &mut CTX,
                                          hasher: &mut StableHasher<W>) {
        self.len().hash_stable(ctx, hasher);
        for (k, v) in self {
            k.hash_stable(ctx, hasher);
            v.hash_stable(ctx, hasher);
        }
    }
}

impl<T, CTX> HashStable<CTX> for ::std::collections::BTreeSet<T>
    where T: Ord + HashStable<CTX>,
{
    fn hash_stable<W: StableHasherResult>(&self,
                                          ctx: &mut CTX,
                                          hasher: &mut StableHasher<W>) {
        self.len().hash_stable(ctx, hasher);
        for v in self {
            v.hash_stable(ctx, hasher);
        }
    }
}

impl<I: ::indexed_vec::Idx, T, CTX> HashStable<CTX> for ::indexed_vec::IndexVec<I, T>
    where T: HashStable<CTX>,
{
    fn hash_stable<W: StableHasherResult>(&self,
                                          ctx: &mut CTX,
                                          hasher: &mut StableHasher<W>) {
        self.len().hash_stable(ctx, hasher);
        for v in &self.raw {
            v.hash_stable(ctx, hasher);
        }
    }
}
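// Illustrative sketch (not part of this patch; hypothetical `Coord` type):
// user types normally compose the impls above instead of hashing bytes by
// hand, and the context parameter stays generic when no context is needed.
//
//     struct Coord { x: i32, y: i32, tags: Vec<u8> }
//
//     impl<CTX> HashStable<CTX> for Coord {
//         fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX,
//                                               hasher: &mut StableHasher<W>) {
//             self.x.hash_stable(ctx, hasher);
//             self.y.hash_stable(ctx, hasher);
//             self.tags.hash_stable(ctx, hasher);
//         }
//     }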
@ -27,24 +27,17 @@
|
||||
//! at the end of compilation would be different from those computed
|
||||
//! at the beginning.
|
||||
|
||||
use syntax::ast;
|
||||
use std::cell::RefCell;
|
||||
use std::hash::Hash;
|
||||
use rustc::dep_graph::DepNode;
|
||||
use rustc::hir;
|
||||
use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
|
||||
use rustc::hir::intravisit as visit;
|
||||
use rustc::hir::intravisit::{Visitor, NestedVisitorMap};
|
||||
use rustc::ich::{Fingerprint, DefPathHashes, CachingCodemapView};
|
||||
use rustc::hir::itemlikevisit::ItemLikeVisitor;
|
||||
use rustc::ich::{Fingerprint, StableHashingContext};
|
||||
use rustc::ty::TyCtxt;
|
||||
use rustc_data_structures::stable_hasher::StableHasher;
|
||||
use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc::util::common::record_time;
|
||||
use rustc::session::config::DebugInfoLevel::NoDebugInfo;
|
||||
|
||||
use self::svh_visitor::StrictVersionHashVisitor;
|
||||
|
||||
mod svh_visitor;
|
||||
|
||||
pub type IchHasher = StableHasher<Fingerprint>;
|
||||
|
||||
@ -94,91 +87,42 @@ impl<'a> ::std::ops::Index<&'a DepNode<DefId>> for IncrementalHashesMap {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
|
||||
-> IncrementalHashesMap {
|
||||
let _ignore = tcx.dep_graph.in_ignore();
|
||||
let krate = tcx.hir.krate();
|
||||
let hash_spans = tcx.sess.opts.debuginfo != NoDebugInfo;
|
||||
let mut visitor = HashItemsVisitor {
|
||||
tcx: tcx,
|
||||
hashes: IncrementalHashesMap::new(),
|
||||
def_path_hashes: DefPathHashes::new(tcx),
|
||||
codemap: CachingCodemapView::new(tcx),
|
||||
hash_spans: hash_spans,
|
||||
};
|
||||
record_time(&tcx.sess.perf_stats.incr_comp_hashes_time, || {
|
||||
visitor.calculate_def_id(DefId::local(CRATE_DEF_INDEX), |v| {
|
||||
v.hash_crate_root_module(krate);
|
||||
});
|
||||
krate.visit_all_item_likes(&mut visitor.as_deep_visitor());
|
||||
|
||||
for macro_def in krate.exported_macros.iter() {
|
||||
visitor.calculate_node_id(macro_def.id,
|
||||
|v| v.visit_macro_def(macro_def));
|
||||
}
|
||||
});
|
||||
|
||||
tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64);
|
||||
|
||||
record_time(&tcx.sess.perf_stats.svh_time, || visitor.compute_crate_hash());
|
||||
visitor.hashes
|
||||
}
|
||||
|
||||
struct HashItemsVisitor<'a, 'tcx: 'a> {
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
def_path_hashes: DefPathHashes<'a, 'tcx>,
|
||||
codemap: CachingCodemapView<'tcx>,
|
||||
struct ComputeItemHashesVisitor<'a, 'tcx: 'a> {
|
||||
hcx: StableHashingContext<'a, 'tcx>,
|
||||
hashes: IncrementalHashesMap,
|
||||
hash_spans: bool,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> HashItemsVisitor<'a, 'tcx> {
|
||||
fn calculate_node_id<W>(&mut self, id: ast::NodeId, walk_op: W)
|
||||
where W: for<'v> FnMut(&mut StrictVersionHashVisitor<'v, 'a, 'tcx>)
|
||||
impl<'a, 'tcx: 'a> ComputeItemHashesVisitor<'a, 'tcx> {
|
||||
fn compute_and_store_ich_for_item_like<T>(&mut self,
|
||||
dep_node: DepNode<DefId>,
|
||||
hash_bodies: bool,
|
||||
item_like: T)
|
||||
where T: HashStable<StableHashingContext<'a, 'tcx>>
|
||||
{
|
||||
let def_id = self.tcx.hir.local_def_id(id);
|
||||
self.calculate_def_id(def_id, walk_op)
|
||||
}
|
||||
let mut hasher = IchHasher::new();
|
||||
self.hcx.while_hashing_hir_bodies(hash_bodies, |hcx| {
|
||||
item_like.hash_stable(hcx, &mut hasher);
|
||||
});
|
||||
|
||||
fn calculate_def_id<W>(&mut self, def_id: DefId, mut walk_op: W)
|
||||
where W: for<'v> FnMut(&mut StrictVersionHashVisitor<'v, 'a, 'tcx>)
|
||||
{
|
||||
assert!(def_id.is_local());
|
||||
debug!("HashItemsVisitor::calculate(def_id={:?})", def_id);
|
||||
self.calculate_def_hash(DepNode::Hir(def_id), false, &mut walk_op);
|
||||
self.calculate_def_hash(DepNode::HirBody(def_id), true, &mut walk_op);
|
||||
}
|
||||
|
||||
fn calculate_def_hash<W>(&mut self,
|
||||
dep_node: DepNode<DefId>,
|
||||
hash_bodies: bool,
|
||||
walk_op: &mut W)
|
||||
where W: for<'v> FnMut(&mut StrictVersionHashVisitor<'v, 'a, 'tcx>)
|
||||
{
|
||||
let mut state = IchHasher::new();
|
||||
walk_op(&mut StrictVersionHashVisitor::new(&mut state,
|
||||
self.tcx,
|
||||
&mut self.def_path_hashes,
|
||||
&mut self.codemap,
|
||||
self.hash_spans,
|
||||
hash_bodies));
|
||||
let bytes_hashed = state.bytes_hashed();
|
||||
let item_hash = state.finish();
|
||||
let bytes_hashed = hasher.bytes_hashed();
|
||||
let item_hash = hasher.finish();
|
||||
debug!("calculate_def_hash: dep_node={:?} hash={:?}", dep_node, item_hash);
|
||||
self.hashes.insert(dep_node, item_hash);
|
||||
|
||||
let bytes_hashed = self.tcx.sess.perf_stats.incr_comp_bytes_hashed.get() +
|
||||
let tcx = self.hcx.tcx();
|
||||
let bytes_hashed =
|
||||
tcx.sess.perf_stats.incr_comp_bytes_hashed.get() +
|
||||
bytes_hashed;
|
||||
self.tcx.sess.perf_stats.incr_comp_bytes_hashed.set(bytes_hashed);
|
||||
tcx.sess.perf_stats.incr_comp_bytes_hashed.set(bytes_hashed);
|
||||
}
|
||||
|
||||
fn compute_crate_hash(&mut self) {
|
||||
let krate = self.tcx.hir.krate();
|
||||
let tcx = self.hcx.tcx();
|
||||
let krate = tcx.hir.krate();
|
||||
|
||||
let mut crate_state = IchHasher::new();
|
||||
|
||||
let crate_disambiguator = self.tcx.sess.local_crate_disambiguator();
|
||||
let crate_disambiguator = tcx.sess.local_crate_disambiguator();
|
||||
"crate_disambiguator".hash(&mut crate_state);
|
||||
crate_disambiguator.as_str().len().hash(&mut crate_state);
|
||||
crate_disambiguator.as_str().hash(&mut crate_state);
|
||||
@ -186,7 +130,7 @@ impl<'a, 'tcx> HashItemsVisitor<'a, 'tcx> {
|
||||
// add each item (in some deterministic order) to the overall
|
||||
// crate hash.
|
||||
{
|
||||
let def_path_hashes = &mut self.def_path_hashes;
|
||||
let hcx = &mut self.hcx;
|
||||
let mut item_hashes: Vec<_> =
|
||||
self.hashes.iter()
|
||||
.map(|(item_dep_node, &item_hash)| {
|
||||
@ -194,7 +138,7 @@ impl<'a, 'tcx> HashItemsVisitor<'a, 'tcx> {
|
||||
// DepNode<u64> where the u64 is the
|
||||
// hash of the def-id's def-path:
|
||||
let item_dep_node =
|
||||
item_dep_node.map_def(|&did| Some(def_path_hashes.hash(did)))
|
||||
item_dep_node.map_def(|&did| Some(hcx.def_path_hash(did)))
|
||||
.unwrap();
|
||||
(item_dep_node, item_hash)
|
||||
})
|
||||
@ -203,40 +147,85 @@ impl<'a, 'tcx> HashItemsVisitor<'a, 'tcx> {
|
||||
item_hashes.hash(&mut crate_state);
|
||||
}
|
||||
|
||||
{
|
||||
let mut visitor = StrictVersionHashVisitor::new(&mut crate_state,
|
||||
self.tcx,
|
||||
&mut self.def_path_hashes,
|
||||
&mut self.codemap,
|
||||
self.hash_spans,
|
||||
false);
|
||||
visitor.hash_attributes(&krate.attrs);
|
||||
}
|
||||
krate.attrs.hash_stable(&mut self.hcx, &mut crate_state);
|
||||
|
||||
let crate_hash = crate_state.finish();
|
||||
self.hashes.insert(DepNode::Krate, crate_hash);
|
||||
debug!("calculate_crate_hash: crate_hash={:?}", crate_hash);
|
||||
}
|
||||
|
||||
fn hash_crate_root_module(&mut self, krate: &'tcx hir::Crate) {
|
||||
let hir::Crate {
|
||||
ref module,
|
||||
// Crate attributes are not copied over to the root `Mod`, so hash
|
||||
// them explicitly here.
|
||||
ref attrs,
|
||||
span,
|
||||
|
||||
// These fields are handled separately:
|
||||
exported_macros: _,
|
||||
items: _,
|
||||
trait_items: _,
|
||||
impl_items: _,
|
||||
bodies: _,
|
||||
trait_impls: _,
|
||||
trait_default_impl: _,
|
||||
body_ids: _,
|
||||
} = *krate;
|
||||
|
||||
let def_id = DefId::local(CRATE_DEF_INDEX);
|
||||
self.compute_and_store_ich_for_item_like(DepNode::Hir(def_id),
|
||||
false,
|
||||
(module, (span, attrs)));
|
||||
self.compute_and_store_ich_for_item_like(DepNode::HirBody(def_id),
|
||||
true,
|
||||
(module, (span, attrs)));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
impl<'a, 'tcx> Visitor<'tcx> for HashItemsVisitor<'a, 'tcx> {
|
||||
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
|
||||
NestedVisitorMap::None
|
||||
}
|
||||
|
||||
impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for ComputeItemHashesVisitor<'a, 'tcx> {
|
||||
fn visit_item(&mut self, item: &'tcx hir::Item) {
|
||||
self.calculate_node_id(item.id, |v| v.visit_item(item));
|
||||
visit::walk_item(self, item);
|
||||
let def_id = self.hcx.tcx().hir.local_def_id(item.id);
|
||||
self.compute_and_store_ich_for_item_like(DepNode::Hir(def_id), false, item);
|
||||
self.compute_and_store_ich_for_item_like(DepNode::HirBody(def_id), true, item);
|
||||
}
|
||||
|
||||
fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) {
|
||||
self.calculate_node_id(trait_item.id, |v| v.visit_trait_item(trait_item));
|
||||
visit::walk_trait_item(self, trait_item);
|
||||
fn visit_trait_item(&mut self, item: &'tcx hir::TraitItem) {
|
||||
let def_id = self.hcx.tcx().hir.local_def_id(item.id);
|
||||
self.compute_and_store_ich_for_item_like(DepNode::Hir(def_id), false, item);
|
||||
self.compute_and_store_ich_for_item_like(DepNode::HirBody(def_id), true, item);
|
||||
}
|
||||
|
||||
fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) {
|
||||
self.calculate_node_id(impl_item.id, |v| v.visit_impl_item(impl_item));
|
||||
visit::walk_impl_item(self, impl_item);
|
||||
fn visit_impl_item(&mut self, item: &'tcx hir::ImplItem) {
|
||||
let def_id = self.hcx.tcx().hir.local_def_id(item.id);
|
||||
self.compute_and_store_ich_for_item_like(DepNode::Hir(def_id), false, item);
|
||||
self.compute_and_store_ich_for_item_like(DepNode::HirBody(def_id), true, item);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
|
||||
-> IncrementalHashesMap {
|
||||
let _ignore = tcx.dep_graph.in_ignore();
|
||||
let krate = tcx.hir.krate();
|
||||
|
||||
let mut visitor = ComputeItemHashesVisitor {
|
||||
hcx: StableHashingContext::new(tcx),
|
||||
hashes: IncrementalHashesMap::new(),
|
||||
};
|
||||
|
||||
record_time(&tcx.sess.perf_stats.incr_comp_hashes_time, || {
|
||||
visitor.hash_crate_root_module(krate);
|
||||
krate.visit_all_item_likes(&mut visitor);
|
||||
|
||||
for macro_def in krate.exported_macros.iter() {
|
||||
let def_id = tcx.hir.local_def_id(macro_def.id);
|
||||
visitor.compute_and_store_ich_for_item_like(DepNode::Hir(def_id), false, macro_def);
|
||||
visitor.compute_and_store_ich_for_item_like(DepNode::HirBody(def_id), true, macro_def);
|
||||
}
|
||||
});
|
||||
|
||||
tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64);
|
||||
|
||||
record_time(&tcx.sess.perf_stats.svh_time, || visitor.compute_crate_hash());
|
||||
visitor.hashes
|
||||
}
|
||||
|
File diff suppressed because it is too large
@ -22,7 +22,6 @@
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(rand)]
#![feature(core_intrinsics)]
#![feature(conservative_impl_trait)]
#![cfg_attr(stage0, feature(pub_restricted))]

@ -32,8 +32,7 @@ use syntax::ast;

use {ModuleSource, ModuleTranslation};

const PARTITION_REUSED: &'static str = "rustc_partition_reused";
const PARTITION_TRANSLATED: &'static str = "rustc_partition_translated";
use rustc::ich::{ATTR_PARTITION_REUSED, ATTR_PARTITION_TRANSLATED};

const MODULE: &'static str = "module";
const CFG: &'static str = "cfg";
@ -62,9 +61,9 @@ struct AssertModuleSource<'a, 'tcx: 'a> {

impl<'a, 'tcx> AssertModuleSource<'a, 'tcx> {
    fn check_attr(&self, attr: &ast::Attribute) {
        let disposition = if attr.check_name(PARTITION_REUSED) {
        let disposition = if attr.check_name(ATTR_PARTITION_REUSED) {
            Disposition::Reused
        } else if attr.check_name(PARTITION_TRANSLATED) {
        } else if attr.check_name(ATTR_PARTITION_TRANSLATED) {
            Disposition::Translated
        } else {
            return;
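// Illustrative sketch (not part of this patch): incremental test files assert
// codegen-unit reuse with these attributes; the `module`/`cfg` keys correspond
// to the MODULE and CFG constants above. The values shown are hypothetical.
//
//     #![rustc_partition_reused(module="my_crate", cfg="rpass2")]
//     #![rustc_partition_translated(module="my_crate-mod1", cfg="rpass2")]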
@ -16,10 +16,10 @@
//! of `fmt::Display`. Example usage:
//!
//! ```rust,ignore
//! use rustdoc::html::markdown::{Markdown, MarkdownOutputStyle};
//! use rustdoc::html::markdown::Markdown;
//!
//! let s = "My *markdown* _text_";
//! let html = format!("{}", Markdown(s, MarkdownOutputStyle::Fancy));
//! let html = format!("{}", Markdown(s));
//! // ... something using html
//! ```
|
||||
@ -27,7 +27,7 @@
|
||||
|
||||
use std::ascii::AsciiExt;
|
||||
use std::cell::RefCell;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::{HashMap, VecDeque};
|
||||
use std::default::Default;
|
||||
use std::fmt::{self, Write};
|
||||
use std::str;
|
||||
@ -37,43 +37,23 @@ use syntax::codemap::Span;
|
||||
use html::render::derive_id;
|
||||
use html::toc::TocBuilder;
|
||||
use html::highlight;
|
||||
use html::escape::Escape;
|
||||
use test;
|
||||
|
||||
use pulldown_cmark::{self, Event, Parser, Tag};
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub enum MarkdownOutputStyle {
|
||||
Compact,
|
||||
Fancy,
|
||||
}
|
||||
|
||||
impl MarkdownOutputStyle {
|
||||
pub fn is_compact(&self) -> bool {
|
||||
match *self {
|
||||
MarkdownOutputStyle::Compact => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_fancy(&self) -> bool {
|
||||
match *self {
|
||||
MarkdownOutputStyle::Fancy => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
use pulldown_cmark::{html, Event, Tag, Parser};
|
||||
use pulldown_cmark::{Options, OPTION_ENABLE_FOOTNOTES, OPTION_ENABLE_TABLES};
|
||||
|
||||
/// A unit struct which has the `fmt::Display` trait implemented. When
|
||||
/// formatted, this struct will emit the HTML corresponding to the rendered
|
||||
/// version of the contained markdown string.
|
||||
// The second parameter is whether we need a shorter version or not.
|
||||
pub struct Markdown<'a>(pub &'a str, pub MarkdownOutputStyle);
|
||||
pub struct Markdown<'a>(pub &'a str);
|
||||
/// A unit struct like `Markdown`, that renders the markdown with a
|
||||
/// table of contents.
|
||||
pub struct MarkdownWithToc<'a>(pub &'a str);
|
||||
/// A unit struct like `Markdown`, that renders the markdown escaping HTML tags.
|
||||
pub struct MarkdownHtml<'a>(pub &'a str);
|
||||
/// A unit struct like `Markdown`, that renders only the first paragraph.
|
||||
pub struct MarkdownSummaryLine<'a>(pub &'a str);
|
||||
|
||||
/// Returns Some(code) if `s` is a line that should be stripped from
|
||||
/// documentation but used in example code. `code` is the portion of
|
||||
@ -90,12 +70,21 @@ fn stripped_filtered_line<'a>(s: &'a str) -> Option<&'a str> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a new string with all consecutive whitespace collapsed into
|
||||
/// single spaces.
|
||||
/// Convert chars from a title for an id.
|
||||
///
|
||||
/// Any leading or trailing whitespace will be trimmed.
|
||||
fn collapse_whitespace(s: &str) -> String {
|
||||
s.split_whitespace().collect::<Vec<_>>().join(" ")
|
||||
/// "Hello, world!" -> "hello-world"
|
||||
fn slugify(c: char) -> Option<char> {
|
||||
if c.is_alphanumeric() || c == '-' || c == '_' {
|
||||
if c.is_ascii() {
|
||||
Some(c.to_ascii_lowercase())
|
||||
} else {
|
||||
Some(c)
|
||||
}
|
||||
} else if c.is_whitespace() && c.is_ascii() {
|
||||
Some('-')
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
// Information about the playground if a URL has been specified, containing an
|
||||
@ -104,103 +93,50 @@ thread_local!(pub static PLAYGROUND: RefCell<Option<(Option<String>, String)>> =
|
||||
RefCell::new(None)
|
||||
});
|
||||
|
||||
macro_rules! event_loop_break {
|
||||
($parser:expr, $toc_builder:expr, $shorter:expr, $buf:expr, $escape:expr, $id:expr,
|
||||
$($end_event:pat)|*) => {{
|
||||
fn inner(id: &mut Option<&mut String>, s: &str) {
|
||||
if let Some(ref mut id) = *id {
|
||||
id.push_str(s);
|
||||
/// Adds syntax highlighting and playground Run buttons to rust code blocks.
|
||||
struct CodeBlocks<'a, I: Iterator<Item = Event<'a>>> {
|
||||
inner: I,
|
||||
}
|
||||
|
||||
impl<'a, I: Iterator<Item = Event<'a>>> CodeBlocks<'a, I> {
|
||||
fn new(iter: I) -> Self {
|
||||
CodeBlocks {
|
||||
inner: iter,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'a, I> {
|
||||
type Item = Event<'a>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let event = self.inner.next();
|
||||
if let Some(Event::Start(Tag::CodeBlock(lang))) = event {
|
||||
if !LangString::parse(&lang).rust {
|
||||
return Some(Event::Start(Tag::CodeBlock(lang)));
|
||||
}
|
||||
} else {
|
||||
return event;
|
||||
}
|
||||
while let Some(event) = $parser.next() {
|
||||
match event {
|
||||
$($end_event)|* => break,
|
||||
Event::Text(ref s) => {
|
||||
debug!("Text");
|
||||
inner($id, s);
|
||||
if $escape {
|
||||
$buf.push_str(&format!("{}", Escape(s)));
|
||||
} else {
|
||||
$buf.push_str(s);
|
||||
}
|
||||
}
|
||||
Event::SoftBreak => {
|
||||
debug!("SoftBreak");
|
||||
if !$buf.is_empty() {
|
||||
$buf.push(' ');
|
||||
}
|
||||
}
|
||||
x => {
|
||||
looper($parser, &mut $buf, Some(x), $toc_builder, $shorter, $id);
|
||||
}
|
||||
}
|
||||
}
|
||||
}}
|
||||
}
|
||||
|
||||
struct ParserWrapper<'a> {
|
||||
parser: Parser<'a>,
|
||||
// The key is the footnote reference. The value is the footnote definition and the id.
|
||||
footnotes: HashMap<String, (String, u16)>,
|
||||
}
|
||||
|
||||
impl<'a> ParserWrapper<'a> {
|
||||
pub fn new(s: &'a str) -> ParserWrapper<'a> {
|
||||
ParserWrapper {
|
||||
parser: Parser::new_ext(s, pulldown_cmark::OPTION_ENABLE_TABLES |
|
||||
pulldown_cmark::OPTION_ENABLE_FOOTNOTES),
|
||||
footnotes: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn next(&mut self) -> Option<Event<'a>> {
|
||||
self.parser.next()
|
||||
}
|
||||
|
||||
pub fn get_entry(&mut self, key: &str) -> &mut (String, u16) {
|
||||
let new_id = self.footnotes.keys().count() + 1;
|
||||
let key = key.to_owned();
|
||||
self.footnotes.entry(key).or_insert((String::new(), new_id as u16))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn render(w: &mut fmt::Formatter,
|
||||
s: &str,
|
||||
print_toc: bool,
|
||||
shorter: MarkdownOutputStyle) -> fmt::Result {
|
||||
fn code_block(parser: &mut ParserWrapper, buffer: &mut String, lang: &str) {
|
||||
debug!("CodeBlock");
|
||||
let mut origtext = String::new();
|
||||
while let Some(event) = parser.next() {
|
||||
for event in &mut self.inner {
|
||||
match event {
|
||||
Event::End(Tag::CodeBlock(_)) => break,
|
||||
Event::End(Tag::CodeBlock(..)) => break,
|
||||
Event::Text(ref s) => {
|
||||
origtext.push_str(s);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
let origtext = origtext.trim_left();
|
||||
debug!("docblock: ==============\n{:?}\n=======", origtext);
|
||||
|
||||
let lines = origtext.lines().filter(|l| {
|
||||
stripped_filtered_line(*l).is_none()
|
||||
});
|
||||
let text = lines.collect::<Vec<&str>>().join("\n");
|
||||
let block_info = if lang.is_empty() {
|
||||
LangString::all_false()
|
||||
} else {
|
||||
LangString::parse(lang)
|
||||
};
|
||||
if !block_info.rust {
|
||||
buffer.push_str(&format!("<pre><code class=\"language-{}\">{}</code></pre>",
|
||||
lang, text));
|
||||
return
|
||||
}
|
||||
PLAYGROUND.with(|play| {
|
||||
// insert newline to clearly separate it from the
|
||||
// previous block so we can shorten the html output
|
||||
buffer.push('\n');
|
||||
let mut s = String::from("\n");
|
||||
let playground_button = play.borrow().as_ref().and_then(|&(ref krate, ref url)| {
|
||||
if url.is_empty() {
|
||||
return None;
|
||||
@ -210,7 +146,7 @@ pub fn render(w: &mut fmt::Formatter,
|
||||
}).collect::<Vec<&str>>().join("\n");
|
||||
let krate = krate.as_ref().map(|s| &**s);
|
||||
let test = test::maketest(&test, krate, false,
|
||||
&Default::default());
|
||||
&Default::default());
|
||||
let channel = if test.contains("#![feature(") {
|
||||
"&version=nightly"
|
||||
} else {
|
||||
@ -239,376 +175,186 @@ pub fn render(w: &mut fmt::Formatter,
|
||||
url, test_escaped, channel
|
||||
))
|
||||
});
|
||||
buffer.push_str(&highlight::render_with_highlighting(
|
||||
&text,
|
||||
Some("rust-example-rendered"),
|
||||
None,
|
||||
playground_button.as_ref().map(String::as_str)));
|
||||
});
|
||||
s.push_str(&highlight::render_with_highlighting(
|
||||
&text,
|
||||
Some("rust-example-rendered"),
|
||||
None,
|
||||
playground_button.as_ref().map(String::as_str)));
|
||||
Some(Event::Html(s.into()))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn heading(parser: &mut ParserWrapper, buffer: &mut String,
|
||||
toc_builder: &mut Option<TocBuilder>, shorter: MarkdownOutputStyle, level: i32) {
|
||||
debug!("Heading");
|
||||
let mut ret = String::new();
|
||||
let mut id = String::new();
|
||||
event_loop_break!(parser, toc_builder, shorter, ret, true, &mut Some(&mut id),
|
||||
Event::End(Tag::Header(_)));
|
||||
ret = ret.trim_right().to_owned();
|
||||
/// Make headings links with anchor ids and build up TOC.
|
||||
struct HeadingLinks<'a, 'b, I: Iterator<Item = Event<'a>>> {
|
||||
inner: I,
|
||||
toc: Option<&'b mut TocBuilder>,
|
||||
buf: VecDeque<Event<'a>>,
|
||||
}
|
||||
|
||||
let id = id.chars().filter_map(|c| {
|
||||
if c.is_alphanumeric() || c == '-' || c == '_' {
|
||||
if c.is_ascii() {
|
||||
Some(c.to_ascii_lowercase())
|
||||
} else {
|
||||
Some(c)
|
||||
}
|
||||
} else if c.is_whitespace() && c.is_ascii() {
|
||||
Some('-')
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}).collect::<String>();
|
||||
|
||||
let id = derive_id(id);
|
||||
|
||||
let sec = toc_builder.as_mut().map_or("".to_owned(), |builder| {
|
||||
format!("{} ", builder.push(level as u32, ret.clone(), id.clone()))
|
||||
});
|
||||
|
||||
// Render the HTML
|
||||
buffer.push_str(&format!("<h{lvl} id=\"{id}\" class=\"section-header\">\
|
||||
<a href=\"#{id}\">{sec}{}</a></h{lvl}>",
|
||||
ret, lvl = level, id = id, sec = sec));
|
||||
}
|
||||
|
||||
fn inline_code(parser: &mut ParserWrapper, buffer: &mut String,
|
||||
toc_builder: &mut Option<TocBuilder>, shorter: MarkdownOutputStyle,
|
||||
id: &mut Option<&mut String>) {
|
||||
debug!("InlineCode");
|
||||
let mut content = String::new();
|
||||
event_loop_break!(parser, toc_builder, shorter, content, false, id, Event::End(Tag::Code));
|
||||
buffer.push_str(&format!("<code>{}</code>",
|
||||
Escape(&collapse_whitespace(content.trim_right()))));
|
||||
}
|
||||
|
||||
fn link(parser: &mut ParserWrapper, buffer: &mut String, toc_builder: &mut Option<TocBuilder>,
|
||||
shorter: MarkdownOutputStyle, url: &str, title: &str,
|
||||
id: &mut Option<&mut String>) {
|
||||
debug!("Link");
|
||||
let mut content = String::new();
|
||||
event_loop_break!(parser, toc_builder, shorter, content, true, id,
|
||||
Event::End(Tag::Link(_, _)));
|
||||
if title.is_empty() {
|
||||
buffer.push_str(&format!("<a href=\"{}\">{}</a>", url, content));
|
||||
} else {
|
||||
buffer.push_str(&format!("<a href=\"{}\" title=\"{}\">{}</a>",
|
||||
url, Escape(title), content));
|
||||
impl<'a, 'b, I: Iterator<Item = Event<'a>>> HeadingLinks<'a, 'b, I> {
|
||||
fn new(iter: I, toc: Option<&'b mut TocBuilder>) -> Self {
|
||||
HeadingLinks {
|
||||
inner: iter,
|
||||
toc: toc,
|
||||
buf: VecDeque::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn image(parser: &mut ParserWrapper, buffer: &mut String, toc_builder: &mut Option<TocBuilder>,
|
||||
shorter: MarkdownOutputStyle, url: &str, mut title: String,
|
||||
id: &mut Option<&mut String>) {
|
||||
debug!("Image");
|
||||
event_loop_break!(parser, toc_builder, shorter, title, true, id,
|
||||
Event::End(Tag::Image(_, _)));
|
||||
buffer.push_str(&format!("<img src=\"{}\" alt=\"{}\">", url, title));
|
||||
}
|
||||
impl<'a, 'b, I: Iterator<Item = Event<'a>>> Iterator for HeadingLinks<'a, 'b, I> {
|
||||
type Item = Event<'a>;
|
||||
|
||||
fn paragraph(parser: &mut ParserWrapper, buffer: &mut String,
|
||||
toc_builder: &mut Option<TocBuilder>, shorter: MarkdownOutputStyle,
|
||||
id: &mut Option<&mut String>) {
|
||||
debug!("Paragraph");
|
||||
let mut content = String::new();
|
||||
event_loop_break!(parser, toc_builder, shorter, content, true, id,
|
||||
Event::End(Tag::Paragraph));
|
||||
buffer.push_str(&format!("<p>{}</p>", content.trim_right()));
|
||||
}
|
||||
|
||||
fn table_cell(parser: &mut ParserWrapper, buffer: &mut String,
|
||||
toc_builder: &mut Option<TocBuilder>, shorter: MarkdownOutputStyle) {
|
||||
debug!("TableCell");
|
||||
let mut content = String::new();
|
||||
event_loop_break!(parser, toc_builder, shorter, content, true, &mut None,
|
||||
Event::End(Tag::TableHead) |
|
||||
Event::End(Tag::Table(_)) |
|
||||
Event::End(Tag::TableRow) |
|
||||
Event::End(Tag::TableCell));
|
||||
buffer.push_str(&format!("<td>{}</td>", content.trim()));
|
||||
}
|
||||
|
||||
fn table_row(parser: &mut ParserWrapper, buffer: &mut String,
|
||||
toc_builder: &mut Option<TocBuilder>, shorter: MarkdownOutputStyle) {
|
||||
debug!("TableRow");
|
||||
let mut content = String::new();
|
||||
while let Some(event) = parser.next() {
|
||||
match event {
|
||||
Event::End(Tag::TableHead) |
|
||||
Event::End(Tag::Table(_)) |
|
||||
Event::End(Tag::TableRow) => break,
|
||||
Event::Start(Tag::TableCell) => {
|
||||
table_cell(parser, &mut content, toc_builder, shorter);
|
||||
}
|
||||
x => {
|
||||
looper(parser, &mut content, Some(x), toc_builder, shorter, &mut None);
|
||||
}
|
||||
}
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if let Some(e) = self.buf.pop_front() {
|
||||
return Some(e);
|
||||
}
|
||||
buffer.push_str(&format!("<tr>{}</tr>", content));
|
||||
}
|
||||
|
||||
fn table_head(parser: &mut ParserWrapper, buffer: &mut String,
|
||||
toc_builder: &mut Option<TocBuilder>, shorter: MarkdownOutputStyle) {
|
||||
debug!("TableHead");
|
||||
let mut content = String::new();
|
||||
while let Some(event) = parser.next() {
|
||||
match event {
|
||||
Event::End(Tag::TableHead) | Event::End(Tag::Table(_)) => break,
|
||||
Event::Start(Tag::TableCell) => {
|
||||
table_cell(parser, &mut content, toc_builder, shorter);
|
||||
}
|
||||
x => {
|
||||
looper(parser, &mut content, Some(x), toc_builder, shorter, &mut None);
|
||||
let event = self.inner.next();
|
||||
if let Some(Event::Start(Tag::Header(level))) = event {
|
||||
let mut id = String::new();
|
||||
for event in &mut self.inner {
|
||||
match event {
|
||||
Event::End(Tag::Header(..)) => break,
|
||||
Event::Text(ref text) => id.extend(text.chars().filter_map(slugify)),
|
||||
_ => {},
|
||||
}
|
||||
self.buf.push_back(event);
|
||||
}
|
||||
let id = derive_id(id);
|
||||
|
||||
if let Some(ref mut builder) = self.toc {
|
||||
let mut html_header = String::new();
|
||||
html::push_html(&mut html_header, self.buf.iter().cloned());
|
||||
let sec = builder.push(level as u32, html_header, id.clone());
|
||||
self.buf.push_front(Event::InlineHtml(format!("{} ", sec).into()));
|
||||
}
|
||||
|
||||
self.buf.push_back(Event::InlineHtml(format!("</a></h{}>", level).into()));
|
||||
|
||||
let start_tags = format!("<h{level} id=\"{id}\" class=\"section-header\">\
|
||||
<a href=\"#{id}\">",
|
||||
id = id,
|
||||
level = level);
|
||||
return Some(Event::InlineHtml(start_tags.into()));
|
||||
}
|
||||
if !content.is_empty() {
|
||||
buffer.push_str(&format!("<thead><tr>{}</tr></thead>", content.replace("td>", "th>")));
|
||||
event
|
||||
}
|
||||
}
|
||||
|
||||
/// Extracts just the first paragraph.
|
||||
struct SummaryLine<'a, I: Iterator<Item = Event<'a>>> {
|
||||
inner: I,
|
||||
started: bool,
|
||||
depth: u32,
|
||||
}
|
||||
|
||||
impl<'a, I: Iterator<Item = Event<'a>>> SummaryLine<'a, I> {
|
||||
fn new(iter: I) -> Self {
|
||||
SummaryLine {
|
||||
inner: iter,
|
||||
started: false,
|
||||
depth: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn table(parser: &mut ParserWrapper, buffer: &mut String, toc_builder: &mut Option<TocBuilder>,
|
||||
shorter: MarkdownOutputStyle) {
|
||||
debug!("Table");
|
||||
let mut content = String::new();
|
||||
let mut rows = String::new();
|
||||
while let Some(event) = parser.next() {
|
||||
match event {
|
||||
Event::End(Tag::Table(_)) => break,
|
||||
Event::Start(Tag::TableHead) => {
|
||||
table_head(parser, &mut content, toc_builder, shorter);
|
||||
}
|
||||
Event::Start(Tag::TableRow) => {
|
||||
table_row(parser, &mut rows, toc_builder, shorter);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
impl<'a, I: Iterator<Item = Event<'a>>> Iterator for SummaryLine<'a, I> {
|
||||
type Item = Event<'a>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.started && self.depth == 0 {
|
||||
return None;
|
||||
}
|
||||
buffer.push_str(&format!("<table>{}{}</table>",
|
||||
content,
|
||||
if shorter.is_compact() || rows.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
format!("<tbody>{}</tbody>", rows)
|
||||
}));
|
||||
}
|
||||
|
||||
fn blockquote(parser: &mut ParserWrapper, buffer: &mut String,
|
||||
toc_builder: &mut Option<TocBuilder>, shorter: MarkdownOutputStyle) {
|
||||
debug!("BlockQuote");
|
||||
let mut content = String::new();
|
||||
event_loop_break!(parser, toc_builder, shorter, content, true, &mut None,
|
||||
Event::End(Tag::BlockQuote));
|
||||
buffer.push_str(&format!("<blockquote>{}</blockquote>", content.trim_right()));
|
||||
}
|
||||
|
||||
fn list_item(parser: &mut ParserWrapper, buffer: &mut String,
|
||||
toc_builder: &mut Option<TocBuilder>, shorter: MarkdownOutputStyle) {
|
||||
debug!("ListItem");
|
||||
let mut content = String::new();
|
||||
while let Some(event) = parser.next() {
|
||||
match event {
|
||||
Event::End(Tag::Item) => break,
|
||||
Event::Text(ref s) => {
|
||||
content.push_str(&format!("{}", Escape(s)));
|
||||
}
|
||||
x => {
|
||||
looper(parser, &mut content, Some(x), toc_builder, shorter, &mut None);
|
||||
}
|
||||
}
|
||||
if shorter.is_compact() {
|
||||
break
|
||||
}
|
||||
if !self.started {
|
||||
self.started = true;
|
||||
}
|
||||
buffer.push_str(&format!("<li>{}</li>", content));
|
||||
}
|
||||
|
||||
fn list(parser: &mut ParserWrapper, buffer: &mut String, toc_builder: &mut Option<TocBuilder>,
|
||||
shorter: MarkdownOutputStyle, is_sorted_list: bool) {
|
||||
debug!("List");
|
||||
let mut content = String::new();
|
||||
while let Some(event) = parser.next() {
|
||||
match event {
|
||||
Event::End(Tag::List(_)) => break,
|
||||
Event::Start(Tag::Item) => {
|
||||
list_item(parser, &mut content, toc_builder, shorter);
|
||||
}
|
||||
x => {
|
||||
looper(parser, &mut content, Some(x), toc_builder, shorter, &mut None);
|
||||
}
|
||||
}
|
||||
if shorter.is_compact() {
|
||||
break
|
||||
}
|
||||
let event = self.inner.next();
|
||||
match event {
|
||||
Some(Event::Start(..)) => self.depth += 1,
|
||||
Some(Event::End(..)) => self.depth -= 1,
|
||||
_ => {}
|
||||
}
|
||||
buffer.push_str(&format!("<{0}>{1}</{0}>",
|
||||
if is_sorted_list { "ol" } else { "ul" },
|
||||
content));
|
||||
event
|
||||
}
|
||||
}
|
||||
|
||||
fn emphasis(parser: &mut ParserWrapper, buffer: &mut String,
|
||||
toc_builder: &mut Option<TocBuilder>, shorter: MarkdownOutputStyle,
|
||||
id: &mut Option<&mut String>) {
|
||||
debug!("Emphasis");
|
||||
let mut content = String::new();
|
||||
event_loop_break!(parser, toc_builder, shorter, content, false, id,
|
||||
Event::End(Tag::Emphasis));
|
||||
buffer.push_str(&format!("<em>{}</em>", content));
|
||||
/// Moves all footnote definitions to the end and add back links to the
|
||||
/// references.
|
||||
struct Footnotes<'a, I: Iterator<Item = Event<'a>>> {
|
||||
inner: I,
|
||||
footnotes: HashMap<String, (Vec<Event<'a>>, u16)>,
|
||||
}
|
||||
|
||||
impl<'a, I: Iterator<Item = Event<'a>>> Footnotes<'a, I> {
|
||||
fn new(iter: I) -> Self {
|
||||
Footnotes {
|
||||
inner: iter,
|
||||
footnotes: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn strong(parser: &mut ParserWrapper, buffer: &mut String, toc_builder: &mut Option<TocBuilder>,
|
||||
shorter: MarkdownOutputStyle, id: &mut Option<&mut String>) {
|
||||
debug!("Strong");
|
||||
let mut content = String::new();
|
||||
event_loop_break!(parser, toc_builder, shorter, content, false, id,
|
||||
Event::End(Tag::Strong));
|
||||
buffer.push_str(&format!("<strong>{}</strong>", content));
|
||||
fn get_entry(&mut self, key: &str) -> &mut (Vec<Event<'a>>, u16) {
|
||||
let new_id = self.footnotes.keys().count() + 1;
|
||||
let key = key.to_owned();
|
||||
self.footnotes.entry(key).or_insert((Vec::new(), new_id as u16))
|
||||
}
|
||||
}
|
||||
|
||||
fn footnote(parser: &mut ParserWrapper, buffer: &mut String,
|
||||
toc_builder: &mut Option<TocBuilder>, shorter: MarkdownOutputStyle,
|
||||
id: &mut Option<&mut String>) {
|
||||
debug!("FootnoteDefinition");
|
||||
let mut content = String::new();
|
||||
event_loop_break!(parser, toc_builder, shorter, content, true, id,
|
||||
Event::End(Tag::FootnoteDefinition(_)));
|
||||
buffer.push_str(&content);
|
||||
}
|
||||
impl<'a, I: Iterator<Item = Event<'a>>> Iterator for Footnotes<'a, I> {
|
||||
type Item = Event<'a>;
|
||||
|
||||
fn rule(parser: &mut ParserWrapper, buffer: &mut String, toc_builder: &mut Option<TocBuilder>,
|
||||
shorter: MarkdownOutputStyle, id: &mut Option<&mut String>) {
|
||||
debug!("Rule");
|
||||
let mut content = String::new();
|
||||
event_loop_break!(parser, toc_builder, shorter, content, true, id,
|
||||
Event::End(Tag::Rule));
|
||||
buffer.push_str("<hr>");
|
||||
}
|
||||
|
||||
fn looper<'a>(parser: &'a mut ParserWrapper, buffer: &mut String, next_event: Option<Event<'a>>,
|
||||
toc_builder: &mut Option<TocBuilder>, shorter: MarkdownOutputStyle,
|
||||
id: &mut Option<&mut String>) -> bool {
|
||||
if let Some(event) = next_event {
|
||||
match event {
|
||||
Event::Start(Tag::CodeBlock(lang)) => {
|
||||
code_block(parser, buffer, &*lang);
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
loop {
|
||||
match self.inner.next() {
|
||||
Some(Event::FootnoteReference(ref reference)) => {
|
||||
let entry = self.get_entry(&reference);
|
||||
let reference = format!("<sup id=\"supref{0}\"><a href=\"#ref{0}\">{0}\
|
||||
</a></sup>",
|
||||
(*entry).1);
|
||||
return Some(Event::Html(reference.into()));
|
||||
}
|
||||
Event::Start(Tag::Header(level)) => {
|
||||
heading(parser, buffer, toc_builder, shorter, level);
|
||||
Some(Event::Start(Tag::FootnoteDefinition(def))) => {
|
||||
let mut content = Vec::new();
|
||||
for event in &mut self.inner {
|
||||
if let Event::End(Tag::FootnoteDefinition(..)) = event {
|
||||
break;
|
||||
}
|
||||
content.push(event);
|
||||
}
|
||||
let entry = self.get_entry(&def);
|
||||
(*entry).0 = content;
|
||||
}
|
||||
Event::Start(Tag::Code) => {
|
||||
inline_code(parser, buffer, toc_builder, shorter, id);
|
||||
}
|
||||
Event::Start(Tag::Paragraph) => {
|
||||
paragraph(parser, buffer, toc_builder, shorter, id);
|
||||
}
|
||||
Event::Start(Tag::Link(ref url, ref t)) => {
|
||||
link(parser, buffer, toc_builder, shorter, url, t.as_ref(), id);
|
||||
}
|
||||
Event::Start(Tag::Image(ref url, ref t)) => {
|
||||
image(parser, buffer, toc_builder, shorter, url, t.as_ref().to_owned(), id);
|
||||
}
|
||||
Event::Start(Tag::Table(_)) => {
|
||||
table(parser, buffer, toc_builder, shorter);
|
||||
}
|
||||
Event::Start(Tag::BlockQuote) => {
|
||||
blockquote(parser, buffer, toc_builder, shorter);
|
||||
}
|
||||
Event::Start(Tag::List(x)) => {
|
||||
list(parser, buffer, toc_builder, shorter, x.is_some());
|
||||
}
|
||||
Event::Start(Tag::Emphasis) => {
|
||||
emphasis(parser, buffer, toc_builder, shorter, id);
|
||||
}
|
||||
Event::Start(Tag::Strong) => {
|
||||
strong(parser, buffer, toc_builder, shorter, id);
|
||||
}
|
||||
Event::Start(Tag::Rule) => {
|
||||
rule(parser, buffer, toc_builder, shorter, id);
|
||||
}
|
||||
Event::Start(Tag::FootnoteDefinition(ref def)) => {
|
||||
debug!("FootnoteDefinition");
|
||||
let mut content = String::new();
|
||||
let def = def.as_ref();
|
||||
footnote(parser, &mut content, toc_builder, shorter, id);
|
||||
let entry = parser.get_entry(def);
|
||||
let cur_id = (*entry).1;
|
||||
(*entry).0.push_str(&format!("<li id=\"ref{}\">{} <a href=\"#supref{0}\" \
|
||||
rev=\"footnote\">↩</a></p></li>",
|
||||
cur_id,
|
||||
if content.ends_with("</p>") {
|
||||
&content[..content.len() - 4]
|
||||
} else {
|
||||
&content
|
||||
}));
|
||||
}
|
||||
Event::FootnoteReference(ref reference) => {
|
||||
debug!("FootnoteReference");
|
||||
let entry = parser.get_entry(reference.as_ref());
|
||||
buffer.push_str(&format!("<sup id=\"supref{0}\"><a href=\"#ref{0}\">{0}</a>\
|
||||
</sup>",
|
||||
(*entry).1));
|
||||
}
|
||||
Event::HardBreak => {
|
||||
debug!("HardBreak");
|
||||
if shorter.is_fancy() {
|
||||
buffer.push_str("<br>");
|
||||
} else if !buffer.is_empty() {
|
||||
buffer.push(' ');
|
||||
Some(e) => return Some(e),
|
||||
None => {
|
||||
if !self.footnotes.is_empty() {
|
||||
let mut v: Vec<_> = self.footnotes.drain().map(|(_, x)| x).collect();
|
||||
v.sort_by(|a, b| a.1.cmp(&b.1));
|
||||
let mut ret = String::from("<div class=\"footnotes\"><hr><ol>");
|
||||
for (mut content, id) in v {
|
||||
write!(ret, "<li id=\"ref{}\">", id).unwrap();
|
||||
let mut is_paragraph = false;
|
||||
if let Some(&Event::End(Tag::Paragraph)) = content.last() {
|
||||
content.pop();
|
||||
is_paragraph = true;
|
||||
}
|
||||
html::push_html(&mut ret, content.into_iter());
|
||||
write!(ret,
|
||||
" <a href=\"#supref{}\" rev=\"footnote\">↩</a>",
|
||||
id).unwrap();
|
||||
if is_paragraph {
|
||||
ret.push_str("</p>");
|
||||
}
|
||||
ret.push_str("</li>");
|
||||
}
|
||||
ret.push_str("</ol></div>");
|
||||
return Some(Event::Html(ret.into()));
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
Event::Html(h) | Event::InlineHtml(h) => {
|
||||
debug!("Html/InlineHtml");
|
||||
buffer.push_str(&*h);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
shorter.is_fancy()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
let mut toc_builder = if print_toc {
|
||||
Some(TocBuilder::new())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let mut buffer = String::new();
|
||||
let mut parser = ParserWrapper::new(s);
|
||||
loop {
|
||||
let next_event = parser.next();
|
||||
if !looper(&mut parser, &mut buffer, next_event, &mut toc_builder, shorter, &mut None) {
|
||||
break
|
||||
}
|
||||
}
|
||||
if !parser.footnotes.is_empty() {
|
||||
let mut v: Vec<_> = parser.footnotes.values().collect();
|
||||
v.sort_by(|a, b| a.1.cmp(&b.1));
|
||||
buffer.push_str(&format!("<div class=\"footnotes\"><hr><ol>{}</ol></div>",
|
||||
v.iter()
|
||||
.map(|s| s.0.as_str())
|
||||
.collect::<Vec<_>>()
|
||||
.join("")));
|
||||
}
|
||||
let mut ret = toc_builder.map_or(Ok(()), |builder| {
|
||||
write!(w, "<nav id=\"TOC\">{}</nav>", builder.into_toc())
|
||||
});
|
||||
|
||||
if ret.is_ok() {
|
||||
ret = w.write_str(&buffer);
|
||||
}
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn find_testable_code(doc: &str, tests: &mut ::test::Collector, position: Span) {
|
||||
@ -755,17 +501,45 @@ impl LangString {
|
||||
|
||||
impl<'a> fmt::Display for Markdown<'a> {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
let Markdown(md, shorter) = *self;
|
||||
let Markdown(md) = *self;
|
||||
// This is actually common enough to special-case
|
||||
if md.is_empty() { return Ok(()) }
|
||||
render(fmt, md, false, shorter)
|
||||
|
||||
let mut opts = Options::empty();
|
||||
opts.insert(OPTION_ENABLE_TABLES);
|
||||
opts.insert(OPTION_ENABLE_FOOTNOTES);
|
||||
|
||||
let p = Parser::new_ext(md, opts);
|
||||
|
||||
let mut s = String::with_capacity(md.len() * 3 / 2);
|
||||
|
||||
html::push_html(&mut s,
|
||||
Footnotes::new(CodeBlocks::new(HeadingLinks::new(p, None))));
|
||||
|
||||
fmt.write_str(&s)
|
||||
}
|
||||
}
|
||||
|
||||
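// Illustrative sketch (not part of this patch; hypothetical adapter): the
// rendering pipeline above is plain iterator composition over pulldown-cmark
// events, so additional passes can be layered in the same way as
// `Footnotes`, `CodeBlocks`, and `HeadingLinks`.
//
//     struct Uppercase<'a, I: Iterator<Item = Event<'a>>> { inner: I }
//
//     impl<'a, I: Iterator<Item = Event<'a>>> Iterator for Uppercase<'a, I> {
//         type Item = Event<'a>;
//         fn next(&mut self) -> Option<Event<'a>> {
//             self.inner.next().map(|ev| match ev {
//                 Event::Text(t) => Event::Text(t.to_uppercase().into()),
//                 other => other,
//             })
//         }
//     }
//
//     // html::push_html(&mut out, Uppercase { inner: Parser::new(md) });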
impl<'a> fmt::Display for MarkdownWithToc<'a> {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
let MarkdownWithToc(md) = *self;
|
||||
render(fmt, md, true, MarkdownOutputStyle::Fancy)
|
||||
|
||||
let mut opts = Options::empty();
|
||||
opts.insert(OPTION_ENABLE_TABLES);
|
||||
opts.insert(OPTION_ENABLE_FOOTNOTES);
|
||||
|
||||
let p = Parser::new_ext(md, opts);
|
||||
|
||||
let mut s = String::with_capacity(md.len() * 3 / 2);
|
||||
|
||||
let mut toc = TocBuilder::new();
|
||||
|
||||
html::push_html(&mut s,
|
||||
Footnotes::new(CodeBlocks::new(HeadingLinks::new(p, Some(&mut toc)))));
|
||||
|
||||
write!(fmt, "<nav id=\"TOC\">{}</nav>", toc.into_toc())?;
|
||||
|
||||
fmt.write_str(&s)
|
||||
}
|
||||
}
|
||||
|
||||
@ -774,7 +548,41 @@ impl<'a> fmt::Display for MarkdownHtml<'a> {
|
||||
let MarkdownHtml(md) = *self;
|
||||
// This is actually common enough to special-case
|
||||
if md.is_empty() { return Ok(()) }
|
||||
render(fmt, md, false, MarkdownOutputStyle::Fancy)
|
||||
|
||||
let mut opts = Options::empty();
|
||||
opts.insert(OPTION_ENABLE_TABLES);
|
||||
opts.insert(OPTION_ENABLE_FOOTNOTES);
|
||||
|
||||
let p = Parser::new_ext(md, opts);
|
||||
|
||||
// Treat inline HTML as plain text.
|
||||
let p = p.map(|event| match event {
|
||||
Event::Html(text) | Event::InlineHtml(text) => Event::Text(text),
|
||||
_ => event
|
||||
});
|
||||
|
||||
let mut s = String::with_capacity(md.len() * 3 / 2);
|
||||
|
||||
html::push_html(&mut s,
|
||||
Footnotes::new(CodeBlocks::new(HeadingLinks::new(p, None))));
|
||||
|
||||
fmt.write_str(&s)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> fmt::Display for MarkdownSummaryLine<'a> {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
let MarkdownSummaryLine(md) = *self;
|
||||
// This is actually common enough to special-case
|
||||
if md.is_empty() { return Ok(()) }
|
||||
|
||||
let p = Parser::new(md);
|
||||
|
||||
let mut s = String::new();
|
||||
|
||||
html::push_html(&mut s, SummaryLine::new(p));
|
||||
|
||||
fmt.write_str(&s)
|
||||
}
|
||||
}
|
||||
|
||||
@ -796,14 +604,10 @@ pub fn plain_summary_line(md: &str) -> String {
let next_event = next_event.unwrap();
let (ret, is_in) = match next_event {
Event::Start(Tag::Paragraph) => (None, 1),
Event::Start(Tag::Link(_, ref t)) if !self.is_first => {
(Some(t.as_ref().to_owned()), 1)
}
Event::Start(Tag::Code) => (Some("`".to_owned()), 1),
Event::End(Tag::Code) => (Some("`".to_owned()), -1),
Event::Start(Tag::Header(_)) => (None, 1),
Event::Text(ref s) if self.is_in > 0 => (Some(s.as_ref().to_owned()), 0),
Event::End(Tag::Link(_, ref t)) => (Some(t.as_ref().to_owned()), -1),
Event::End(Tag::Paragraph) | Event::End(Tag::Header(_)) => (None, -1),
_ => (None, 0),
};
@ -834,7 +638,7 @@ pub fn plain_summary_line(md: &str) -> String {

#[cfg(test)]
mod tests {
use super::{LangString, Markdown, MarkdownHtml, MarkdownOutputStyle};
use super::{LangString, Markdown, MarkdownHtml};
use super::plain_summary_line;
use html::render::reset_ids;

@ -874,14 +678,14 @@ mod tests {
#[test]
fn issue_17736() {
let markdown = "# title";
format!("{}", Markdown(markdown, MarkdownOutputStyle::Fancy));
format!("{}", Markdown(markdown));
reset_ids(true);
}

#[test]
fn test_header() {
fn t(input: &str, expect: &str) {
let output = format!("{}", Markdown(input, MarkdownOutputStyle::Fancy));
let output = format!("{}", Markdown(input));
assert_eq!(output, expect, "original: {}", input);
reset_ids(true);
}
@ -903,7 +707,7 @@ mod tests {
#[test]
fn test_header_ids_multiple_blocks() {
fn t(input: &str, expect: &str) {
let output = format!("{}", Markdown(input, MarkdownOutputStyle::Fancy));
let output = format!("{}", Markdown(input));
assert_eq!(output, expect, "original: {}", input);
}

@ -934,6 +738,7 @@ mod tests {
}

t("hello [Rust](https://www.rust-lang.org) :)", "hello Rust :)");
t("hello [Rust](https://www.rust-lang.org \"Rust\") :)", "hello Rust :)");
t("code `let x = i32;` ...", "code `let x = i32;` ...");
t("type `Type<'static>` ...", "type `Type<'static>` ...");
t("# top header", "top header");
@ -947,7 +752,8 @@ mod tests {
assert_eq!(output, expect, "original: {}", input);
}

t("`Struct<'a, T>`", "<p><code>Struct<'a, T></code></p>");
t("Struct<'a, T>", "<p>Struct<'a, T></p>");
t("`Struct<'a, T>`", "<p><code>Struct<'a, T></code></p>\n");
t("Struct<'a, T>", "<p>Struct<'a, T></p>\n");
t("Struct<br>", "<p>Struct<br></p>\n");
}
}

@ -72,7 +72,7 @@ use html::format::{TyParamBounds, WhereClause, href, AbiSpace};
use html::format::{VisSpace, Method, UnsafetySpace, MutableSpace};
use html::format::fmt_impl_for_trait_page;
use html::item_type::ItemType;
use html::markdown::{self, Markdown, MarkdownHtml, MarkdownOutputStyle};
use html::markdown::{self, Markdown, MarkdownHtml, MarkdownSummaryLine};
use html::{highlight, layout};

/// A pair of name and its optional document.
@ -1651,7 +1651,7 @@ fn document_short(w: &mut fmt::Formatter, item: &clean::Item, link: AssocItemLin
format!("{}", &plain_summary_line(Some(s)))
};
write!(w, "<div class='docblock'>{}</div>",
Markdown(&markdown, MarkdownOutputStyle::Fancy))?;
Markdown(&markdown))?;
}
Ok(())
}
@ -1684,8 +1684,7 @@ fn get_doc_value(item: &clean::Item) -> Option<&str> {
fn document_full(w: &mut fmt::Formatter, item: &clean::Item) -> fmt::Result {
if let Some(s) = get_doc_value(item) {
write!(w, "<div class='docblock'>{}</div>",
Markdown(&format!("{}{}", md_render_assoc_item(item), s),
MarkdownOutputStyle::Fancy))?;
Markdown(&format!("{}{}", md_render_assoc_item(item), s)))?;
}
Ok(())
}
@ -1873,8 +1872,7 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context,
</tr>",
name = *myitem.name.as_ref().unwrap(),
stab_docs = stab_docs,
docs = shorter(Some(&Markdown(doc_value,
MarkdownOutputStyle::Compact).to_string())),
docs = MarkdownSummaryLine(doc_value),
class = myitem.type_(),
stab = myitem.stability_class().unwrap_or("".to_string()),
unsafety_flag = unsafety_flag,
@ -2904,7 +2902,7 @@ fn render_impl(w: &mut fmt::Formatter, cx: &Context, i: &Impl, link: AssocItemLi
write!(w, "</span>")?;
write!(w, "</h3>\n")?;
if let Some(ref dox) = i.impl_item.doc_value() {
write!(w, "<div class='docblock'>{}</div>", Markdown(dox, MarkdownOutputStyle::Fancy))?;
write!(w, "<div class='docblock'>{}</div>", Markdown(dox))?;
}
}

@ -25,7 +25,7 @@ use externalfiles::{ExternalHtml, LoadStringError, load_string};
use html::render::reset_ids;
use html::escape::Escape;
use html::markdown;
use html::markdown::{Markdown, MarkdownWithToc, MarkdownOutputStyle, find_testable_code};
use html::markdown::{Markdown, MarkdownWithToc, find_testable_code};
use test::{TestOptions, Collector};

/// Separate any lines at the start of the file that begin with `# ` or `%`.
@ -96,7 +96,7 @@ pub fn render(input: &str, mut output: PathBuf, matches: &getopts::Matches,
let rendered = if include_toc {
format!("{}", MarkdownWithToc(text))
} else {
format!("{}", Markdown(text, MarkdownOutputStyle::Fancy))
format!("{}", Markdown(text))
};

let err = write!(
@ -1176,6 +1176,7 @@ impl AsInner<fs_imp::DirEntry> for DirEntry {
/// This function currently corresponds to the `unlink` function on Unix
/// and the `DeleteFile` function on Windows.
/// Note that, this [may change in the future][changes].
///
/// [changes]: ../io/index.html#platform-specific-behavior
///
/// # Errors
@ -1212,6 +1213,7 @@ pub fn remove_file<P: AsRef<Path>>(path: P) -> io::Result<()> {
/// This function currently corresponds to the `stat` function on Unix
/// and the `GetFileAttributesEx` function on Windows.
/// Note that, this [may change in the future][changes].
///
/// [changes]: ../io/index.html#platform-specific-behavior
///
/// # Errors
@ -1245,6 +1247,7 @@ pub fn metadata<P: AsRef<Path>>(path: P) -> io::Result<Metadata> {
/// This function currently corresponds to the `lstat` function on Unix
/// and the `GetFileAttributesEx` function on Windows.
/// Note that, this [may change in the future][changes].
///
/// [changes]: ../io/index.html#platform-specific-behavior
///
/// # Errors
@ -1287,6 +1290,7 @@ pub fn symlink_metadata<P: AsRef<Path>>(path: P) -> io::Result<Metadata> {
/// on Windows, `from` can be anything, but `to` must *not* be a directory.
///
/// Note that, this [may change in the future][changes].
///
/// [changes]: ../io/index.html#platform-specific-behavior
///
/// # Errors
@ -1330,6 +1334,7 @@ pub fn rename<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> io::Result<()>
/// `O_CLOEXEC` is set for returned file descriptors.
/// On Windows, this function currently corresponds to `CopyFileEx`.
/// Note that, this [may change in the future][changes].
///
/// [changes]: ../io/index.html#platform-specific-behavior
///
/// # Errors
@ -1366,6 +1371,7 @@ pub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> io::Result<u64> {
/// This function currently corresponds to the `link` function on Unix
/// and the `CreateHardLink` function on Windows.
/// Note that, this [may change in the future][changes].
///
/// [changes]: ../io/index.html#platform-specific-behavior
///
/// # Errors
@ -1424,6 +1430,7 @@ pub fn soft_link<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q) -> io::Result<(
/// and the `CreateFile` function with `FILE_FLAG_OPEN_REPARSE_POINT` and
/// `FILE_FLAG_BACKUP_SEMANTICS` flags on Windows.
/// Note that, this [may change in the future][changes].
///
/// [changes]: ../io/index.html#platform-specific-behavior
///
/// # Errors
@ -1457,6 +1464,7 @@ pub fn read_link<P: AsRef<Path>>(path: P) -> io::Result<PathBuf> {
/// This function currently corresponds to the `realpath` function on Unix
/// and the `CreateFile` and `GetFinalPathNameByHandle` functions on Windows.
/// Note that, this [may change in the future][changes].
///
/// [changes]: ../io/index.html#platform-specific-behavior
///
/// # Errors
@ -1489,6 +1497,7 @@ pub fn canonicalize<P: AsRef<Path>>(path: P) -> io::Result<PathBuf> {
/// This function currently corresponds to the `mkdir` function on Unix
/// and the `CreateDirectory` function on Windows.
/// Note that, this [may change in the future][changes].
///
/// [changes]: ../io/index.html#platform-specific-behavior
///
/// # Errors
@ -1522,6 +1531,7 @@ pub fn create_dir<P: AsRef<Path>>(path: P) -> io::Result<()> {
/// This function currently corresponds to the `mkdir` function on Unix
/// and the `CreateDirectory` function on Windows.
/// Note that, this [may change in the future][changes].
///
/// [changes]: ../io/index.html#platform-specific-behavior
///
/// # Errors
@ -1562,6 +1572,7 @@ pub fn create_dir_all<P: AsRef<Path>>(path: P) -> io::Result<()> {
/// This function currently corresponds to the `rmdir` function on Unix
/// and the `RemoveDirectory` function on Windows.
/// Note that, this [may change in the future][changes].
///
/// [changes]: ../io/index.html#platform-specific-behavior
///
/// # Errors
@ -1599,6 +1610,7 @@ pub fn remove_dir<P: AsRef<Path>>(path: P) -> io::Result<()> {
/// and the `FindFirstFile`, `GetFileAttributesEx`, `DeleteFile`, and `RemoveDirectory` functions
/// on Windows.
/// Note that, this [may change in the future][changes].
///
/// [changes]: ../io/index.html#platform-specific-behavior
///
/// # Errors
@ -1633,6 +1645,7 @@ pub fn remove_dir_all<P: AsRef<Path>>(path: P) -> io::Result<()> {
/// This function currently corresponds to the `opendir` function on Unix
/// and the `FindFirstFile` function on Windows.
/// Note that, this [may change in the future][changes].
///
/// [changes]: ../io/index.html#platform-specific-behavior
///
/// # Errors
@ -1679,6 +1692,7 @@ pub fn read_dir<P: AsRef<Path>>(path: P) -> io::Result<ReadDir> {
/// This function currently corresponds to the `chmod` function on Unix
/// and the `SetFileAttributes` function on Windows.
/// Note that, this [may change in the future][changes].
///
/// [changes]: ../io/index.html#platform-specific-behavior
///
/// # Errors
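The fs.rs hunks above attach the same "may change in the future" caveat to each function's platform-specific notes, just ahead of its `# Errors` section. As a hedged illustration of the error handling those sections describe, here is a small sketch around one of the touched functions, `fs::remove_file`; the file name is a placeholder and `remove_if_exists` is not part of this PR.

use std::fs;
use std::io;

// Delete a file, but treat "not found" as success; any other error is
// propagated to the caller.
fn remove_if_exists(path: &str) -> io::Result<()> {
    match fs::remove_file(path) {
        Ok(()) => Ok(()),
        Err(ref e) if e.kind() == io::ErrorKind::NotFound => Ok(()),
        Err(e) => Err(e),
    }
}

fn main() {
    // "scratch.tmp" is only a placeholder path for the example.
    remove_if_exists("scratch.tmp").expect("unexpected I/O error");
}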
@ -21,12 +21,12 @@ use memchr;
/// The `BufReader` struct adds buffering to any reader.
///
/// It can be excessively inefficient to work directly with a [`Read`] instance.
/// For example, every call to [`read`] on [`TcpStream`] results in a system call.
/// A `BufReader` performs large, infrequent reads on the underlying [`Read`]
/// and maintains an in-memory buffer of the results.
/// For example, every call to [`read`][`TcpStream::read`] on [`TcpStream`]
/// results in a system call. A `BufReader` performs large, infrequent reads on
/// the underlying [`Read`] and maintains an in-memory buffer of the results.
///
/// [`Read`]: ../../std/io/trait.Read.html
/// [`read`]: ../../std/net/struct.TcpStream.html#method.read
/// [`TcpStream::read`]: ../../std/net/struct.TcpStream.html#method.read
/// [`TcpStream`]: ../../std/net/struct.TcpStream.html
///
/// # Examples
@ -261,9 +261,10 @@ impl<R: Seek> Seek for BufReader<R> {
/// Wraps a writer and buffers its output.
///
/// It can be excessively inefficient to work directly with something that
/// implements [`Write`]. For example, every call to [`write`] on [`TcpStream`]
/// results in a system call. A `BufWriter` keeps an in-memory buffer of data
/// and writes it to an underlying writer in large, infrequent batches.
/// implements [`Write`]. For example, every call to
/// [`write`][`Tcpstream::write`] on [`TcpStream`] results in a system call. A
/// `BufWriter` keeps an in-memory buffer of data and writes it to an underlying
/// writer in large, infrequent batches.
///
/// The buffer will be written out when the writer is dropped.
///
@ -303,7 +304,7 @@ impl<R: Seek> Seek for BufReader<R> {
/// the `stream` is dropped.
///
/// [`Write`]: ../../std/io/trait.Write.html
/// [`write`]: ../../std/net/struct.TcpStream.html#method.write
/// [`Tcpstream::write`]: ../../std/net/struct.TcpStream.html#method.write
/// [`TcpStream`]: ../../std/net/struct.TcpStream.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct BufWriter<W: Write> {
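The reworded docs above describe buffering a `TcpStream` so that reads and writes do not each cost a system call. A minimal sketch of that usage follows; the address is a placeholder, the function name is illustrative only, and connecting will of course fail at runtime without a listener on that port.

use std::io::{BufRead, BufReader, BufWriter, Write};
use std::net::TcpStream;

fn talk() -> std::io::Result<()> {
    // Placeholder address for the example.
    let stream = TcpStream::connect("127.0.0.1:34254")?;
    let mut writer = BufWriter::new(stream.try_clone()?);
    let mut reader = BufReader::new(stream);

    writer.write_all(b"ping\n")?; // buffered in memory
    writer.flush()?;              // one system call for the whole batch

    let mut line = String::new();
    reader.read_line(&mut line)?; // one large read refills the buffer
    Ok(())
}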
@ -21,7 +21,8 @@
//! of other types, and you can implement them for your types too. As such,
//! you'll see a few different types of I/O throughout the documentation in
//! this module: [`File`]s, [`TcpStream`]s, and sometimes even [`Vec<T>`]s. For
//! example, [`Read`] adds a [`read`] method, which we can use on `File`s:
//! example, [`Read`] adds a [`read`][`Read::read`] method, which we can use on
//! `File`s:
//!
//! ```
//! use std::io;
@ -106,7 +107,7 @@
//! ```
//!
//! [`BufWriter`] doesn't add any new ways of writing; it just buffers every call
//! to [`write`]:
//! to [`write`][`Write::write`]:
//!
//! ```
//! use std::io;
@ -257,13 +258,13 @@
//! [`Vec<T>`]: ../vec/struct.Vec.html
//! [`BufReader`]: struct.BufReader.html
//! [`BufWriter`]: struct.BufWriter.html
//! [`write`]: trait.Write.html#tymethod.write
//! [`Write::write`]: trait.Write.html#tymethod.write
//! [`io::stdout`]: fn.stdout.html
//! [`println!`]: ../macro.println.html
//! [`Lines`]: struct.Lines.html
//! [`io::Result`]: type.Result.html
//! [`?` operator]: ../../book/syntax-index.html
//! [`read`]: trait.Read.html#tymethod.read
//! [`Read::read`]: trait.Read.html#tymethod.read

#![stable(feature = "rust1", since = "1.0.0")]
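The module docs above now link `read` explicitly as `Read::read`. A minimal sketch of that call on a `File`, with a placeholder file name and an illustrative function name:

use std::fs::File;
use std::io;
use std::io::prelude::*;

fn read_prefix() -> io::Result<Vec<u8>> {
    // "foo.txt" is a placeholder path.
    let mut f = File::open("foo.txt")?;
    let mut buffer = [0; 16];
    // `Read::read` reports how many bytes were actually read.
    let n = f.read(&mut buffer)?;
    Ok(buffer[..n].to_vec())
}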
@ -58,7 +58,7 @@ pub struct TcpStream(net_imp::TcpStream);
///
/// After creating a `TcpListener` by [`bind`]ing it to a socket address, it listens
/// for incoming TCP connections. These can be accepted by calling [`accept`] or by
/// iterating over the [`Incoming`] iterator returned by [`incoming`].
/// iterating over the [`Incoming`] iterator returned by [`incoming`][`TcpListener::incoming`].
///
/// The socket will be closed when the value is dropped.
///
@ -68,7 +68,7 @@ pub struct TcpStream(net_imp::TcpStream);
/// [`bind`]: #method.bind
/// [IETF RFC 793]: https://tools.ietf.org/html/rfc793
/// [`Incoming`]: ../../std/net/struct.Incoming.html
/// [`incoming`]: #method.incoming
/// [`TcpListener::incoming`]: #method.incoming
///
/// # Examples
///
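The `TcpListener` docs above distinguish calling `accept` from iterating over the `Incoming` iterator returned by `TcpListener::incoming`. A hedged sketch of the iterator form; port 0 asks the OS for any free port, and `serve_one` is an illustrative name, not API from this PR.

use std::io::Read;
use std::net::TcpListener;

fn serve_one() -> std::io::Result<()> {
    let listener = TcpListener::bind("127.0.0.1:0")?;
    // `incoming` yields `io::Result<TcpStream>` items; handle just one here.
    if let Some(stream) = listener.incoming().next() {
        let mut stream = stream?;
        let mut buf = [0u8; 32];
        let _ = stream.read(&mut buf)?;
    }
    Ok(())
}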
@ -56,14 +56,14 @@
//! traits indicate fundamental properties of types.
//! * [`std::ops`]::{[`Drop`], [`Fn`], [`FnMut`], [`FnOnce`]}. Various
//! operations for both destructors and overloading `()`.
//! * [`std::mem`]::[`drop`], a convenience function for explicitly dropping a
//! value.
//! * [`std::mem`]::[`drop`][`mem::drop`], a convenience function for explicitly
//! dropping a value.
//! * [`std::boxed`]::[`Box`], a way to allocate values on the heap.
//! * [`std::borrow`]::[`ToOwned`], The conversion trait that defines
//! [`to_owned`], the generic method for creating an owned type from a
//! borrowed type.
//! * [`std::clone`]::[`Clone`], the ubiquitous trait that defines [`clone`],
//! the method for producing a copy of a value.
//! * [`std::clone`]::[`Clone`], the ubiquitous trait that defines
//! [`clone`][`Clone::clone`], the method for producing a copy of a value.
//! * [`std::cmp`]::{[`PartialEq`], [`PartialOrd`], [`Eq`], [`Ord`] }. The
//! comparison traits, which implement the comparison operators and are often
//! seen in trait bounds.
@ -117,8 +117,8 @@
//! [`ToOwned`]: ../borrow/trait.ToOwned.html
//! [`ToString`]: ../string/trait.ToString.html
//! [`Vec`]: ../vec/struct.Vec.html
//! [`clone`]: ../clone/trait.Clone.html#tymethod.clone
//! [`drop`]: ../mem/fn.drop.html
//! [`Clone::clone`]: ../clone/trait.Clone.html#tymethod.clone
//! [`mem::drop`]: ../mem/fn.drop.html
//! [`std::borrow`]: ../borrow/index.html
//! [`std::boxed`]: ../boxed/index.html
//! [`std::clone`]: ../clone/index.html
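The two prelude items whose links are renamed above (`mem::drop` and `Clone::clone`) need no explicit `use`; a small, purely illustrative sketch:

fn main() {
    let original = String::from("hello");
    let copy = original.clone(); // Clone::clone, via the prelude
    drop(original);              // std::mem::drop, via the prelude
    println!("{}", copy);
}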
@ -1070,6 +1070,27 @@ pub fn exit(code: i32) -> ! {
/// // execution never gets here
/// }
/// ```
///
/// The [`abort`] function terminates the process, so the destructor will not
/// get run on the example below:
///
/// ```no_run
/// use std::process;
///
/// struct HasDrop;
///
/// impl Drop for HasDrop {
/// fn drop(&mut self) {
/// println!("This will never be printed!");
/// }
/// }
///
/// fn main() {
/// let _x = HasDrop;
/// process::abort();
/// // the destructor implemented for HasDrop will never get run
/// }
/// ```
#[stable(feature = "process_abort", since = "1.17.0")]
pub fn abort() -> ! {
unsafe { ::sys::abort_internal() };

@ -144,7 +144,7 @@ pub trait OpenOptionsExt {
/// `CreateFile`).
///
/// If a _new_ file is created because it does not yet exist and
///`.create(true)` or `.create_new(true)` are specified, the new file is
/// `.create(true)` or `.create_new(true)` are specified, the new file is
/// given the attributes declared with `.attributes()`.
///
/// If an _existing_ file is opened with `.create(true).truncate(true)`, its

@ -104,6 +104,7 @@ pub trait CommandExt {
/// Sets the [process creation flags][1] to be passed to `CreateProcess`.
///
/// These will always be ORed with `CREATE_UNICODE_ENVIRONMENT`.
///
/// [1]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms684863(v=vs.85).aspx
#[stable(feature = "windows_process_extensions", since = "1.16.0")]
fn creation_flags(&mut self, flags: u32) -> &mut process::Command;

@ -90,7 +90,7 @@
//! two ways:
//!
//! * By spawning a new thread, e.g. using the [`thread::spawn`][`spawn`]
//! function, and calling [`thread`] on the [`JoinHandle`].
//! function, and calling [`thread`][`JoinHandle::thread`] on the [`JoinHandle`].
//! * By requesting the current thread, using the [`thread::current`] function.
//!
//! The [`thread::current`] function is available even for threads not spawned
@ -151,7 +151,7 @@
//! [`Arc`]: ../../std/sync/struct.Arc.html
//! [`spawn`]: ../../std/thread/fn.spawn.html
//! [`JoinHandle`]: ../../std/thread/struct.JoinHandle.html
//! [`thread`]: ../../std/thread/struct.JoinHandle.html#method.thread
//! [`JoinHandle::thread`]: ../../std/thread/struct.JoinHandle.html#method.thread
//! [`join`]: ../../std/thread/struct.JoinHandle.html#method.join
//! [`Result`]: ../../std/result/enum.Result.html
//! [`Ok`]: ../../std/result/enum.Result.html#variant.Ok
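The thread docs above now spell out that `thread` on a `JoinHandle` and `thread::current` both give you a `Thread`. A minimal sketch showing both routes; the spawned thread is unnamed here, so `name()` prints `None` on both lines.

use std::thread;

fn main() {
    let handle = thread::spawn(|| {
        // From inside the thread itself.
        println!("current: {:?}", thread::current().name());
    });
    // From the spawning side, via JoinHandle::thread.
    println!("handle:  {:?}", handle.thread().name());
    handle.join().unwrap();
}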
@ -43,6 +43,8 @@ use std::{mem, ptr, slice, vec};

use serialize::{Encodable, Decodable, Encoder, Decoder};

use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult,
HashStable};
/// An owned smart pointer.
#[derive(Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct P<T: ?Sized> {
@ -215,3 +217,13 @@ impl<T: Decodable> Decodable for P<[T]> {
}))
}
}

impl<CTX, T> HashStable<CTX> for P<T>
where T: ?Sized + HashStable<CTX>
{
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut CTX,
hasher: &mut StableHasher<W>) {
(**self).hash_stable(hcx, hasher);
}
}

@ -12,6 +12,9 @@ use std::fmt;
use std::ops::Deref;
use std::rc::Rc;

use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult,
HashStable};

#[derive(Clone)]
pub struct RcSlice<T> {
data: Rc<Box<[T]>>,
@ -41,3 +44,13 @@ impl<T: fmt::Debug> fmt::Debug for RcSlice<T> {
fmt::Debug::fmt(self.deref(), f)
}
}

impl<CTX, T> HashStable<CTX> for RcSlice<T>
where T: HashStable<CTX>
{
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut CTX,
hasher: &mut StableHasher<W>) {
(**self).hash_stable(hcx, hasher);
}
}

@ -13,6 +13,10 @@
#![deny(const_err)]

const X: *const u8 = b"" as _;
const Y: bool = 'A' == 'B';
const Z: char = 'A';
const W: bool = Z <= 'B';


fn main() {
let _ = ((-1 as i8) << 8 - 1) as f32;

@ -13,7 +13,8 @@
// ignore-tidy-end-whitespace

// @has foo/fn.f.html
// @has - '<p>hard break:<br>after hard break</p>'
// @has - '<p>hard break:<br />'
// @has - 'after hard break</p>'
/// hard break:
/// after hard break
pub fn f() {}

@ -13,16 +13,21 @@
// ignore-tidy-linelength

// @has foo/fn.f.html
// @has - '<p>markdown test</p><p>this is a <a href="https://example.com" title="this is a title">link</a>.</p><p>hard break: after hard break</p><hr><p>a footnote<sup id="supref1"><a href="#ref1">1</a></sup>.</p><p>another footnote<sup id="supref2"><a href="#ref2">2</a></sup>.</p><p><img src="https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png" alt="Rust"></p><div class="footnotes"><hr><ol><li id="ref1"><p>Thing <a href="#supref1" rev="footnote">↩</a></p></li><li id="ref2"><p>Another Thing <a href="#supref2" rev="footnote">↩</a></p></li></ol></div>'
// @has - '<p>markdown test</p>'
// @has - '<p>this is a <a href="https://example.com" title="this is a title">link</a>.</p>'
// @has - '<hr />'
// @has - '<p>a footnote<sup id="supref1"><a href="#ref1">1</a></sup>.</p>'
// @has - '<p>another footnote<sup id="supref2"><a href="#ref2">2</a></sup>.</p>'
// @has - '<p><img src="https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png" alt="Rust" /></p>'
// @has - '<div class="footnotes"><hr><ol><li id="ref1">'
// @has - '<p>Thing <a href="#supref1" rev="footnote">↩</a></p></li><li id="ref2">'
// @has - '<p>Another Thing <a href="#supref2" rev="footnote">↩</a></p></li></ol></div>'
/// markdown test
///
/// this is a [link].
///
/// [link]: https://example.com "this is a title"
///
/// hard break:
/// after hard break
///
/// -----------
///
/// a footnote[^footnote].
@ -36,5 +41,4 @@
///
///
/// ![Rust](https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png)
#[deprecated(note = "Struct<T>")]
pub fn f() {}

@ -10,10 +10,11 @@

#![crate_name = "foo"]

// ignore-tidy-linelength

// @has foo/fn.f.html
// @has - "<pre class='rust fn'>pub fn f()</pre><div class='docblock'><ol><li>list<ol><li>fooooo</li><li>x</li></ol></li><li>foo</li></ol>"
// @has - //ol/li "list"
// @has - //ol/li/ol/li "fooooo"
// @has - //ol/li/ol/li "x"
// @has - //ol/li "foo"
/// 1. list
/// 1. fooooo
/// 2. x
@ -21,7 +22,10 @@
pub fn f() {}

// @has foo/fn.foo2.html
// @has - "<pre class='rust fn'>pub fn foo2()</pre><div class='docblock'><ul><li>normal list<ul><li><p>sub list</p></li><li><p>new elem still same elem</p><p>and again same elem!</p></li></ul></li><li>new big elem</li></ul>"
// @has - //ul/li "normal list"
// @has - //ul/li/ul/li "sub list"
// @has - //ul/li/ul/li "new elem still same elem and again same elem!"
// @has - //ul/li "new big elem"
/// * normal list
/// * sub list
/// * new elem
@ -29,4 +33,4 @@ pub fn f() {}
///
/// and again same elem!
/// * new big elem
pub fn foo2() {}
pub fn foo2() {}

@ -51,3 +51,9 @@ pub fn with_arg(z: Z, w: &Z) {
let _ = &mut z.x;
let _ = &mut w.x;
}

pub fn with_tuple() {
let mut y = 0;
let x = (&y,);
*x.0 = 1;
}

@ -90,5 +90,11 @@ error: cannot borrow immutable field `w.x` as mutable
52 | let _ = &mut w.x;
| ^^^ cannot mutably borrow immutable field

error: aborting due to 11 previous errors
error: cannot assign to immutable borrowed content `*x.0`
--> $DIR/issue-39544.rs:58:5
|
58 | *x.0 = 1;
| ^^^^^^^^ cannot borrow as mutable

error: aborting due to 12 previous errors

@ -24,7 +24,7 @@ use std::path::PathBuf;

use syntax::diagnostics::metadata::{get_metadata_dir, ErrorMetadataMap, ErrorMetadata};

use rustdoc::html::markdown::{Markdown, MarkdownOutputStyle, PLAYGROUND};
use rustdoc::html::markdown::{Markdown, PLAYGROUND};
use rustc_serialize::json;

enum OutputFormat {
@ -100,7 +100,7 @@ impl Formatter for HTMLFormatter {

// Description rendered as markdown.
match info.description {
Some(ref desc) => write!(output, "{}", Markdown(desc, MarkdownOutputStyle::Fancy))?,
Some(ref desc) => write!(output, "{}", Markdown(desc))?,
None => write!(output, "<p>No description.</p>\n")?,
}