Auto merge of #41237 - frewsxcv:rollup, r=frewsxcv

Rollup of 8 pull requests

- Successful merges: #40377, #40559, #41173, #41202, #41204, #41209, #41216, #41231
- Failed merges:
bors 2017-04-12 00:45:49 +00:00
commit da32752d92
33 changed files with 399 additions and 131 deletions

View File

@ -147,7 +147,7 @@ pub fn llvm(build: &Build, target: &str) {
}
if env::var_os("SCCACHE_ERROR_LOG").is_some() {
cfg.env("RUST_LOG", "sccache=debug");
cfg.env("RUST_LOG", "sccache=info");
}
// FIXME: we don't actually need to build all LLVM tools and all LLVM

View File

@ -38,7 +38,6 @@ if [ "$SCCACHE_BUCKET" != "" ]; then
args="$args --env AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID"
args="$args --env AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY"
args="$args --env SCCACHE_ERROR_LOG=/tmp/sccache/sccache.log"
args="$args --env SCCACHE_LOG_LEVEL=debug"
args="$args --volume $objdir/tmp:/tmp/sccache"
else
mkdir -p $HOME/.cache/sccache

View File

@ -114,6 +114,7 @@
- [loop_break_value](loop-break-value.md)
- [macro_reexport](macro-reexport.md)
- [main](main.md)
- [manually_drop](manually-drop.md)
- [map_entry_recover_keys](map-entry-recover-keys.md)
- [mpsc_select](mpsc-select.md)
- [n16](n16.md)

View File

@ -44,6 +44,7 @@
#![feature(heap_api)]
#![feature(inclusive_range)]
#![feature(lang_items)]
#![feature(manually_drop)]
#![feature(nonzero)]
#![feature(pattern)]
#![feature(placement_in)]

View File

@ -1558,7 +1558,7 @@ fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
// performance than with the 2nd method.
//
// All methods were benchmarked, and the 3rd showed best results. So we chose that one.
let mut tmp = NoDrop { value: ptr::read(&v[0]) };
let mut tmp = mem::ManuallyDrop::new(ptr::read(&v[0]));
// Intermediate state of the insertion process is always tracked by `hole`, which
// serves two purposes:
@ -1571,13 +1571,13 @@ fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
// fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it
// initially held exactly once.
let mut hole = InsertionHole {
src: &mut tmp.value,
src: &mut *tmp,
dest: &mut v[1],
};
ptr::copy_nonoverlapping(&v[1], &mut v[0], 1);
for i in 2..v.len() {
if !is_less(&v[i], &tmp.value) {
if !is_less(&v[i], &*tmp) {
break;
}
ptr::copy_nonoverlapping(&v[i], &mut v[i - 1], 1);
@ -1587,12 +1587,6 @@ fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
}
}
// Holds a value, but never drops it.
#[allow(unions_with_drop_fields)]
union NoDrop<T> {
value: T
}
// When dropped, copies from `src` into `dest`.
struct InsertionHole<T> {
src: *mut T,

View File

@ -691,9 +691,6 @@ extern "rust-intrinsic" {
/// initialize memory previously set to the result of `uninit`.
pub fn uninit<T>() -> T;
/// Moves a value out of scope without running drop glue.
pub fn forget<T>(_: T) -> ();
/// Reinterprets the bits of a value of one type as another type.
///
/// Both types must have the same size. Neither the original, nor the result,

View File

@ -1532,14 +1532,18 @@ pub trait Iterator {
/// Stopping at the first `true`:
///
/// ```
/// let a = [1, 2, 3];
/// let a = [1, 2, 3, 4];
///
/// let mut iter = a.iter();
///
/// assert_eq!(iter.position(|&x| x == 2), Some(1));
/// assert_eq!(iter.position(|&x| x >= 2), Some(1));
///
/// // we can still use `iter`, as there are more elements.
/// assert_eq!(iter.next(), Some(&3));
///
/// // The returned index depends on iterator state
/// assert_eq!(iter.position(|&x| x == 4), Some(0));
///
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]

View File

@ -171,7 +171,7 @@ pub use intrinsics::transmute;
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn forget<T>(t: T) {
unsafe { intrinsics::forget(t) }
ManuallyDrop::new(t);
}
/// Returns the size of a type in bytes.
@ -736,3 +736,121 @@ pub fn discriminant<T>(v: &T) -> Discriminant<T> {
}
}
/// A wrapper to inhibit compiler from automatically calling `T`'s destructor.
///
/// This wrapper is 0-cost.
///
/// # Examples
///
/// This wrapper helps with explicitly documenting the drop order dependencies between fields of
/// the type:
///
/// ```rust
/// # #![feature(manually_drop)]
/// use std::mem::ManuallyDrop;
/// struct Peach;
/// struct Banana;
/// struct Melon;
/// struct FruitBox {
/// // Immediately clear there's something non-trivial going on with these fields.
/// peach: ManuallyDrop<Peach>,
/// melon: Melon, // Field that's independent of the other two.
/// banana: ManuallyDrop<Banana>,
/// }
///
/// impl Drop for FruitBox {
/// fn drop(&mut self) {
/// unsafe {
/// // Explicit ordering in which field destructors are run, specified in the intuitive
/// // location: the destructor of the structure containing the fields.
/// // Moreover, one can now reorder fields within the struct however much they want.
/// ManuallyDrop::drop(&mut self.peach);
/// ManuallyDrop::drop(&mut self.banana);
/// }
/// // After destructor for `FruitBox` runs (this function), the destructor for Melon gets
/// // invoked in the usual manner, as it is not wrapped in `ManuallyDrop`.
/// }
/// }
/// ```
#[unstable(feature = "manually_drop", issue = "40673")]
#[allow(unions_with_drop_fields)]
pub union ManuallyDrop<T>{ value: T }
impl<T> ManuallyDrop<T> {
/// Wrap a value to be manually dropped.
///
/// # Examples
///
/// ```rust
/// # #![feature(manually_drop)]
/// use std::mem::ManuallyDrop;
/// ManuallyDrop::new(Box::new(()));
/// ```
#[unstable(feature = "manually_drop", issue = "40673")]
#[inline]
pub fn new(value: T) -> ManuallyDrop<T> {
ManuallyDrop { value: value }
}
/// Extract the value from the ManuallyDrop container.
///
/// # Examples
///
/// ```rust
/// # #![feature(manually_drop)]
/// use std::mem::ManuallyDrop;
/// let x = ManuallyDrop::new(Box::new(()));
/// let _: Box<()> = ManuallyDrop::into_inner(x);
/// ```
#[unstable(feature = "manually_drop", issue = "40673")]
#[inline]
pub fn into_inner(slot: ManuallyDrop<T>) -> T {
unsafe {
slot.value
}
}
/// Manually drops the contained value.
///
/// # Unsafety
///
/// This function runs the destructor of the contained value and thus the wrapped value
/// now represents uninitialized data. It is up to the user of this method to ensure the
/// uninitialized data is not actually used.
#[unstable(feature = "manually_drop", issue = "40673")]
#[inline]
pub unsafe fn drop(slot: &mut ManuallyDrop<T>) {
ptr::drop_in_place(&mut slot.value)
}
}
#[unstable(feature = "manually_drop", issue = "40673")]
impl<T> ::ops::Deref for ManuallyDrop<T> {
type Target = T;
#[inline]
fn deref(&self) -> &Self::Target {
unsafe {
&self.value
}
}
}
#[unstable(feature = "manually_drop", issue = "40673")]
impl<T> ::ops::DerefMut for ManuallyDrop<T> {
#[inline]
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe {
&mut self.value
}
}
}
#[unstable(feature = "manually_drop", issue = "40673")]
impl<T: ::fmt::Debug> ::fmt::Debug for ManuallyDrop<T> {
fn fmt(&self, fmt: &mut ::fmt::Formatter) -> ::fmt::Result {
unsafe {
fmt.debug_tuple("ManuallyDrop").field(&self.value).finish()
}
}
}
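
The same file's diff also rewrites `mem::forget` as `ManuallyDrop::new(t);`. A minimal standalone sketch of why discarding the wrapper leaks the value without running its destructor; the `Noisy` type is invented for this illustration, and on the nightly of this PR the example would additionally need `#![feature(manually_drop)]`:

```rust
use std::mem::ManuallyDrop;

struct Noisy;
impl Drop for Noisy {
    fn drop(&mut self) { println!("dropped"); }
}

fn main() {
    // The temporary ManuallyDrop<Noisy> is discarded at the end of this
    // statement, but ManuallyDrop never runs its contents' destructor,
    // so nothing is printed: exactly the behaviour `forget` needs.
    ManuallyDrop::new(Noisy);

    // An unwrapped value is dropped normally and prints "dropped".
    let _plain = Noisy;
}
```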

View File

@ -20,12 +20,6 @@ use cmp;
use mem;
use ptr;
/// Holds a value, but never drops it.
#[allow(unions_with_drop_fields)]
union NoDrop<T> {
value: T
}
/// When dropped, copies from `src` into `dest`.
struct CopyOnDrop<T> {
src: *mut T,
@ -49,15 +43,15 @@ fn shift_head<T, F>(v: &mut [T], is_less: &mut F)
// Read the first element into a stack-allocated variable. If a following comparison
// operation panics, `hole` will get dropped and automatically write the element back
// into the slice.
let mut tmp = NoDrop { value: ptr::read(v.get_unchecked(0)) };
let mut tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(0)));
let mut hole = CopyOnDrop {
src: &mut tmp.value,
src: &mut *tmp,
dest: v.get_unchecked_mut(1),
};
ptr::copy_nonoverlapping(v.get_unchecked(1), v.get_unchecked_mut(0), 1);
for i in 2..len {
if !is_less(v.get_unchecked(i), &tmp.value) {
if !is_less(v.get_unchecked(i), &*tmp) {
break;
}
@ -81,15 +75,15 @@ fn shift_tail<T, F>(v: &mut [T], is_less: &mut F)
// Read the last element into a stack-allocated variable. If a following comparison
// operation panics, `hole` will get dropped and automatically write the element back
// into the slice.
let mut tmp = NoDrop { value: ptr::read(v.get_unchecked(len - 1)) };
let mut tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(len - 1)));
let mut hole = CopyOnDrop {
src: &mut tmp.value,
src: &mut *tmp,
dest: v.get_unchecked_mut(len - 2),
};
ptr::copy_nonoverlapping(v.get_unchecked(len - 2), v.get_unchecked_mut(len - 1), 1);
for i in (0..len-2).rev() {
if !is_less(&tmp.value, v.get_unchecked(i)) {
if !is_less(&*tmp, v.get_unchecked(i)) {
break;
}
@ -403,12 +397,12 @@ fn partition<T, F>(v: &mut [T], pivot: usize, is_less: &mut F) -> (usize, bool)
// Read the pivot into a stack-allocated variable for efficiency. If a following comparison
// operation panics, the pivot will be automatically written back into the slice.
let mut tmp = NoDrop { value: unsafe { ptr::read(pivot) } };
let mut tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) });
let _pivot_guard = CopyOnDrop {
src: unsafe { &mut tmp.value },
src: &mut *tmp,
dest: pivot,
};
let pivot = unsafe { &tmp.value };
let pivot = &*tmp;
// Find the first pair of out-of-order elements.
let mut l = 0;
@ -452,12 +446,12 @@ fn partition_equal<T, F>(v: &mut [T], pivot: usize, is_less: &mut F) -> usize
// Read the pivot into a stack-allocated variable for efficiency. If a following comparison
// operation panics, the pivot will be automatically written back into the slice.
let mut tmp = NoDrop { value: unsafe { ptr::read(pivot) } };
let mut tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) });
let _pivot_guard = CopyOnDrop {
src: unsafe { &mut tmp.value },
src: &mut *tmp,
dest: pivot,
};
let pivot = unsafe { &tmp.value };
let pivot = &*tmp;
// Now partition the slice.
let mut l = 0;
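
For readers unfamiliar with the idiom being migrated in these sort routines, here is a small self-contained sketch of the "read into a stack slot, write back on drop" pattern that `InsertionHole` and `CopyOnDrop` implement; the `WriteBack` guard below is an invented stand-in rather than the code from this diff:

```rust
use std::mem::ManuallyDrop;
use std::ptr;

// Writes `*src` back into `*dest` when dropped (same idea as CopyOnDrop).
struct WriteBack<T> {
    src: *mut T,
    dest: *mut T,
}

impl<T> Drop for WriteBack<T> {
    fn drop(&mut self) {
        unsafe { ptr::copy_nonoverlapping(self.src, self.dest, 1); }
    }
}

fn main() {
    let mut v = vec![String::from("a"), String::from("b")];
    unsafe {
        // Bitwise-copy element 0 into a slot that is never dropped on its own.
        let mut tmp = ManuallyDrop::new(ptr::read(&v[0]));
        // The guard writes the value back even if later code panics, so `v`
        // never ends up holding a dropped or duplicated String.
        let _guard = WriteBack { src: &mut *tmp, dest: &mut v[0] };
        // ... comparisons that might panic would run here ...
    }
    assert_eq!(v[0], "a");
}
```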

View File

@ -643,6 +643,8 @@ macro_rules! options {
Some("one of: `address`, `leak`, `memory` or `thread`");
pub const parse_linker_flavor: Option<&'static str> =
Some(::rustc_back::LinkerFlavor::one_of());
pub const parse_optimization_fuel: Option<&'static str> =
Some("crate=integer");
}
#[allow(dead_code)]
@ -787,6 +789,21 @@ macro_rules! options {
}
true
}
fn parse_optimization_fuel(slot: &mut Option<(String, u64)>, v: Option<&str>) -> bool {
match v {
None => false,
Some(s) => {
let parts = s.split('=').collect::<Vec<_>>();
if parts.len() != 2 { return false; }
let crate_name = parts[0].to_string();
let fuel = parts[1].parse::<u64>();
if fuel.is_err() { return false; }
*slot = Some((crate_name, fuel.unwrap()));
true
}
}
}
}
) }
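
A standalone mirror of the new `-Z fuel=<crate>=<n>` value parser, to make the accepted inputs concrete; the free function below is illustrative only and is not part of rustc:

```rust
// Parses "<crate>=<n>" into (crate_name, fuel), mirroring parse_optimization_fuel.
fn parse_fuel(v: &str) -> Option<(String, u64)> {
    let parts: Vec<&str> = v.split('=').collect();
    if parts.len() != 2 {
        return None;
    }
    parts[1].parse::<u64>().ok().map(|fuel| (parts[0].to_string(), fuel))
}

fn main() {
    assert_eq!(parse_fuel("foo=25"), Some(("foo".to_string(), 25)));
    assert_eq!(parse_fuel("foo"), None);     // missing the "=<n>" part
    assert_eq!(parse_fuel("foo=bar"), None); // fuel must be an integer
}
```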
@ -991,6 +1008,10 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
"Use a sanitizer"),
linker_flavor: Option<LinkerFlavor> = (None, parse_linker_flavor, [UNTRACKED],
"Linker flavor"),
fuel: Option<(String, u64)> = (None, parse_optimization_fuel, [TRACKED],
"Set the optimization fuel quota for a crate."),
print_fuel: Option<String> = (None, parse_opt_string, [TRACKED],
"Make Rustc print the total optimization fuel used by a crate."),
}
pub fn default_lib_output() -> CrateType {
@ -1784,11 +1805,13 @@ mod dep_tracking {
impl_dep_tracking_hash_via_hash!(bool);
impl_dep_tracking_hash_via_hash!(usize);
impl_dep_tracking_hash_via_hash!(u64);
impl_dep_tracking_hash_via_hash!(String);
impl_dep_tracking_hash_via_hash!(lint::Level);
impl_dep_tracking_hash_via_hash!(Option<bool>);
impl_dep_tracking_hash_via_hash!(Option<usize>);
impl_dep_tracking_hash_via_hash!(Option<String>);
impl_dep_tracking_hash_via_hash!(Option<(String, u64)>);
impl_dep_tracking_hash_via_hash!(Option<PanicStrategy>);
impl_dep_tracking_hash_via_hash!(Option<lint::Level>);
impl_dep_tracking_hash_via_hash!(Option<PathBuf>);
@ -1810,6 +1833,7 @@ mod dep_tracking {
impl_dep_tracking_hash_for_sortable_vec_of!((String, lint::Level));
impl_dep_tracking_hash_for_sortable_vec_of!((String, Option<String>,
Option<cstore::NativeLibraryKind>));
impl_dep_tracking_hash_for_sortable_vec_of!((String, u64));
impl DepTrackingHash for SearchPaths {
fn hash(&self, hasher: &mut DefaultHasher, _: ErrorOutputType) {
let mut elems: Vec<_> = self

View File

@ -123,6 +123,20 @@ pub struct Session {
pub code_stats: RefCell<CodeStats>,
next_node_id: Cell<ast::NodeId>,
/// If -zfuel=crate=n is specified, Some(crate).
optimization_fuel_crate: Option<String>,
/// If -zfuel=crate=n is specified, initially set to n. Otherwise 0.
optimization_fuel_limit: Cell<u64>,
/// We're rejecting all further optimizations.
out_of_fuel: Cell<bool>,
// The next two are public because the driver needs to read them.
/// If -zprint-fuel=crate, Some(crate).
pub print_fuel_crate: Option<String>,
/// Always set to zero and incremented so that we can print fuel expended by a crate.
pub print_fuel: Cell<u64>,
}
pub struct PerfStats {
@ -507,6 +521,32 @@ impl Session {
println!("Total time spent decoding DefPath tables: {}",
duration_to_secs_str(self.perf_stats.decode_def_path_tables_time.get()));
}
/// We want to know if we're allowed to do an optimization for crate foo from -z fuel=foo=n.
/// This expends fuel if applicable, and records fuel if applicable.
pub fn consider_optimizing<T: Fn() -> String>(&self, crate_name: &str, msg: T) -> bool {
let mut ret = true;
match self.optimization_fuel_crate {
Some(ref c) if c == crate_name => {
let fuel = self.optimization_fuel_limit.get();
ret = fuel != 0;
if fuel == 0 && !self.out_of_fuel.get() {
println!("optimization-fuel-exhausted: {}", msg());
self.out_of_fuel.set(true);
} else if fuel > 0 {
self.optimization_fuel_limit.set(fuel-1);
}
}
_ => {}
}
match self.print_fuel_crate {
Some(ref c) if c == crate_name => {
self.print_fuel.set(self.print_fuel.get()+1);
},
_ => {}
}
ret
}
}
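
To make the runtime behaviour of the fuel counter concrete, here is a simplified standalone model of `consider_optimizing` with the crate-name check and diagnostic printing stripped out; the `Fuel` type is invented for this sketch:

```rust
use std::cell::Cell;

struct Fuel {
    limit: Cell<u64>,
}

impl Fuel {
    // Returns true while fuel remains, burning one unit per permitted optimization.
    fn consider_optimizing(&self) -> bool {
        let fuel = self.limit.get();
        if fuel == 0 {
            false
        } else {
            self.limit.set(fuel - 1);
            true
        }
    }
}

fn main() {
    let fuel = Fuel { limit: Cell::new(1) }; // as with -Z fuel=foo=1
    assert!(fuel.consider_optimizing());     // first optimization is allowed
    assert!(!fuel.consider_optimizing());    // out of fuel: the rest are rejected
}
```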
pub fn build_session(sopts: config::Options,
@ -602,6 +642,12 @@ pub fn build_session_(sopts: config::Options,
}
);
let optimization_fuel_crate = sopts.debugging_opts.fuel.as_ref().map(|i| i.0.clone());
let optimization_fuel_limit = Cell::new(sopts.debugging_opts.fuel.as_ref()
.map(|i| i.1).unwrap_or(0));
let print_fuel_crate = sopts.debugging_opts.print_fuel.clone();
let print_fuel = Cell::new(0);
let sess = Session {
dep_graph: dep_graph.clone(),
target: target_cfg,
@ -643,6 +689,11 @@ pub fn build_session_(sopts: config::Options,
decode_def_path_tables_time: Cell::new(Duration::from_secs(0)),
},
code_stats: RefCell::new(CodeStats::new()),
optimization_fuel_crate: optimization_fuel_crate,
optimization_fuel_limit: optimization_fuel_limit,
print_fuel_crate: print_fuel_crate,
print_fuel: print_fuel,
out_of_fuel: Cell::new(false),
};
init_llvm(&sess);

View File

@ -732,6 +732,11 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
ast_ty_to_ty_cache: RefCell::new(NodeMap()),
}, f)
}
pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
let cname = self.crate_name(LOCAL_CRATE).as_str();
self.sess.consider_optimizing(&cname, msg)
}
}
impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> {

View File

@ -580,7 +580,6 @@ enum StructKind {
}
impl<'a, 'gcx, 'tcx> Struct {
// FIXME(camlorn): reprs need a better representation to deal with multiple reprs on one type.
fn new(dl: &TargetDataLayout, fields: &Vec<&'a Layout>,
repr: &ReprOptions, kind: StructKind,
scapegoat: Ty<'gcx>) -> Result<Struct, LayoutError<'gcx>> {
@ -598,12 +597,8 @@ impl<'a, 'gcx, 'tcx> Struct {
// Neither do 1-member and 2-member structs.
// In addition, code in trans assume that 2-element structs can become pairs.
// It's easier to just short-circuit here.
let mut can_optimize = (fields.len() > 2 || StructKind::EnumVariant == kind)
&& ! (repr.c || repr.packed);
// Disable field reordering until we can decide what to do.
// The odd pattern here avoids a warning about the value never being read.
if can_optimize { can_optimize = false; }
let can_optimize = (fields.len() > 2 || StructKind::EnumVariant == kind)
&& !(repr.c || repr.packed || repr.linear || repr.simd);
let (optimize, sort_ascending) = match kind {
StructKind::AlwaysSizedUnivariant => (can_optimize, false),
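
As a rough illustration of what the newly enabled reordering buys (a sketch; the default representation's layout is unspecified, so only the `#[repr(C)]` size is asserted):

```rust
#![allow(dead_code)]

// C layout keeps declaration order: u8, pad, u16, u8, pad -> 6 bytes.
#[repr(C)]
struct DeclarationOrder(u8, u16, u8);

// Default repr: the compiler may now sort fields by size, allowing 4 bytes.
struct Reorderable(u8, u16, u8);

fn main() {
    assert_eq!(std::mem::size_of::<DeclarationOrder>(), 6);
    // Not asserted, since the default layout is unspecified; with this change
    // it can shrink to 4 bytes, as the run-pass tests later in this rollup show.
    println!("Reorderable: {} bytes", std::mem::size_of::<Reorderable>());
}
```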

View File

@ -1411,13 +1411,16 @@ pub struct ReprOptions {
pub packed: bool,
pub simd: bool,
pub int: Option<attr::IntType>,
// Internal only for now. If true, don't reorder fields.
pub linear: bool,
}
impl_stable_hash_for!(struct ReprOptions {
c,
packed,
simd,
int
int,
linear
});
impl ReprOptions {
@ -1440,6 +1443,9 @@ impl ReprOptions {
ret.simd = true;
}
// This is here instead of layout because the choice must make it into metadata.
ret.linear = !tcx.consider_optimizing(|| format!("Reorder fields of {:?}",
tcx.item_path_str(did)));
ret
}

View File

@ -20,10 +20,11 @@ use std::fmt;
use std::mem;
use std::collections::range::RangeArgument;
use std::collections::Bound::{Excluded, Included, Unbounded};
use std::mem::ManuallyDrop;
pub unsafe trait Array {
type Element;
type PartialStorage: Default + Unsize<[ManuallyDrop<Self::Element>]>;
type PartialStorage: Unsize<[ManuallyDrop<Self::Element>]>;
const LEN: usize;
}
@ -66,7 +67,7 @@ impl<A: Array> ArrayVec<A> {
pub fn new() -> Self {
ArrayVec {
count: 0,
values: Default::default(),
values: unsafe { ::std::mem::uninitialized() },
}
}
@ -81,7 +82,7 @@ impl<A: Array> ArrayVec<A> {
/// Panics when the stack vector is full.
pub fn push(&mut self, el: A::Element) {
let arr = &mut self.values as &mut [ManuallyDrop<_>];
arr[self.count] = ManuallyDrop { value: el };
arr[self.count] = ManuallyDrop::new(el);
self.count += 1;
}
@ -90,8 +91,8 @@ impl<A: Array> ArrayVec<A> {
let arr = &mut self.values as &mut [ManuallyDrop<_>];
self.count -= 1;
unsafe {
let value = ptr::read(&arr[self.count]);
Some(value.value)
let value = ptr::read(&*arr[self.count]);
Some(value)
}
} else {
None
@ -210,7 +211,7 @@ impl<A: Array> Iterator for Iter<A> {
fn next(&mut self) -> Option<A::Element> {
let arr = &self.store as &[ManuallyDrop<_>];
unsafe {
self.indices.next().map(|i| ptr::read(&arr[i]).value)
self.indices.next().map(|i| ptr::read(&*arr[i]))
}
}
@ -233,7 +234,7 @@ impl<'a, A: Array> Iterator for Drain<'a, A> {
#[inline]
fn next(&mut self) -> Option<A::Element> {
self.iter.next().map(|elt| unsafe { ptr::read(elt as *const ManuallyDrop<_>).value })
self.iter.next().map(|elt| unsafe { ptr::read(&**elt) })
}
fn size_hint(&self) -> (usize, Option<usize>) {
@ -295,25 +296,3 @@ impl<'a, A: Array> IntoIterator for &'a mut ArrayVec<A> {
self.iter_mut()
}
}
// FIXME: This should use repr(transparent) from rust-lang/rfcs#1758.
#[allow(unions_with_drop_fields)]
pub union ManuallyDrop<T> {
value: T,
#[allow(dead_code)]
empty: (),
}
impl<T> ManuallyDrop<T> {
fn new() -> ManuallyDrop<T> {
ManuallyDrop {
empty: ()
}
}
}
impl<T> Default for ManuallyDrop<T> {
fn default() -> Self {
ManuallyDrop::new()
}
}
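
The rewritten `pop` and `Drain::next` above move a value out of a `ManuallyDrop` slot by `ptr::read`ing through its `Deref` impl. A tiny standalone sketch of that pattern (nothing here is the `ArrayVec` API itself, and on the nightly of this PR it would need `#![feature(manually_drop)]`):

```rust
use std::mem::ManuallyDrop;
use std::ptr;

fn main() {
    let slot = ManuallyDrop::new(String::from("hi"));
    // Bitwise-copy the String out; `slot` must not be touched afterwards.
    // Because `slot` is ManuallyDrop, its copy is never dropped, so there is
    // no double free: only `s` owns the allocation from here on.
    let s: String = unsafe { ptr::read(&*slot) };
    assert_eq!(s, "hi");
}
```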

View File

@ -39,6 +39,7 @@
#![feature(conservative_impl_trait)]
#![feature(discriminant_value)]
#![feature(specialization)]
#![feature(manually_drop)]
#![cfg_attr(unix, feature(libc))]
#![cfg_attr(test, feature(test))]

View File

@ -517,6 +517,16 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls {
control.make_glob_map = resolve::MakeGlobMap::Yes;
}
if sess.print_fuel_crate.is_some() {
let old_callback = control.compilation_done.callback;
control.compilation_done.callback = box move |state| {
old_callback(state);
let sess = state.session;
println!("Fuel used by {}: {}",
sess.print_fuel_crate.as_ref().unwrap(),
sess.print_fuel.get());
}
}
control
}
}

View File

@ -16,7 +16,7 @@ use llvm;
use llvm::{ValueRef};
use abi::{Abi, FnType};
use adt;
use mir::lvalue::LvalueRef;
use mir::lvalue::{LvalueRef, Alignment};
use base::*;
use common::*;
use declare;
@ -36,8 +36,6 @@ use syntax_pos::Span;
use std::cmp::Ordering;
use std::iter;
use mir::lvalue::Alignment;
fn get_simple_intrinsic(ccx: &CrateContext, name: &str) -> Option<ValueRef> {
let llvm_name = match name {
"sqrtf32" => "llvm.sqrt.f32",
@ -188,7 +186,7 @@ pub fn trans_intrinsic_call<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
C_nil(ccx)
}
// Effectively no-ops
"uninit" | "forget" => {
"uninit" => {
C_nil(ccx)
}
"needs_drop" => {
@ -622,7 +620,10 @@ pub fn trans_intrinsic_call<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
for i in 0..elems.len() {
let val = bcx.extract_value(val, i);
bcx.store(val, bcx.struct_gep(llresult, i), None);
let lval = LvalueRef::new_sized_ty(llresult, ret_ty,
Alignment::AbiAligned);
let (dest, align) = lval.trans_field_ptr(bcx, i);
bcx.store(val, dest, align.to_align());
}
C_nil(ccx)
}

View File

@ -386,7 +386,7 @@ fn arg_local_refs<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
let lvalue = LvalueRef::alloca(bcx, arg_ty, &format!("arg{}", arg_index));
for (i, &tupled_arg_ty) in tupled_arg_tys.iter().enumerate() {
let dst = bcx.struct_gep(lvalue.llval, i);
let (dst, _) = lvalue.trans_field_ptr(bcx, i);
let arg = &mircx.fn_ty.args[idx];
idx += 1;
if common::type_is_fat_ptr(bcx.ccx, tupled_arg_ty) {

View File

@ -124,7 +124,6 @@ pub fn check_intrinsic_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
"rustc_peek" => (1, vec![param(0)], param(0)),
"init" => (1, Vec::new(), param(0)),
"uninit" => (1, Vec::new(), param(0)),
"forget" => (1, vec![ param(0) ], tcx.mk_nil()),
"transmute" => (2, vec![ param(0) ], param(1)),
"move_val_init" => {
(1,

View File

@ -469,22 +469,28 @@ impl LangString {
);
for token in tokens {
match token {
match token.trim() {
"" => {},
"should_panic" => { data.should_panic = true; seen_rust_tags = true; },
"no_run" => { data.no_run = true; seen_rust_tags = true; },
"ignore" => { data.ignore = true; seen_rust_tags = true; },
"rust" => { data.rust = true; seen_rust_tags = true; },
"test_harness" => { data.test_harness = true; seen_rust_tags = true; },
"should_panic" => {
data.should_panic = true;
seen_rust_tags = seen_other_tags == false;
}
"no_run" => { data.no_run = true; seen_rust_tags = !seen_other_tags; }
"ignore" => { data.ignore = true; seen_rust_tags = !seen_other_tags; }
"rust" => { data.rust = true; seen_rust_tags = true; }
"test_harness" => {
data.test_harness = true;
seen_rust_tags = !seen_other_tags || seen_rust_tags;
}
"compile_fail" if allow_compile_fail => {
data.compile_fail = true;
seen_rust_tags = true;
seen_rust_tags = !seen_other_tags || seen_rust_tags;
data.no_run = true;
}
x if allow_error_code_check && x.starts_with("E") && x.len() == 5 => {
if let Ok(_) = x[1..].parse::<u32>() {
data.error_codes.push(x.to_owned());
seen_rust_tags = true;
seen_rust_tags = !seen_other_tags || seen_rust_tags;
} else {
seen_other_tags = true;
}
@ -670,9 +676,11 @@ mod tests {
t("test_harness", false, false, false, true, true, false, Vec::new());
t("compile_fail", false, true, false, true, false, true, Vec::new());
t("{.no_run .example}", false, true, false, true, false, false, Vec::new());
t("{.sh .should_panic}", true, false, false, true, false, false, Vec::new());
t("{.sh .should_panic}", true, false, false, false, false, false, Vec::new());
t("{.example .rust}", false, false, false, true, false, false, Vec::new());
t("{.test_harness .rust}", false, false, false, true, true, false, Vec::new());
t("text, no_run", false, true, false, false, false, false, Vec::new());
t("text,no_run", false, true, false, false, false, false, Vec::new());
}
#[test]

View File

@ -277,7 +277,7 @@ mod prim_pointer { }
/// Arrays of sizes from 0 to 32 (inclusive) implement the following traits if
/// the element type allows it:
///
/// - [`Clone`][clone] (only if `T: [Copy][copy]`)
/// - [`Clone`][clone] (only if `T: `[`Copy`][copy])
/// - [`Debug`][debug]
/// - [`IntoIterator`][intoiterator] (implemented for `&[T; N]` and `&mut [T; N]`)
/// - [`PartialEq`][partialeq], [`PartialOrd`][partialord], [`Eq`][eq], [`Ord`][ord]

View File

@ -10,10 +10,9 @@
#![feature(core_intrinsics)]
use std::intrinsics::{init, forget};
use std::intrinsics::{init};
// Test that the `forget` and `init` intrinsics are really unsafe
pub fn main() {
let stuff = init::<isize>(); //~ ERROR call to unsafe function requires unsafe
forget(stuff); //~ ERROR call to unsafe function requires unsafe
}

View File

@ -0,0 +1,24 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_name="foo"]
use std::mem::size_of;
// compile-flags: -Z fuel=foo=0
struct S1(u8, u16, u8);
struct S2(u8, u16, u8);
fn main() {
assert_eq!(size_of::<S1>(), 6);
assert_eq!(size_of::<S2>(), 6);
}

View File

@ -0,0 +1,26 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_name="foo"]
use std::mem::size_of;
// compile-flags: -Z fuel=foo=1
struct S1(u8, u16, u8);
struct S2(u8, u16, u8);
fn main() {
let optimized = (size_of::<S1>() == 4) as usize
+(size_of::<S2>() == 4) as usize;
assert_eq!(optimized, 1);
}

View File

@ -31,6 +31,17 @@ enum e3 {
a([u16; 0], u8), b
}
struct ReorderedStruct {
a: u8,
b: u16,
c: u8
}
enum ReorderedEnum {
A(u8, u16, u8),
B(u8, u16, u8),
}
pub fn main() {
assert_eq!(size_of::<u8>(), 1 as usize);
assert_eq!(size_of::<u32>(), 4 as usize);
@ -54,4 +65,6 @@ pub fn main() {
assert_eq!(size_of::<e1>(), 8 as usize);
assert_eq!(size_of::<e2>(), 8 as usize);
assert_eq!(size_of::<e3>(), 4 as usize);
assert_eq!(size_of::<ReorderedStruct>(), 4);
assert_eq!(size_of::<ReorderedEnum>(), 6);
}

View File

@ -0,0 +1,21 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_name="foo"]
#![allow(dead_code)]
// compile-flags: -Z print-fuel=foo
struct S1(u8, u16, u8);
struct S2(u8, u16, u8);
struct S3(u8, u16, u8);
fn main() {
}

View File

@ -0,0 +1 @@
Fuel used by foo: 3

View File

@ -1,25 +1,22 @@
print-type-size type: `IndirectNonZero<u32>`: 20 bytes, alignment: 4 bytes
print-type-size field `.pre`: 1 bytes
print-type-size padding: 3 bytes
print-type-size field `.nested`: 12 bytes, alignment: 4 bytes
print-type-size type: `IndirectNonZero<u32>`: 12 bytes, alignment: 4 bytes
print-type-size field `.nested`: 8 bytes
print-type-size field `.post`: 2 bytes
print-type-size end padding: 2 bytes
print-type-size type: `MyOption<IndirectNonZero<u32>>`: 20 bytes, alignment: 4 bytes
print-type-size variant `Some`: 20 bytes
print-type-size field `.0`: 20 bytes
print-type-size type: `EmbeddedDiscr`: 12 bytes, alignment: 4 bytes
print-type-size variant `Record`: 10 bytes
print-type-size field `.pre`: 1 bytes
print-type-size padding: 3 bytes
print-type-size field `.val`: 4 bytes, alignment: 4 bytes
print-type-size end padding: 1 bytes
print-type-size type: `MyOption<IndirectNonZero<u32>>`: 12 bytes, alignment: 4 bytes
print-type-size variant `Some`: 12 bytes
print-type-size field `.0`: 12 bytes
print-type-size type: `EmbeddedDiscr`: 8 bytes, alignment: 4 bytes
print-type-size variant `Record`: 7 bytes
print-type-size field `.val`: 4 bytes
print-type-size field `.post`: 2 bytes
print-type-size end padding: 2 bytes
print-type-size type: `NestedNonZero<u32>`: 12 bytes, alignment: 4 bytes
print-type-size field `.pre`: 1 bytes
print-type-size padding: 3 bytes
print-type-size field `.val`: 4 bytes, alignment: 4 bytes
print-type-size end padding: 1 bytes
print-type-size type: `NestedNonZero<u32>`: 8 bytes, alignment: 4 bytes
print-type-size field `.val`: 4 bytes
print-type-size field `.post`: 2 bytes
print-type-size end padding: 2 bytes
print-type-size field `.pre`: 1 bytes
print-type-size end padding: 1 bytes
print-type-size type: `MyOption<core::nonzero::NonZero<u32>>`: 4 bytes, alignment: 4 bytes
print-type-size variant `Some`: 4 bytes
print-type-size field `.0`: 4 bytes

View File

@ -1,13 +1,11 @@
print-type-size type: `Padded`: 16 bytes, alignment: 4 bytes
print-type-size type: `Padded`: 12 bytes, alignment: 4 bytes
print-type-size field `.g`: 4 bytes
print-type-size field `.h`: 2 bytes
print-type-size field `.a`: 1 bytes
print-type-size field `.b`: 1 bytes
print-type-size padding: 2 bytes
print-type-size field `.g`: 4 bytes, alignment: 4 bytes
print-type-size field `.c`: 1 bytes
print-type-size padding: 1 bytes
print-type-size field `.h`: 2 bytes, alignment: 2 bytes
print-type-size field `.d`: 1 bytes
print-type-size end padding: 3 bytes
print-type-size end padding: 2 bytes
print-type-size type: `Packed`: 10 bytes, alignment: 1 bytes
print-type-size field `.a`: 1 bytes
print-type-size field `.b`: 1 bytes

View File

@ -1,10 +1,12 @@
print-type-size type: `E1`: 12 bytes, alignment: 4 bytes
print-type-size discriminant: 4 bytes
print-type-size variant `A`: 5 bytes
print-type-size field `.0`: 4 bytes
print-type-size discriminant: 1 bytes
print-type-size variant `A`: 7 bytes
print-type-size field `.1`: 1 bytes
print-type-size variant `B`: 8 bytes
print-type-size field `.0`: 8 bytes
print-type-size padding: 2 bytes
print-type-size field `.0`: 4 bytes, alignment: 4 bytes
print-type-size variant `B`: 11 bytes
print-type-size padding: 3 bytes
print-type-size field `.0`: 8 bytes, alignment: 4 bytes
print-type-size type: `E2`: 12 bytes, alignment: 4 bytes
print-type-size discriminant: 1 bytes
print-type-size variant `A`: 7 bytes
@ -15,7 +17,7 @@ print-type-size variant `B`: 11 bytes
print-type-size padding: 3 bytes
print-type-size field `.0`: 8 bytes, alignment: 4 bytes
print-type-size type: `S`: 8 bytes, alignment: 4 bytes
print-type-size field `.g`: 4 bytes
print-type-size field `.a`: 1 bytes
print-type-size field `.b`: 1 bytes
print-type-size padding: 2 bytes
print-type-size field `.g`: 4 bytes, alignment: 4 bytes
print-type-size end padding: 2 bytes

View File

@ -11,7 +11,7 @@
extern crate toml;
extern crate rustc_serialize;
use std::collections::{BTreeMap, HashMap};
use std::collections::BTreeMap;
use std::env;
use std::fs::File;
use std::io::{self, Read, Write};
@ -101,13 +101,13 @@ static MINGW: &'static [&'static str] = &[
struct Manifest {
manifest_version: String,
date: String,
pkg: HashMap<String, Package>,
pkg: BTreeMap<String, Package>,
}
#[derive(RustcEncodable)]
struct Package {
version: String,
target: HashMap<String, Target>,
target: BTreeMap<String, Target>,
}
#[derive(RustcEncodable)]
@ -138,7 +138,7 @@ struct Builder {
input: PathBuf,
output: PathBuf,
gpg_passphrase: String,
digests: HashMap<String, String>,
digests: BTreeMap<String, String>,
s3_address: String,
date: String,
rust_version: String,
@ -162,7 +162,7 @@ fn main() {
input: input,
output: output,
gpg_passphrase: passphrase,
digests: HashMap::new(),
digests: BTreeMap::new(),
s3_address: s3_address,
date: date,
rust_version: String::new(),
@ -214,7 +214,7 @@ impl Builder {
let mut manifest = Manifest {
manifest_version: "2".to_string(),
date: self.date.to_string(),
pkg: HashMap::new(),
pkg: BTreeMap::new(),
};
self.package("rustc", &mut manifest.pkg, HOSTS);
@ -230,7 +230,7 @@ impl Builder {
let mut pkg = Package {
version: self.cached_version("rust").to_string(),
target: HashMap::new(),
target: BTreeMap::new(),
};
for host in HOSTS {
let filename = self.filename("rust", host);
@ -299,7 +299,7 @@ impl Builder {
fn package(&mut self,
pkgname: &str,
dst: &mut HashMap<String, Package>,
dst: &mut BTreeMap<String, Package>,
targets: &[&str]) {
let targets = targets.iter().map(|name| {
let filename = self.filename(pkgname, name);