Auto merge of #35764 - eddyb:byegone, r=nikomatsakis
Remove the old AST-based backend from rustc_trans.

Starting with Rust 1.13, `--disable-orbit`, `-Z orbit=off`, and `#[rustc_no_mir]` have been removed.
Only the new MIR backend is left in the compiler, and only early const_eval uses ASTs from other crates.

Filling drop (previously "zeroing drop"), `#[unsafe_no_drop_flag]`, and the associated unstable APIs are gone.
Implementing `Drop` no longer adds a flag to the type; all dynamic drop handling is now local to the function.
This is a [breaking-change]; if you are unsure about your ability to prevent or control the drop of a value, use `Option::None` and/or `mem::forget`. In the future, `union` will be usable in some such cases.
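For code that previously relied on filling drop or `#[unsafe_no_drop_flag]` to suppress or delay a destructor, here is a minimal sketch of the suggested workarounds; the `Resource` type is hypothetical and exists only to make the drop observable.

```rust
use std::mem;

struct Resource {
    name: String,
}

impl Drop for Resource {
    fn drop(&mut self) {
        println!("dropping {}", self.name);
    }
}

fn main() {
    // Wrapping the value in an Option lets you decide at runtime whether and
    // when it is dropped: `take()` moves the value out and leaves `None`
    // behind, and dropping a `None` runs no destructor.
    let mut slot = Some(Resource { name: String::from("a") });
    let taken = slot.take();
    drop(taken); // explicit, controlled drop; prints "dropping a"

    // `mem::forget` consumes the value without ever running its destructor
    // (the heap allocation owned by `name` is leaked).
    let leaked = Resource { name: String::from("b") };
    mem::forget(leaked); // nothing is printed for "b"
}
```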

**NOTE**: DO NOT MERGE before the new beta lands as stage0; there is still some cruft to remove.
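The cruft likely refers to the bootstrap gates visible throughout the diff below: attributes and feature gates that the current stage0 compiler still requires are kept only behind `cfg_attr(stage0, ...)`, so they disappear once a newer beta becomes stage0 and the wrappers can be deleted. A minimal sketch of that gating pattern, with a hypothetical `Example` type:

```rust
// Crate root: the feature gate is enabled only when building with the old
// stage0 compiler, which still knows about `unsafe_no_drop_flag`.
#![cfg_attr(stage0, feature(unsafe_no_drop_flag))]

// On each type that used to carry the attribute unconditionally, it is now
// applied only for stage0 builds; newer compilers never see it.
#[cfg_attr(stage0, unsafe_no_drop_flag)]
pub struct Example<T> {
    value: T,
}
```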

All of this will massively simplify any effort to implement features such as `union`s, safe use of `#[packed]`, or new type layout optimizations (the old backend currently blocks them), not to mention many other experiments.
bors committed Aug 24, 2016
2 parents 03e23c7 + 25cf800 commit e9bc1ba
Showing 140 changed files with 886 additions and 12,582 deletions.
2 changes: 0 additions & 2 deletions configure
@@ -733,8 +733,6 @@ if [ -n "$CFG_ENABLE_DEBUG_ASSERTIONS" ]; then putvar CFG_ENABLE_DEBUG_ASSERTION
if [ -n "$CFG_ENABLE_DEBUGINFO" ]; then putvar CFG_ENABLE_DEBUGINFO; fi
if [ -n "$CFG_ENABLE_DEBUG_JEMALLOC" ]; then putvar CFG_ENABLE_DEBUG_JEMALLOC; fi

if [ -n "$CFG_DISABLE_ORBIT" ]; then putvar CFG_DISABLE_ORBIT; fi

step_msg "looking for build programs"

probe_need CFG_CURL curl
6 changes: 0 additions & 6 deletions mk/main.mk
@@ -162,12 +162,6 @@ ifdef CFG_ENABLE_DEBUGINFO
CFG_RUSTC_FLAGS += -g
endif

ifdef CFG_DISABLE_ORBIT
$(info cfg: HOLD HOLD HOLD (CFG_DISABLE_ORBIT))
RUSTFLAGS_STAGE1 += -Z orbit=off
RUSTFLAGS_STAGE2 += -Z orbit=off
endif

ifdef SAVE_TEMPS
CFG_RUSTC_FLAGS += -C save-temps
endif
19 changes: 2 additions & 17 deletions src/liballoc/arc.rs
@@ -121,7 +121,7 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize;
/// }
/// ```

#[unsafe_no_drop_flag]
#[cfg_attr(stage0, unsafe_no_drop_flag)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Arc<T: ?Sized> {
ptr: Shared<ArcInner<T>>,
@@ -147,7 +147,7 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
/// nodes behind strong `Arc<T>` pointers, and then storing the parent pointers
/// as `Weak<T>` pointers.

#[unsafe_no_drop_flag]
#[cfg_attr(stage0, unsafe_no_drop_flag)]
#[stable(feature = "arc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> {
ptr: Shared<ArcInner<T>>,
@@ -559,15 +559,6 @@ impl<T: ?Sized> Drop for Arc<T> {
#[unsafe_destructor_blind_to_params]
#[inline]
fn drop(&mut self) {
// This structure has #[unsafe_no_drop_flag], so this drop glue may run
// more than once (but it is guaranteed to be zeroed after the first if
// it's run more than once)
let thin = *self.ptr as *const ();

if thin as usize == mem::POST_DROP_USIZE {
return;
}

// Because `fetch_sub` is already atomic, we do not need to synchronize
// with other threads unless we are going to delete the object. This
// same logic applies to the below `fetch_sub` to the `weak` count.
@@ -755,12 +746,6 @@ impl<T: ?Sized> Drop for Weak<T> {
/// ```
fn drop(&mut self) {
let ptr = *self.ptr;
let thin = ptr as *const ();

// see comments above for why this check is here
if thin as usize == mem::POST_DROP_USIZE {
return;
}

// If we find out that we were the last weak pointer, then its time to
// deallocate the data entirely. See the discussion in Arc::drop() about
2 changes: 1 addition & 1 deletion src/liballoc/lib.rs
@@ -88,7 +88,7 @@
#![feature(staged_api)]
#![feature(unboxed_closures)]
#![feature(unique)]
#![feature(unsafe_no_drop_flag, filling_drop)]
#![cfg_attr(stage0, feature(unsafe_no_drop_flag))]
#![feature(unsize)]

#![cfg_attr(not(test), feature(fused, raw, fn_traits, placement_new_protocol))]
11 changes: 2 additions & 9 deletions src/liballoc/raw_vec.rs
@@ -44,7 +44,7 @@ use core::cmp;
/// `shrink_to_fit`, and `from_box` will actually set RawVec's private capacity
/// field. This allows zero-sized types to not be special-cased by consumers of
/// this type.
#[unsafe_no_drop_flag]
#[cfg_attr(stage0, unsafe_no_drop_flag)]
pub struct RawVec<T> {
ptr: Unique<T>,
cap: usize,
@@ -546,21 +546,14 @@ impl<T> RawVec<T> {
mem::forget(self);
output
}

/// This is a stupid name in the hopes that someone will find this in the
/// not too distant future and remove it with the rest of
/// #[unsafe_no_drop_flag]
pub fn unsafe_no_drop_flag_needs_drop(&self) -> bool {
self.cap != mem::POST_DROP_USIZE
}
}

impl<T> Drop for RawVec<T> {
#[unsafe_destructor_blind_to_params]
/// Frees the memory owned by the RawVec *without* trying to Drop its contents.
fn drop(&mut self) {
let elem_size = mem::size_of::<T>();
if elem_size != 0 && self.cap != 0 && self.unsafe_no_drop_flag_needs_drop() {
if elem_size != 0 && self.cap != 0 {
let align = mem::align_of::<T>();

let num_bytes = elem_size * self.cap;
38 changes: 16 additions & 22 deletions src/liballoc/rc.rs
@@ -182,7 +182,7 @@ struct RcBox<T: ?Sized> {
/// A reference-counted pointer type over an immutable value.
///
/// See the [module level documentation](./index.html) for more details.
#[unsafe_no_drop_flag]
#[cfg_attr(stage0, unsafe_no_drop_flag)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Rc<T: ?Sized> {
ptr: Shared<RcBox<T>>,
@@ -466,21 +466,18 @@ impl<T: ?Sized> Drop for Rc<T> {
fn drop(&mut self) {
unsafe {
let ptr = *self.ptr;
let thin = ptr as *const ();

if thin as usize != mem::POST_DROP_USIZE {
self.dec_strong();
if self.strong() == 0 {
// destroy the contained object
ptr::drop_in_place(&mut (*ptr).value);
self.dec_strong();
if self.strong() == 0 {
// destroy the contained object
ptr::drop_in_place(&mut (*ptr).value);

// remove the implicit "strong weak" pointer now that we've
// destroyed the contents.
self.dec_weak();
// remove the implicit "strong weak" pointer now that we've
// destroyed the contents.
self.dec_weak();

if self.weak() == 0 {
deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
}
if self.weak() == 0 {
deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
}
}
}
@@ -724,7 +721,7 @@ impl<T> From<T> for Rc<T> {
/// dropped.
///
/// See the [module level documentation](./index.html) for more.
#[unsafe_no_drop_flag]
#[cfg_attr(stage0, unsafe_no_drop_flag)]
#[stable(feature = "rc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> {
ptr: Shared<RcBox<T>>,
@@ -825,15 +822,12 @@ impl<T: ?Sized> Drop for Weak<T> {
fn drop(&mut self) {
unsafe {
let ptr = *self.ptr;
let thin = ptr as *const ();

if thin as usize != mem::POST_DROP_USIZE {
self.dec_weak();
// the weak count starts at 1, and will only go to zero if all
// the strong pointers have disappeared.
if self.weak() == 0 {
deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
}
self.dec_weak();
// the weak count starts at 1, and will only go to zero if all
// the strong pointers have disappeared.
if self.weak() == 0 {
deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
}
}
}
2 changes: 1 addition & 1 deletion src/libcollections/lib.rs
@@ -52,7 +52,7 @@
#![feature(step_by)]
#![feature(unicode)]
#![feature(unique)]
#![feature(unsafe_no_drop_flag)]
#![cfg_attr(stage0, feature(unsafe_no_drop_flag))]
#![cfg_attr(test, feature(rand, test))]

#![no_std]
10 changes: 4 additions & 6 deletions src/libcollections/vec.rs
@@ -268,7 +268,7 @@ use super::range::RangeArgument;
/// Vec does not currently guarantee the order in which elements are dropped
/// (the order has changed in the past, and may change again).
///
#[unsafe_no_drop_flag]
#[cfg_attr(stage0, unsafe_no_drop_flag)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Vec<T> {
buf: RawVec<T>,
@@ -1600,11 +1600,9 @@ impl<T: Ord> Ord for Vec<T> {
impl<T> Drop for Vec<T> {
#[unsafe_destructor_blind_to_params]
fn drop(&mut self) {
if self.buf.unsafe_no_drop_flag_needs_drop() {
unsafe {
// use drop for [T]
ptr::drop_in_place(&mut self[..]);
}
unsafe {
// use drop for [T]
ptr::drop_in_place(&mut self[..]);
}
// RawVec handles deallocation
}
13 changes: 0 additions & 13 deletions src/libcore/intrinsics.rs
@@ -244,19 +244,6 @@ extern "rust-intrinsic" {
/// crate it is invoked in.
pub fn type_id<T: ?Sized + 'static>() -> u64;

/// Creates a value initialized to so that its drop flag,
/// if any, says that it has been dropped.
///
/// `init_dropped` is unsafe because it returns a datum with all
/// of its bytes set to the drop flag, which generally does not
/// correspond to a valid value.
///
/// This intrinsic is likely to be deprecated in the future when
/// Rust moves to non-zeroing dynamic drop (and thus removes the
/// embedded drop flags that are being established by this
/// intrinsic).
pub fn init_dropped<T>() -> T;

/// Creates a value initialized to zero.
///
/// `init` is unsafe because it returns a zeroed-out datum,
71 changes: 0 additions & 71 deletions src/libcore/mem.rs
@@ -241,27 +241,6 @@ pub unsafe fn zeroed<T>() -> T {
intrinsics::init()
}

/// Creates a value initialized to an unspecified series of bytes.
///
/// The byte sequence usually indicates that the value at the memory
/// in question has been dropped. Thus, *if* T carries a drop flag,
/// any associated destructor will not be run when the value falls out
/// of scope.
///
/// Some code at one time used the `zeroed` function above to
/// accomplish this goal.
///
/// This function is expected to be deprecated with the transition
/// to non-zeroing drop.
#[inline]
#[unstable(feature = "filling_drop", issue = "5016")]
pub unsafe fn dropped<T>() -> T {
#[inline(always)]
unsafe fn dropped_impl<T>() -> T { intrinsics::init_dropped() }

dropped_impl()
}

/// Bypasses Rust's normal memory-initialization checks by pretending to
/// produce a value of type T, while doing nothing at all.
///
@@ -518,56 +497,6 @@ pub fn replace<T>(dest: &mut T, mut src: T) -> T {
#[stable(feature = "rust1", since = "1.0.0")]
pub fn drop<T>(_x: T) { }

macro_rules! repeat_u8_as_u16 {
($name:expr) => { (($name as u16) << 8 |
($name as u16)) }
}
macro_rules! repeat_u8_as_u32 {
($name:expr) => { (($name as u32) << 24 |
($name as u32) << 16 |
($name as u32) << 8 |
($name as u32)) }
}
macro_rules! repeat_u8_as_u64 {
($name:expr) => { ((repeat_u8_as_u32!($name) as u64) << 32 |
(repeat_u8_as_u32!($name) as u64)) }
}

// NOTE: Keep synchronized with values used in librustc_trans::trans::adt.
//
// In particular, the POST_DROP_U8 marker must never equal the
// DTOR_NEEDED_U8 marker.
//
// For a while pnkfelix was using 0xc1 here.
// But having the sign bit set is a pain, so 0x1d is probably better.
//
// And of course, 0x00 brings back the old world of zero'ing on drop.
#[unstable(feature = "filling_drop", issue = "5016")]
#[allow(missing_docs)]
pub const POST_DROP_U8: u8 = 0x1d;
#[unstable(feature = "filling_drop", issue = "5016")]
#[allow(missing_docs)]
pub const POST_DROP_U16: u16 = repeat_u8_as_u16!(POST_DROP_U8);
#[unstable(feature = "filling_drop", issue = "5016")]
#[allow(missing_docs)]
pub const POST_DROP_U32: u32 = repeat_u8_as_u32!(POST_DROP_U8);
#[unstable(feature = "filling_drop", issue = "5016")]
#[allow(missing_docs)]
pub const POST_DROP_U64: u64 = repeat_u8_as_u64!(POST_DROP_U8);

#[cfg(target_pointer_width = "16")]
#[unstable(feature = "filling_drop", issue = "5016")]
#[allow(missing_docs)]
pub const POST_DROP_USIZE: usize = POST_DROP_U16 as usize;
#[cfg(target_pointer_width = "32")]
#[unstable(feature = "filling_drop", issue = "5016")]
#[allow(missing_docs)]
pub const POST_DROP_USIZE: usize = POST_DROP_U32 as usize;
#[cfg(target_pointer_width = "64")]
#[unstable(feature = "filling_drop", issue = "5016")]
#[allow(missing_docs)]
pub const POST_DROP_USIZE: usize = POST_DROP_U64 as usize;

/// Interprets `src` as `&U`, and then reads `src` without moving the contained
/// value.
///
15 changes: 0 additions & 15 deletions src/libcore/ptr.rs
@@ -140,21 +140,6 @@ pub unsafe fn read<T>(src: *const T) -> T {
tmp
}

#[allow(missing_docs)]
#[inline(always)]
#[unstable(feature = "filling_drop",
reason = "may play a larger role in std::ptr future extensions",
issue = "5016")]
pub unsafe fn read_and_drop<T>(dest: *mut T) -> T {
// Copy the data out from `dest`:
let tmp = read(&*dest);

// Now mark `dest` as dropped:
write_bytes(dest, mem::POST_DROP_U8, 1);

tmp
}

/// Overwrites a memory location with the given value without reading or
/// dropping the old value.
///
5 changes: 1 addition & 4 deletions src/librustc/hir/map/mod.rs
@@ -315,8 +315,7 @@ impl<'ast> Map<'ast> {
RootInlinedParent(parent) => match *parent {
InlinedItem::Item(def_id, _) |
InlinedItem::TraitItem(def_id, _) |
InlinedItem::ImplItem(def_id, _) |
InlinedItem::Foreign(def_id, _) =>
InlinedItem::ImplItem(def_id, _) =>
return DepNode::MetaData(def_id)
},

@@ -940,8 +939,6 @@ pub fn map_decoded_item<'ast, F: FoldOps>(map: &Map<'ast>,
II::ImplItem(fld.fold_ops.new_def_id(d),
ii.map(|ii| fld.fold_impl_item(ii)))
}
II::Foreign(d, i) => II::Foreign(fld.fold_ops.new_def_id(d),
i.map(|i| fld.fold_foreign_item(i)))
};

let ii = map.forest.inlined_items.alloc(ii);
7 changes: 2 additions & 5 deletions src/librustc/middle/cstore.rs
@@ -96,17 +96,15 @@ pub enum DefLike {
pub enum InlinedItem {
Item(DefId /* def-id in source crate */, P<hir::Item>),
TraitItem(DefId /* impl id */, P<hir::TraitItem>),
ImplItem(DefId /* impl id */, P<hir::ImplItem>),
Foreign(DefId /* extern item */, P<hir::ForeignItem>),
ImplItem(DefId /* impl id */, P<hir::ImplItem>)
}

/// A borrowed version of `hir::InlinedItem`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum InlinedItemRef<'a> {
Item(DefId, &'a hir::Item),
TraitItem(DefId, &'a hir::TraitItem),
ImplItem(DefId, &'a hir::ImplItem),
Foreign(DefId, &'a hir::ForeignItem)
ImplItem(DefId, &'a hir::ImplItem)
}

/// Item definitions in the currently-compiled crate would have the CrateNum
@@ -286,7 +284,6 @@ impl InlinedItem {
{
match *self {
InlinedItem::Item(_, ref i) => visitor.visit_item(&i),
InlinedItem::Foreign(_, ref i) => visitor.visit_foreign_item(&i),
InlinedItem::TraitItem(_, ref ti) => visitor.visit_trait_item(ti),
InlinedItem::ImplItem(_, ref ii) => visitor.visit_impl_item(ii),
}