Auto merge of rust-lang#128142 - matthiaskrgr:rollup-rep8ofv, r=matthiaskrgr

Rollup of 9 pull requests

Successful merges:

 - rust-lang#126152 (size_of_val_raw: for length 0 this is safe to call)
 - rust-lang#127252 (Add edge-case examples to `{count,leading,trailing}_{ones,zeros}` methods)
 - rust-lang#127374 (Tweak "wrong # of generics" suggestions)
 - rust-lang#127457 (Make tidy fast without compromising case alternation)
 - rust-lang#127480 (Fix build failure on vxworks rust-lang#127084)
 - rust-lang#127733 (Replace some `mem::forget`'s with `ManuallyDrop`)
 - rust-lang#128120 (Gate `AsyncFn*` under `async_closure` feature)
 - rust-lang#128131 (Import `c_void` rather than using the full path)
 - rust-lang#128133 (Improve spans on evaluated `cfg_attr`s.)

r? `@ghost`
`@rustbot` modify labels: rollup
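As a quick illustration of the edge cases documented by rust-lang#127252 in the list above, the following assertions hold on stable Rust (a minimal sketch, not taken from that PR's doc examples):

```rust
fn main() {
    // Extremes for the {count,leading,trailing}_{ones,zeros} family on u8.
    assert_eq!(0u8.count_ones(), 0);
    assert_eq!(u8::MAX.count_ones(), 8);
    assert_eq!(0u8.leading_zeros(), 8);
    assert_eq!(u8::MAX.leading_zeros(), 0);
    assert_eq!(0u8.trailing_zeros(), 8);
    assert_eq!(u8::MAX.trailing_ones(), 8);
    println!("all edge cases hold");
}
```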
bors committed Jul 24, 2024
2 parents 2ccafed + 2dc88bf commit 6106b05
Showing 107 changed files with 528 additions and 435 deletions.
22 changes: 15 additions & 7 deletions compiler/rustc_ast_lowering/src/path.rs
@@ -44,13 +44,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let mut res = self.lower_res(base_res);

// When we have an `async` kw on a bound, map the trait it resolves to.
- let mut bound_modifier_allowed_features = None;
if let Some(TraitBoundModifiers { asyncness: BoundAsyncness::Async(_), .. }) = modifiers {
match res {
Res::Def(DefKind::Trait, def_id) => {
- if let Some((async_def_id, features)) = self.map_trait_to_async_trait(def_id) {
+ if let Some(async_def_id) = self.map_trait_to_async_trait(def_id) {
res = Res::Def(DefKind::Trait, async_def_id);
- bound_modifier_allowed_features = Some(features);
} else {
self.dcx().emit_err(AsyncBoundOnlyForFnTraits { span: p.span });
}
@@ -67,6 +65,16 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
}
}

+ // Ungate the `async_fn_traits` feature in the path if the trait is
+ // named via either `async Fn*()` or `AsyncFn*()`.
+ let bound_modifier_allowed_features = if let Res::Def(DefKind::Trait, async_def_id) = res
+ && self.tcx.async_fn_trait_kind_from_def_id(async_def_id).is_some()
+ {
+ Some(self.allow_async_fn_traits.clone())
+ } else {
+ None
+ };
+
let path_span_lo = p.span.shrink_to_lo();
let proj_start = p.segments.len() - unresolved_segments;
let path = self.arena.alloc(hir::Path {
@@ -506,14 +514,14 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
/// This only needs to be done until we unify `AsyncFn` and `Fn` traits into one
/// that is generic over `async`ness, if that's ever possible, or modify the
/// lowering of `async Fn()` bounds to desugar to another trait like `LendingFn`.
- fn map_trait_to_async_trait(&self, def_id: DefId) -> Option<(DefId, Lrc<[Symbol]>)> {
+ fn map_trait_to_async_trait(&self, def_id: DefId) -> Option<DefId> {
let lang_items = self.tcx.lang_items();
if Some(def_id) == lang_items.fn_trait() {
- Some((lang_items.async_fn_trait()?, self.allow_async_fn_traits.clone()))
+ lang_items.async_fn_trait()
} else if Some(def_id) == lang_items.fn_mut_trait() {
- Some((lang_items.async_fn_mut_trait()?, self.allow_async_fn_traits.clone()))
+ lang_items.async_fn_mut_trait()
} else if Some(def_id) == lang_items.fn_once_trait() {
- Some((lang_items.async_fn_once_trait()?, self.allow_async_fn_traits.clone()))
+ lang_items.async_fn_once_trait()
} else {
None
}
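For context on the hunks above (from rust-lang#128120): after this change, both the `async Fn*()` bound sugar and the named `AsyncFn*` traits fall under the `async_closure` gate. A minimal nightly-only sketch, assuming the mid-2024 feature surface; `takes_async_callable` is a hypothetical helper, not compiler code:

```rust
// Nightly-only sketch: requires the `async_closure` feature.
#![feature(async_closure)]

// The `async Fn()` sugar below is what `map_trait_to_async_trait` rewrites:
// when the `async` keyword modifies the bound, the `Fn` lang item is swapped
// for the `AsyncFn` lang item.
fn takes_async_callable<F: async Fn() -> u32>(_f: F) {}

fn main() {
    // Async closures are gated behind the same `async_closure` feature.
    takes_async_callable(async || 42_u32);
}
```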
56 changes: 28 additions & 28 deletions compiler/rustc_expand/src/config.rs
@@ -6,7 +6,7 @@ use crate::errors::{
};
use rustc_ast::ptr::P;
use rustc_ast::token::{Delimiter, Token, TokenKind};
- use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, DelimSpacing, DelimSpan, Spacing};
+ use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, Spacing};
use rustc_ast::tokenstream::{LazyAttrTokenStream, TokenTree};
use rustc_ast::NodeId;
use rustc_ast::{self as ast, AttrStyle, Attribute, HasAttrs, HasTokens, MetaItem};
@@ -298,47 +298,47 @@ impl<'a> StripUnconfigured<'a> {
cfg_attr: &Attribute,
(item, item_span): (ast::AttrItem, Span),
) -> Attribute {
- // We are taking an attribute of the form `#[cfg_attr(pred, attr)]`
- // and producing an attribute of the form `#[attr]`. We
- // have captured tokens for `attr` itself, but we need to
- // synthesize tokens for the wrapper `#` and `[]`, which
- // we do below.
-
- // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
- // for `attr` when we expand it to `#[attr]`
+ // Convert `#[cfg_attr(pred, attr)]` to `#[attr]`.
+
+ // Use the `#` from `#[cfg_attr(pred, attr)]` in the result `#[attr]`.
let mut orig_trees = cfg_attr.token_trees().into_iter();
- let TokenTree::Token(pound_token @ Token { kind: TokenKind::Pound, .. }, _) =
- orig_trees.next().unwrap().clone()
+ let Some(TokenTree::Token(pound_token @ Token { kind: TokenKind::Pound, .. }, _)) =
+ orig_trees.next()
else {
panic!("Bad tokens for attribute {cfg_attr:?}");
};

- // We don't really have a good span to use for the synthesized `[]`
- // in `#[attr]`, so just use the span of the `#` token.
- let bracket_group = AttrTokenTree::Delimited(
- DelimSpan::from_single(pound_token.span),
- DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
- Delimiter::Bracket,
- item.tokens
- .as_ref()
- .unwrap_or_else(|| panic!("Missing tokens for {item:?}"))
- .to_attr_token_stream(),
- );
- let trees = if cfg_attr.style == AttrStyle::Inner {
- // For inner attributes, we do the same thing for the `!` in `#![some_attr]`
- let TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _) =
- orig_trees.next().unwrap().clone()
+ // For inner attributes, we do the same thing for the `!` in `#![attr]`.
+ let mut trees = if cfg_attr.style == AttrStyle::Inner {
+ let Some(TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _)) =
+ orig_trees.next()
else {
panic!("Bad tokens for attribute {cfg_attr:?}");
};
vec![
AttrTokenTree::Token(pound_token, Spacing::Joint),
AttrTokenTree::Token(bang_token, Spacing::JointHidden),
- bracket_group,
]
} else {
- vec![AttrTokenTree::Token(pound_token, Spacing::JointHidden), bracket_group]
+ vec![AttrTokenTree::Token(pound_token, Spacing::JointHidden)]
};
+
+ // And the same thing for the `[`/`]` delimiters in `#[attr]`.
+ let Some(TokenTree::Delimited(delim_span, delim_spacing, Delimiter::Bracket, _)) =
+ orig_trees.next()
+ else {
+ panic!("Bad tokens for attribute {cfg_attr:?}");
+ };
+ trees.push(AttrTokenTree::Delimited(
+ delim_span,
+ delim_spacing,
+ Delimiter::Bracket,
+ item.tokens
+ .as_ref()
+ .unwrap_or_else(|| panic!("Missing tokens for {item:?}"))
+ .to_attr_token_stream(),
+ ));

let tokens = Some(LazyAttrTokenStream::new(AttrTokenStream::new(trees)));
let attr = attr::mk_attr_from_item(
&self.sess.psess.attr_id_generator,
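The token surgery above (from rust-lang#128133) only changes which spans the expanded attribute carries; at the source level the transformation it implements is the familiar one below. A stable-Rust sketch, unrelated to the compiler-internal API:

```rust
// `#[cfg_attr(pred, attr)]` expands to `#[attr]` when `pred` holds; the patch
// reuses the original `#`, `!`, and `[...]` tokens so diagnostics on the
// expanded attribute point at real source spans.
#[cfg_attr(debug_assertions, derive(Debug))]
struct Config {
    retries: u32,
}

#[cfg(debug_assertions)]
fn dump(c: &Config) {
    // `Debug` is available here because the same predicate enabled the derive.
    println!("{c:?}");
}

#[cfg(not(debug_assertions))]
fn dump(_c: &Config) {}

fn main() {
    let c = Config { retries: 3 };
    dump(&c);
}
```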
@@ -888,7 +888,7 @@ impl<'a, 'tcx> WrongNumberOfGenericArgs<'a, 'tcx> {
let comma = if args.len() > 0 { ", " } else { "" };
let trait_path = self.tcx.def_path_str(trait_def_id);
let method_name = self.tcx.item_name(self.def_id);
- err.span_suggestion(
+ err.span_suggestion_verbose(
expr.span,
msg,
format!("{trait_path}::{generics}::{method_name}({rcvr}{comma}{rest})"),
@@ -939,18 +939,20 @@ impl<'a, 'tcx> WrongNumberOfGenericArgs<'a, 'tcx> {
}
}

- let span_lo_redundant_lt_args = lt_arg_spans[self.num_expected_lifetime_args()];
+ let span_lo_redundant_lt_args = if self.num_expected_lifetime_args() == 0 {
+ lt_arg_spans[0]
+ } else {
+ lt_arg_spans[self.num_expected_lifetime_args() - 1]
+ };
let span_hi_redundant_lt_args = lt_arg_spans[lt_arg_spans.len() - 1];

- let span_redundant_lt_args = span_lo_redundant_lt_args.to(span_hi_redundant_lt_args);
+ let span_redundant_lt_args =
+ span_lo_redundant_lt_args.shrink_to_hi().to(span_hi_redundant_lt_args);
debug!("span_redundant_lt_args: {:?}", span_redundant_lt_args);

let num_redundant_lt_args = lt_arg_spans.len() - self.num_expected_lifetime_args();
- let msg_lifetimes = format!(
- "remove {these} lifetime argument{s}",
- these = pluralize!("this", num_redundant_lt_args),
- s = pluralize!(num_redundant_lt_args),
- );
+ let msg_lifetimes =
+ format!("remove the lifetime argument{s}", s = pluralize!(num_redundant_lt_args));

err.span_suggestion(
span_redundant_lt_args,
@@ -979,18 +981,22 @@ impl<'a, 'tcx> WrongNumberOfGenericArgs<'a, 'tcx> {
}

let span_lo_redundant_type_or_const_args =
- gen_arg_spans[self.num_expected_type_or_const_args()];
+ if self.num_expected_type_or_const_args() == 0 {
+ gen_arg_spans[0]
+ } else {
+ gen_arg_spans[self.num_expected_type_or_const_args() - 1]
+ };
let span_hi_redundant_type_or_const_args = gen_arg_spans[gen_arg_spans.len() - 1];
+ let span_redundant_type_or_const_args = span_lo_redundant_type_or_const_args
+ .shrink_to_hi()
+ .to(span_hi_redundant_type_or_const_args);

- let span_redundant_type_or_const_args =
- span_lo_redundant_type_or_const_args.to(span_hi_redundant_type_or_const_args);
debug!("span_redundant_type_or_const_args: {:?}", span_redundant_type_or_const_args);

let num_redundant_gen_args =
gen_arg_spans.len() - self.num_expected_type_or_const_args();
let msg_types_or_consts = format!(
- "remove {these} generic argument{s}",
- these = pluralize!("this", num_redundant_gen_args),
+ "remove the unnecessary generic argument{s}",
s = pluralize!(num_redundant_gen_args),
);

@@ -1036,7 +1042,7 @@ impl<'a, 'tcx> WrongNumberOfGenericArgs<'a, 'tcx> {
.with_lo(self.path_segment.ident.span.hi());

let msg = format!(
- "remove these {}generics",
+ "remove the unnecessary {}generics",
if self.gen_args.parenthesized == hir::GenericArgsParentheses::ParenSugar {
"parenthetical "
} else {
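The hunks above adjust the wording and spans of the "wrong number of generic arguments" suggestions (rust-lang#127374). The kind of code that triggers them looks like this deliberately incorrect sketch; it does not compile, and the diagnostic text in the comments is approximate:

```rust
struct Pair<T>(T, T);

fn main() {
    // error[E0107]: struct takes 1 generic argument but 2 generic arguments were supplied
    //   help: remove the unnecessary generic argument
    let _p: Pair<u8, u8> = Pair(1, 2);
}
```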
1 change: 1 addition & 0 deletions library/alloc/src/lib.rs
@@ -101,6 +101,7 @@
#![feature(array_windows)]
#![feature(ascii_char)]
#![feature(assert_matches)]
+ #![feature(async_closure)]
#![feature(async_fn_traits)]
#![feature(async_iterator)]
#![feature(clone_to_uninit)]
47 changes: 22 additions & 25 deletions library/alloc/src/rc.rs
@@ -259,7 +259,7 @@ use core::intrinsics::abort;
#[cfg(not(no_global_oom_handling))]
use core::iter;
use core::marker::{PhantomData, Unsize};
- use core::mem::{self, align_of_val_raw, forget, ManuallyDrop};
+ use core::mem::{self, align_of_val_raw, ManuallyDrop};
use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, Receiver};
use core::panic::{RefUnwindSafe, UnwindSafe};
#[cfg(not(no_global_oom_handling))]
@@ -908,19 +908,18 @@ impl<T, A: Allocator> Rc<T, A> {
#[stable(feature = "rc_unique", since = "1.4.0")]
pub fn try_unwrap(this: Self) -> Result<T, Self> {
if Rc::strong_count(&this) == 1 {
- unsafe {
- let val = ptr::read(&*this); // copy the contained object
- let alloc = ptr::read(&this.alloc); // copy the allocator
-
- // Indicate to Weaks that they can't be promoted by decrementing
- // the strong count, and then remove the implicit "strong weak"
- // pointer while also handling drop logic by just crafting a
- // fake Weak.
- this.inner().dec_strong();
- let _weak = Weak { ptr: this.ptr, alloc };
- forget(this);
- Ok(val)
- }
+ let this = ManuallyDrop::new(this);
+
+ let val: T = unsafe { ptr::read(&**this) }; // copy the contained object
+ let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator
+
+ // Indicate to Weaks that they can't be promoted by decrementing
+ // the strong count, and then remove the implicit "strong weak"
+ // pointer while also handling drop logic by just crafting a
+ // fake Weak.
+ this.inner().dec_strong();
+ let _weak = Weak { ptr: this.ptr, alloc };
+ Ok(val)
} else {
Err(this)
}
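The refactor above keeps `Rc::try_unwrap`'s observable behaviour; only the internal bookkeeping switches from an `unsafe` block plus `mem::forget` to `ManuallyDrop`. For reference, the stable behaviour being preserved:

```rust
use std::rc::Rc;

fn main() {
    // With a single strong reference, the inner value is returned.
    let unique = Rc::new(5);
    assert_eq!(Rc::try_unwrap(unique), Ok(5));

    // With more than one strong reference, the original Rc comes back as Err.
    let shared = Rc::new(5);
    let _also_shared = Rc::clone(&shared);
    assert_eq!(Rc::try_unwrap(shared), Err(Rc::new(5)));
}
```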
@@ -1354,9 +1353,8 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
#[stable(feature = "rc_raw", since = "1.17.0")]
#[rustc_never_returns_null_ptr]
pub fn into_raw(this: Self) -> *const T {
- let ptr = Self::as_ptr(&this);
- mem::forget(this);
- ptr
+ let this = ManuallyDrop::new(this);
+ Self::as_ptr(&*this)
}

/// Consumes the `Rc`, returning the wrapped pointer and allocator.
@@ -2127,7 +2125,7 @@ impl<T> Rc<[T]> {
}

// All clear. Forget the guard so it doesn't free the new RcBox.
- forget(guard);
+ mem::forget(guard);

Self::from_ptr(ptr)
}
@@ -3080,9 +3078,7 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
#[must_use = "losing the pointer will leak memory"]
#[stable(feature = "weak_into_raw", since = "1.45.0")]
pub fn into_raw(self) -> *const T {
- let result = self.as_ptr();
- mem::forget(self);
- result
+ mem::ManuallyDrop::new(self).as_ptr()
}

/// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
@@ -3762,10 +3758,11 @@ impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
/// # Safety
///
/// The data must have been initialized (by writing to [`Self::data_ptr()`]).
- unsafe fn into_rc(mut self) -> Rc<T, A> {
- let ptr = self.ptr;
- let alloc = self.alloc.take().unwrap();
- mem::forget(self);
+ unsafe fn into_rc(self) -> Rc<T, A> {
+ let mut this = ManuallyDrop::new(self);
+ let ptr = this.ptr;
+ let alloc = this.alloc.take().unwrap();
+
// SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
// for having initialized the data.
unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
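The common thread in the `rc.rs` hunks above (rust-lang#127733) is replacing "read the fields, then `mem::forget` the owner" with "wrap the owner in `ManuallyDrop` first, then read". A simplified stand-alone illustration of that pattern, using a made-up `Token` type:

```rust
use std::mem::ManuallyDrop;

struct Token {
    id: u32,
}

impl Drop for Token {
    fn drop(&mut self) {
        println!("dropping token {}", self.id);
    }
}

fn into_id(t: Token) -> u32 {
    // Wrapping in ManuallyDrop up front disables the destructor immediately,
    // so there is no window in which an early return or panic could run `drop`
    // after the contents have been copied out.
    let t = ManuallyDrop::new(t);
    t.id
}

fn main() {
    let id = into_id(Token { id: 7 });
    // Prints "7"; no "dropping token" line appears because the destructor
    // was suppressed.
    println!("{id}");
}
```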
36 changes: 16 additions & 20 deletions library/alloc/src/sync.rs
@@ -20,7 +20,7 @@ use core::intrinsics::abort;
#[cfg(not(no_global_oom_handling))]
use core::iter;
use core::marker::{PhantomData, Unsize};
- use core::mem::{self, align_of_val_raw};
+ use core::mem::{self, align_of_val_raw, ManuallyDrop};
use core::ops::{CoerceUnsized, Deref, DerefPure, DispatchFromDyn, Receiver};
use core::panic::{RefUnwindSafe, UnwindSafe};
use core::pin::Pin;
@@ -960,16 +960,14 @@ impl<T, A: Allocator> Arc<T, A> {

acquire!(this.inner().strong);

- unsafe {
- let elem = ptr::read(&this.ptr.as_ref().data);
- let alloc = ptr::read(&this.alloc); // copy the allocator
+ let this = ManuallyDrop::new(this);
+ let elem: T = unsafe { ptr::read(&this.ptr.as_ref().data) };
+ let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator

- // Make a weak pointer to clean up the implicit strong-weak reference
- let _weak = Weak { ptr: this.ptr, alloc };
- mem::forget(this);
+ // Make a weak pointer to clean up the implicit strong-weak reference
+ let _weak = Weak { ptr: this.ptr, alloc };

- Ok(elem)
- }
+ Ok(elem)
}

/// Returns the inner value, if the `Arc` has exactly one strong reference.
@@ -1493,9 +1491,8 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
#[stable(feature = "rc_raw", since = "1.17.0")]
#[rustc_never_returns_null_ptr]
pub fn into_raw(this: Self) -> *const T {
- let ptr = Self::as_ptr(&this);
- mem::forget(this);
- ptr
+ let this = ManuallyDrop::new(this);
+ Self::as_ptr(&*this)
}

/// Consumes the `Arc`, returning the wrapped pointer and allocator.
@@ -2801,9 +2798,7 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
#[must_use = "losing the pointer will leak memory"]
#[stable(feature = "weak_into_raw", since = "1.45.0")]
pub fn into_raw(self) -> *const T {
- let result = self.as_ptr();
- mem::forget(self);
- result
+ ManuallyDrop::new(self).as_ptr()
}

/// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
@@ -3875,13 +3870,14 @@ impl<T: ?Sized, A: Allocator> UniqueArcUninit<T, A> {
/// # Safety
///
/// The data must have been initialized (by writing to [`Self::data_ptr()`]).
- unsafe fn into_arc(mut self) -> Arc<T, A> {
- let ptr = self.ptr;
- let alloc = self.alloc.take().unwrap();
- mem::forget(self);
+ unsafe fn into_arc(self) -> Arc<T, A> {
+ let mut this = ManuallyDrop::new(self);
+ let ptr = this.ptr.as_ptr();
+ let alloc = this.alloc.take().unwrap();
+
// SAFETY: The pointer is valid as per `UniqueArcUninit::new`, and the caller is responsible
// for having initialized the data.
- unsafe { Arc::from_ptr_in(ptr.as_ptr(), alloc) }
+ unsafe { Arc::from_ptr_in(ptr, alloc) }
}
}

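The `sync.rs` hunks mirror the `rc.rs` ones: `into_raw` now parks the value in `ManuallyDrop` instead of calling `mem::forget` after the fact. The public contract is unchanged; for instance, the usual round trip still holds:

```rust
use std::sync::Arc;

fn main() {
    let a = Arc::new(String::from("hello"));
    let raw = Arc::into_raw(a);

    // SAFETY: `raw` came from `Arc::into_raw` and is turned back into an Arc
    // exactly once, so the reference count stays balanced.
    let a = unsafe { Arc::from_raw(raw) };
    assert_eq!(a.as_str(), "hello");
}
```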
2 changes: 2 additions & 0 deletions library/core/src/alloc/layout.rs
@@ -183,6 +183,8 @@ impl Layout {
/// - a [slice], then the length of the slice tail must be an initialized
/// integer, and the size of the *entire value*
/// (dynamic tail length + statically sized prefix) must fit in `isize`.
+ /// For the special case where the dynamic tail length is 0, this function
+ /// is safe to call.
/// - a [trait object], then the vtable part of the pointer must point
/// to a valid vtable for the type `T` acquired by an unsizing coercion,
/// and the size of the *entire value*
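The doc addition above comes from rust-lang#126152: a raw pointer whose slice tail has length 0 is explicitly fine to pass. A nightly-only sketch, assuming the unstable `layout_for_ptr` gate that `Layout::for_value_raw` currently sits behind:

```rust
#![feature(layout_for_ptr)]

use std::alloc::Layout;
use std::ptr;

fn main() {
    // A dangling data pointer with length 0: the zero-length slice-tail case
    // the new wording calls out as safe.
    let raw: *const [u8] = ptr::slice_from_raw_parts(ptr::NonNull::<u8>::dangling().as_ptr(), 0);

    // SAFETY: the slice tail has length 0, so per the documentation above the
    // call is safe even though the pointer does not point to a real value.
    let layout = unsafe { Layout::for_value_raw(raw) };
    assert_eq!((layout.size(), layout.align()), (0, 1));
}
```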