20 changes: 11 additions & 9 deletions compiler/rustc_borrowck/src/type_check/mod.rs
@@ -1537,15 +1537,17 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
}
}
CastKind::Transmute => {
let ty_from = op.ty(self.body, tcx);
match ty_from.kind() {
ty::Pat(base, _) if base == ty => {}
_ => span_mirbug!(
self,
rvalue,
"Unexpected CastKind::Transmute {ty_from:?} -> {ty:?}, which is not permitted in Analysis MIR",
),
}
// FIXME: `init_box_via_move` lowering really wants to use this.
// What do we have to do here?
// let ty_from = op.ty(self.body, tcx);
// match ty_from.kind() {
// ty::Pat(base, _) if base == ty => {}
// _ => span_mirbug!(
// self,
// rvalue,
// "Unexpected CastKind::Transmute {ty_from:?} -> {ty:?}, which is not permitted in Analysis MIR",
// ),
// }
}
CastKind::Subtype => {
bug!("CastKind::Subtype shouldn't exist in borrowck")
10 changes: 7 additions & 3 deletions compiler/rustc_hir_analysis/src/check/intrinsic.rs
@@ -76,7 +76,6 @@ fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) -> hi
| sym::autodiff
| sym::bitreverse
| sym::black_box
| sym::box_new
| sym::breakpoint
| sym::bswap
| sym::caller_location
@@ -132,6 +131,7 @@ fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) -> hi
| sym::forget
| sym::frem_algebraic
| sym::fsub_algebraic
| sym::init_box_via_move
| sym::is_val_statically_known
| sym::log2f16
| sym::log2f32
@@ -553,6 +553,12 @@ pub(crate) fn check_intrinsic_type(
sym::write_via_move => {
(1, 0, vec![Ty::new_mut_ptr(tcx, param(0)), param(0)], tcx.types.unit)
}
sym::init_box_via_move => {
let t = param(0);
let maybe_uninit_t = Ty::new_maybe_uninit(tcx, t);

(1, 0, vec![Ty::new_box(tcx, maybe_uninit_t), param(0)], Ty::new_box(tcx, t))
}

sym::typed_swap_nonoverlapping => {
(1, 0, vec![Ty::new_mut_ptr(tcx, param(0)); 2], tcx.types.unit)
@@ -645,8 +651,6 @@ pub(crate) fn check_intrinsic_type(

sym::ub_checks => (0, 0, Vec::new(), tcx.types.bool),

sym::box_new => (1, 0, vec![param(0)], Ty::new_box(tcx, param(0))),

// contract_check_requires::<C>(C) -> bool, where C: impl Fn() -> bool
sym::contract_check_requires => (1, 0, vec![param(0)], tcx.types.unit),
sym::contract_check_ensures => {
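For reference, the new `check_intrinsic_type` arm above (one generic type parameter, taking a `Box<MaybeUninit<T>>` and a `T`, returning a `Box<T>`) matches the intrinsic declaration added to `library/alloc/src/boxed.rs` further down in this diff:

```rust
// Declared signature of the new intrinsic (see boxed.rs below):
pub fn init_box_via_move<T>(b: Box<MaybeUninit<T>>, x: T) -> Box<T>;
```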
1 change: 1 addition & 0 deletions compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs
@@ -3022,6 +3022,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
{
let deref_kind = if checked_ty.is_box() {
// detect Box::new(..)
// FIXME: use `box_new` diagnostic item instead?
if let ExprKind::Call(box_new, [_]) = expr.kind
&& let ExprKind::Path(qpath) = &box_new.kind
&& let Res::Def(DefKind::AssocFn, fn_id) =
109 changes: 108 additions & 1 deletion compiler/rustc_mir_build/src/builder/expr/into.rs
@@ -9,8 +9,8 @@ use rustc_middle::mir::*;
use rustc_middle::span_bug;
use rustc_middle::thir::*;
use rustc_middle::ty::{self, CanonicalUserTypeAnnotation, Ty};
use rustc_span::DUMMY_SP;
use rustc_span::source_map::Spanned;
use rustc_span::{DUMMY_SP, sym};
use rustc_trait_selection::infer::InferCtxtExt;
use tracing::{debug, instrument};

@@ -365,6 +365,113 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
None
})
}
// Some intrinsics are handled here because they desperately want to avoid introducing
// unnecessary copies.
ExprKind::Call { ty, fun, ref args, .. }
if let ty::FnDef(def_id, generic_args) = ty.kind()
&& let Some(intrinsic) = this.tcx.intrinsic(def_id)
&& matches!(intrinsic.name, sym::write_via_move | sym::init_box_via_move) =>
{
// We still have to evaluate the callee expression as normal (but we don't care
// about its result).
let _fun = unpack!(block = this.as_local_operand(block, fun));

match intrinsic.name {
sym::write_via_move => {
// `write_via_move(ptr, val)` becomes `*ptr = val` but without any dropping.

// The destination must have unit type (so we don't actually have to store anything
// into it).
assert!(destination.ty(&this.local_decls, this.tcx).ty.is_unit());

// Compile this to an assignment of the argument into the destination.
let [ptr, val] = **args else {
span_bug!(expr_span, "invalid write_via_move call")
};
let Some(ptr) = unpack!(block = this.as_local_operand(block, ptr)).place()
else {
span_bug!(expr_span, "invalid write_via_move call")
};
let ptr_deref = ptr.project_deeper(&[ProjectionElem::Deref], this.tcx);
this.expr_into_dest(ptr_deref, block, val)
}
sym::init_box_via_move => {
// `init_box_via_move(b, val)` becomes
// ```
// *transmute::<_, *mut T>(b) = val;
// transmute::<_, Box<T>>(b)
// ```
let t = generic_args.type_at(0);
let [b, val] = **args else {
span_bug!(expr_span, "invalid init_box_via_move call")
};
let Some(b) = unpack!(block = this.as_local_operand(block, b)).place()
else {
span_bug!(expr_span, "invalid init_box_via_move call")
};
// Project to the pointer inside `b`. We have to keep `b` in scope to ensure
// it gets dropped. After the first projection we can transmute, which is
// easier.
let ty::Adt(box_adt_def, box_adt_args) =
b.ty(&this.local_decls, this.tcx).ty.kind()
else {
span_bug!(expr_span, "invalid init_box_via_move call")
};
let unique_field =
this.tcx.adt_def(box_adt_def.did()).non_enum_variant().fields
[rustc_abi::FieldIdx::ZERO]
.did;
let Some(unique_def) =
this.tcx.type_of(unique_field).instantiate_identity().ty_adt_def()
else {
span_bug!(
this.tcx.def_span(unique_field),
"expected Box to contain Unique"
)
};
let unique_ty =
Ty::new_adt(this.tcx, unique_def, this.tcx.mk_args(&[box_adt_args[0]]));
let b_field = b.project_deeper(
&[ProjectionElem::Field(rustc_abi::FieldIdx::ZERO, unique_ty)],
this.tcx,
);
// `ptr` is `b` transmuted to `*mut T`.
let ptr_ty = Ty::new_mut_ptr(this.tcx, t);
let ptr = this.local_decls.push(LocalDecl::new(ptr_ty, expr_span));
this.cfg.push(
block,
Statement::new(source_info, StatementKind::StorageLive(ptr)),
);
// Make sure `StorageDead` gets emitted.
this.schedule_drop_storage_and_value(expr_span, this.local_scope(), ptr);
this.cfg.push_assign(
block,
source_info,
Place::from(ptr),
// This needs to be an `Operand::Copy` so that `b` still gets dropped if evaluating `val` panics.
Rvalue::Cast(CastKind::Transmute, Operand::Copy(b_field), ptr_ty),
);
// Store `val` into `ptr`.
let ptr_deref =
Place::from(ptr).project_deeper(&[ProjectionElem::Deref], this.tcx);
unpack!(block = this.expr_into_dest(ptr_deref, block, val));
// Return `ptr` transmuted to `Box<T>`.
this.cfg.push_assign(
block,
source_info,
destination,
Rvalue::Cast(
CastKind::Transmute,
// Move from `b` so that it does not get dropped anymore.
Operand::Move(b),
Ty::new_box(this.tcx, t),
),
);
block.unit()
}
_ => rustc_middle::bug!(),
}
Member Author (review comment):

I sure hope all this code does the right cleanup if evaluating `val` panics... but I have no idea.^^ This is the first time I've even looked at the MIR building code.

}
ExprKind::Call { ty: _, fun, ref args, from_hir_call, fn_span } => {
let fun = unpack!(block = this.as_local_operand(block, fun));
let args: Box<[_]> = args
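To make the MIR building above easier to follow, here is a surface-Rust sketch of what the `init_box_via_move` lowering amounts to. This is an illustration only (the helper name is made up): the real lowering works directly on MIR places, and it keeps `b` alive via `Operand::Copy` so the box is still dropped if evaluating `val` unwinds, a concern that cannot arise in this sketch because `val` is already evaluated when the function is entered.

```rust
use core::mem::MaybeUninit;

// Hypothetical illustration of the lowering's effect; not code the compiler emits.
fn init_box_via_move_sketch<T>(b: Box<MaybeUninit<T>>, val: T) -> Box<T> {
    // Take ownership of the allocation without dropping it.
    let ptr = Box::into_raw(b) as *mut T;
    // SAFETY: `ptr` comes from a live `Box` allocation with the layout of `T`,
    // and `write` moves `val` in without reading the uninitialized contents.
    unsafe {
        ptr.write(val);
        // SAFETY: the allocation now holds a fully initialized `T`.
        Box::from_raw(ptr)
    }
}
```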
20 changes: 1 addition & 19 deletions compiler/rustc_mir_build/src/thir/cx/expr.rs
@@ -20,7 +20,7 @@ use rustc_middle::ty::{
self, AdtKind, GenericArgs, InlineConstArgs, InlineConstArgsParts, ScalarInt, Ty, UpvarArgs,
};
use rustc_middle::{bug, span_bug};
use rustc_span::{Span, sym};
use rustc_span::Span;
use tracing::{debug, info, instrument, trace};

use crate::errors::*;
@@ -385,24 +385,6 @@ impl<'tcx> ThirBuildCx<'tcx> {
from_hir_call: true,
fn_span: expr.span,
}
} else if let ty::FnDef(def_id, _) = self.typeck_results.expr_ty(fun).kind()
&& let Some(intrinsic) = self.tcx.intrinsic(def_id)
&& intrinsic.name == sym::box_new
{
// We don't actually evaluate `fun` here, so make sure that doesn't miss any side-effects.
if !matches!(fun.kind, hir::ExprKind::Path(_)) {
span_bug!(
expr.span,
"`box_new` intrinsic can only be called via path expression"
);
}
let value = &args[0];
return Expr {
temp_lifetime: TempLifetime { temp_lifetime, backwards_incompatible },
ty: expr_ty,
span: expr.span,
kind: ExprKind::Box { value: self.mirror_expr(value) },
};
} else {
// Tuple-like ADTs are represented as ExprKind::Call. We convert them here.
let adt_data = if let hir::ExprKind::Path(ref qpath) = fun.kind
25 changes: 1 addition & 24 deletions compiler/rustc_mir_transform/src/lower_intrinsics.rs
@@ -171,30 +171,7 @@ impl<'tcx> crate::MirPass<'tcx> for LowerIntrinsics {
Some(target) => TerminatorKind::Goto { target },
}
}
sym::write_via_move => {
let target = target.unwrap();
let Ok([ptr, val]) = take_array(args) else {
span_bug!(
terminator.source_info.span,
"Wrong number of arguments for write_via_move intrinsic",
);
};
let derefed_place = if let Some(place) = ptr.node.place()
&& let Some(local) = place.as_local()
{
tcx.mk_place_deref(local.into())
} else {
span_bug!(
terminator.source_info.span,
"Only passing a local is supported"
);
};
block.statements.push(Statement::new(
terminator.source_info,
StatementKind::Assign(Box::new((derefed_place, Rvalue::Use(val.node)))),
));
terminator.kind = TerminatorKind::Goto { target };
}
// `write_via_move` is already lowered during MIR building.
sym::discriminant_value => {
let target = target.unwrap();
let Ok([arg]) = take_array(args) else {
1 change: 1 addition & 0 deletions compiler/rustc_span/src/symbol.rs
@@ -1233,6 +1233,7 @@ symbols! {
infer_static_outlives_requirements,
inherent_associated_types,
inherit,
init_box_via_move,
initial,
inlateout,
inline,
13 changes: 0 additions & 13 deletions library/alloc/src/alloc.rs
@@ -342,19 +342,6 @@ unsafe impl Allocator for Global {
}
}

/// The allocator for `Box`.
#[cfg(not(no_global_oom_handling))]
#[lang = "exchange_malloc"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
match Global.allocate(layout) {
Ok(ptr) => ptr.as_mut_ptr(),
Err(_) => handle_alloc_error(layout),
}
}

// # Allocation error handler

#[cfg(not(no_global_oom_handling))]
64 changes: 55 additions & 9 deletions library/alloc/src/boxed.rs
@@ -192,7 +192,7 @@ use core::fmt;
use core::future::Future;
use core::hash::{Hash, Hasher};
use core::marker::{Tuple, Unsize};
use core::mem::{self, SizedTypeProperties};
use core::mem::{self, MaybeUninit, SizedTypeProperties};
use core::ops::{
AsyncFn, AsyncFnMut, AsyncFnOnce, CoerceUnsized, Coroutine, CoroutineState, Deref, DerefMut,
DerefPure, DispatchFromDyn, LegacyReceiver,
@@ -233,14 +233,39 @@ pub struct Box<
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
>(Unique<T>, A);

/// Constructs a `Box<T>` by calling the `exchange_malloc` lang item and moving the argument into
/// the newly allocated memory. This is an intrinsic to avoid unnecessary copies.
/// Monomorphic function for allocating an uninit `Box`.
///
/// This is the surface syntax for `box <expr>` expressions.
#[doc(hidden)]
/// # Safety
///
/// `size` and `align` must be valid arguments for `Layout::from_size_align_unchecked`.
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn box_new_uninit(size: usize, align: usize) -> *mut u8 {
let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
match Global.allocate(layout) {
Ok(ptr) => ptr.as_mut_ptr(),
Err(_) => handle_alloc_error(layout),
}
}

/// Writes `x` into `b`, then returns `b` at its new type.
///
/// This is needed for `vec!`, which can't afford any extra copies of the argument (or else debug
/// builds regress), has to be written fully as a call chain without `let` (or else the temporary
/// lifetimes of the arguments change), and can't use an `unsafe` block as that would then also
/// include the user-provided `$x`.
#[rustc_intrinsic]
#[unstable(feature = "liballoc_internals", issue = "none")]
pub fn box_new<T>(x: T) -> Box<T>;
pub fn init_box_via_move<T>(b: Box<MaybeUninit<T>>, x: T) -> Box<T>;

/// Helper for `vec!` to ensure type inference works correctly (which it wouldn't if we
/// inlined the `as` cast).
#[doc(hidden)]
#[unstable(feature = "liballoc_internals", issue = "none")]
#[inline(always)]
pub fn box_array_into_vec<T, const N: usize>(b: Box<[T; N]>) -> crate::vec::Vec<T> {
(b as Box<[T]>).into_vec()
}

impl<T> Box<T> {
/// Allocates memory on the heap and then places `x` into it.
@@ -259,7 +284,16 @@ impl<T> Box<T> {
#[rustc_diagnostic_item = "box_new"]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub fn new(x: T) -> Self {
return box_new(x);
// This is `Box::new_uninit` but inlined to avoid build time regressions.
// SAFETY: The size and align of a valid type `T` are always valid for `Layout`.
let ptr = unsafe {
box_new_uninit(<T as SizedTypeProperties>::SIZE, <T as SizedTypeProperties>::ALIGN)
} as *mut T;
// Nothing below can panic, so we do not have to worry about deallocating `ptr`.
// SAFETY: we just allocated the box to store `x`.
unsafe { core::intrinsics::write_via_move(ptr, x) };
// SAFETY: `ptr` points to a freshly allocated, now fully initialized `T`, so it can be transmuted into a `Box<T>`.
unsafe { mem::transmute(ptr) }
}

/// Constructs a new box with uninitialized contents.
@@ -277,9 +311,21 @@ impl<T> Box<T> {
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "new_uninit", since = "1.82.0")]
#[must_use]
#[inline]
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
Self::new_uninit_in(Global)
// This is the same as `Self::new_uninit_in(Global)`, but manually inlined (just like
// `Box::new`).

// SAFETY:
// - The size and align of a valid type `T` are always valid for `Layout`.
// - If `allocate` succeeds, the returned pointer exactly matches what `Box` needs.
unsafe {
mem::transmute(box_new_uninit(
<T as SizedTypeProperties>::SIZE,
<T as SizedTypeProperties>::ALIGN,
))
}
}

/// Constructs a new `Box` with uninitialized contents, with the memory
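The doc comment on `init_box_via_move` above explains that `vec!` needs a single call chain with no extra copies, no `let`, and no `unsafe` block. The library-level contract of the intrinsic can be stated with stable APIs; the sketch below is an illustrative equivalent (the intrinsic exists precisely to avoid the extra moves this version may introduce), not the actual `vec!` expansion:

```rust
use std::mem::MaybeUninit;

// Illustrative stable-Rust equivalent of `init_box_via_move(b, x)`.
fn init_box<T>(mut b: Box<MaybeUninit<T>>, x: T) -> Box<T> {
    // Move `x` into the allocation without dropping the old (uninitialized) contents.
    b.write(x);
    // SAFETY: the contents were just initialized.
    unsafe { b.assume_init() }
}

fn main() {
    // Mirrors how `vec!` could build a boxed array in place from its elements.
    let b = init_box(Box::new_uninit(), [1, 2, 3]);
    assert_eq!(*b, [1, 2, 3]);
}
```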