diff --git a/crates/block2/CHANGELOG.md b/crates/block2/CHANGELOG.md index d875c414c..fcfa42776 100644 --- a/crates/block2/CHANGELOG.md +++ b/crates/block2/CHANGELOG.md @@ -6,16 +6,30 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ## Unreleased - YYYY-MM-DD +### Added +* Added `RcBlock::new(closure)` as a more efficient and flexible alternative + to `StackBlock::new(closure).to_rc()`. +* Added `StackBlock::to_rc` to convert stack blocks to `RcBlock`. + ### Changed +* **BREAKING**: Renamed `RcBlock::new(ptr)` to `RcBlock::from_raw(ptr)`. +* **BREAKING**: Made `RcBlock` use the null-pointer optimization; + `RcBlock::from_raw` and `RcBlock::copy` now return an `Option`. * **BREAKING**: Only expose the actually public symbols `_Block_copy`, `_Block_release`, `_Block_object_assign`, `_Block_object_dispose`, `_NSConcreteGlobalBlock`, `_NSConcreteStackBlock` and `Class` in `ffi` module. +* **BREAKING**: Renamed `IntoConcreteBlock` to `IntoBlock`. * No longer use the `block-sys` crate for linking to the blocks runtime. * Renamed `ConcreteBlock` to `StackBlock`. The old name is deprecated. -* **BREAKING**: Renamed `IntoConcreteBlock` to `IntoBlock`. +* Added `Copy` implementation for `StackBlock`. + +### Deprecated +* Deprecated `StackBlock::copy`, it is no longer necessary. ### Fixed +* **BREAKING**: `StackBlock::new` now requires the closure to be `Clone`. If + this is not desired, use `RcBlock::new` instead. * Relaxed the `F: Debug` bound on `StackBlock`'s `Debug` implementation. diff --git a/crates/block2/src/abi.rs b/crates/block2/src/abi.rs index fc9993d3b..984432e03 100644 --- a/crates/block2/src/abi.rs +++ b/crates/block2/src/abi.rs @@ -254,8 +254,14 @@ pub(crate) struct BlockDescriptorCopyDispose { pub(crate) size: c_ulong, /// Helper to copy the block if it contains nontrivial copy operations. + /// + /// This may be NULL since macOS 11.0.1 in Apple's runtime, but this + /// should not be relied on. pub(crate) copy: Option, /// Helper to destroy the block after being copied. + /// + /// This may be NULL since macOS 11.0.1 in Apple's runtime, but this + /// should not be relied on. pub(crate) dispose: Option, } @@ -293,8 +299,14 @@ pub(crate) struct BlockDescriptorCopyDisposeSignature { pub(crate) size: c_ulong, /// Helper to copy the block if it contains nontrivial copy operations. + /// + /// This may be NULL since macOS 11.0.1 in Apple's runtime, but this + /// should not be relied on. pub(crate) copy: Option, /// Helper to destroy the block after being copied. + /// + /// This may be NULL since macOS 11.0.1 in Apple's runtime, but this + /// should not be relied on. pub(crate) dispose: Option, /// Objective-C type encoding of the block. diff --git a/crates/block2/src/global.rs b/crates/block2/src/global.rs index 00e3634a1..98dcd962c 100644 --- a/crates/block2/src/global.rs +++ b/crates/block2/src/global.rs @@ -23,10 +23,10 @@ const GLOBAL_DESCRIPTOR: BlockDescriptor = BlockDescriptor { /// This is effectively a glorified function pointer, and can created and /// stored in static memory using the [`global_block!`] macro. /// -/// If [`StackBlock`] is the [`Fn`]-block equivalent, this is likewise the +/// If [`RcBlock`] is the [`Fn`]-block equivalent, this is likewise the /// [`fn`]-block equivalent. 
/// -/// [`StackBlock`]: crate::StackBlock +/// [`RcBlock`]: crate::RcBlock /// [`global_block!`]: crate::global_block #[repr(C)] pub struct GlobalBlock { diff --git a/crates/block2/src/lib.rs b/crates/block2/src/lib.rs index 22e53391f..0cb04555d 100644 --- a/crates/block2/src/lib.rs +++ b/crates/block2/src/lib.rs @@ -45,35 +45,112 @@ //! //! ## Creating blocks //! -//! Creating a block to pass to Objective-C can be done with the -//! [`StackBlock`] struct. For example, to create a block that adds two -//! integers, we could write: +//! Creating a block to pass to Objective-C can be done with [`RcBlock`] or +//! [`StackBlock`], depending on if you want to move the block to the heap, +//! or let the callee decide if it needs to do that. +//! +//! To declare external functions or methods that takes blocks, use +//! `&Block` or `Option<&Block>`, where `A` is a tuple with the +//! argument types, and `R` is the return type. +//! +//! As an example, we're going to work with a block that adds two integers. //! //! ``` -//! use block2::StackBlock; -//! let block = StackBlock::new(|a: i32, b: i32| a + b); -//! let block = block.copy(); -//! assert_eq!(unsafe { block.call((5, 8)) }, 13); +//! use block2::Block; +//! +//! // External function that takes a block +//! extern "C" { +//! fn add_numbers_using_block(block: &Block<(i32, i32), i32>); +//! } +//! # +//! # use objc2::ClassType; +//! # objc2::extern_class!( +//! # struct MyClass; +//! # +//! # unsafe impl ClassType for MyClass { +//! # type Super = objc2::runtime::NSObject; +//! # type Mutability = objc2::mutability::InteriorMutable; +//! # const NAME: &'static str = "NSObject"; +//! # } +//! # ); +//! +//! // External method that takes a block +//! objc2::extern_methods!( +//! unsafe impl MyClass { +//! #[method(addNumbersUsingBlock:)] +//! pub fn addNumbersUsingBlock(&self, block: &Block<(i32, i32), i32>); +//! } +//! ); //! ``` //! -//! It is important to copy your block to the heap (with the [`copy`] method) -//! before passing it to Objective-C; this is because our [`StackBlock`] is -//! only meant to be copied once, and we can enforce this in Rust, but if -//! Objective-C code were to copy it twice we could have a double free. +//! To call such a function / method, we could create a new block from a +//! closure using [`RcBlock::new`]. +//! +//! ``` +//! use block2::RcBlock; +//! # +//! # extern "C" { +//! # fn add_numbers_using_block(block: &block2::Block<(i32, i32), i32>); +//! # } +//! # mod imp { +//! # #[no_mangle] +//! # extern "C" fn add_numbers_using_block(block: &block2::Block<(i32, i32), i32>) { +//! # assert_eq!(unsafe { block.call((5, 8)) }, 13); +//! # } +//! # } +//! +//! let block = RcBlock::new(|a: i32, b: i32| a + b); +//! unsafe { add_numbers_using_block(&block) }; +//! ``` //! -//! [`copy`]: StackBlock::copy +//! This creates the block on the heap. If the external function you're +//! calling is not going to copy the block, it may be more performant if you +//! construct a [`StackBlock`] directly, using [`StackBlock::new`]. //! -//! As an optimization if your block doesn't capture any variables, you can -//! use the [`global_block!`] macro to create a static block: +//! Note though that this requires that the closure is [`Clone`], as the +//! external code may want to copy the block to the heap in the future. +//! +//! ``` +//! use block2::StackBlock; +//! # +//! # extern "C" { +//! # fn add_numbers_using_block(block: &block2::Block<(i32, i32), i32>); +//! # } +//! # mod imp { +//! # #[no_mangle] +//! 
# extern "C" fn add_numbers_using_block(block: &block2::Block<(i32, i32), i32>) { +//! # assert_eq!(unsafe { block.call((5, 8)) }, 13); +//! # } +//! # } +//! +//! let block = StackBlock::new(|a: i32, b: i32| a + b); +//! unsafe { add_numbers_using_block(&block) }; +//! ``` +//! +//! As an optimization if your block doesn't capture any variables (as in the +//! above examples), you can use the [`global_block!`] macro to create a +//! static block. //! //! ``` //! use block2::global_block; +//! # +//! # extern "C" { +//! # fn add_numbers_using_block(block: &block2::Block<(i32, i32), i32>); +//! # } +//! # mod imp { +//! # #[no_mangle] +//! # extern "C" fn add_numbers_using_block(block: &block2::Block<(i32, i32), i32>) { +//! # assert_eq!(unsafe { block.call((5, 8)) }, 13); +//! # } +//! # } +//! //! global_block! { -//! static MY_BLOCK = || -> f32 { -//! 10.0 +//! static MY_BLOCK = |a: i32, b: i32| -> i32 { +//! a + b //! }; //! } -//! assert_eq!(unsafe { MY_BLOCK.call(()) }, 10.0); +//! +//! unsafe { add_numbers_using_block(&MY_BLOCK) }; //! ``` //! //! diff --git a/crates/block2/src/rc_block.rs b/crates/block2/src/rc_block.rs index d7ce12de5..ff8169d64 100644 --- a/crates/block2/src/rc_block.rs +++ b/crates/block2/src/rc_block.rs @@ -1,70 +1,159 @@ use core::fmt; +use core::mem::ManuallyDrop; use core::ops::Deref; +use core::ptr::NonNull; + +use objc2::encode::EncodeReturn; use crate::abi::BlockHeader; use crate::debug::debug_block_header; -use crate::{ffi, Block}; +use crate::{ffi, Block, BlockArguments, IntoBlock, StackBlock}; -/// A reference-counted Objective-C block. +/// A reference-counted Objective-C block that is stored on the heap. +/// +/// This is a smart pointer that [`Deref`]s to [`Block`]. +/// +/// +/// # Memory-layout +/// +/// This is guaranteed to have the same size and alignment as a pointer to a +/// block, `*const Block`. +/// +/// Additionally, it participates in the null-pointer optimization, that is, +/// `Option>` is guaranteed to have the same size as +/// `RcBlock`. +#[doc(alias = "MallocBlock")] pub struct RcBlock { - pub(crate) ptr: *mut Block, + ptr: NonNull>, } impl RcBlock { - /// Construct an `RcBlock` for the given block without copying it. - /// The caller must ensure the block has a +1 reference count. + /// Construct an `RcBlock` from the given block pointer by taking + /// ownership. + /// + /// This will return `None` if the pointer is NULL. /// /// # Safety /// - /// The given pointer must point to a valid `Block` and must have a +1 - /// reference count or it will be overreleased when the `RcBlock` is - /// dropped. - pub unsafe fn new(ptr: *mut Block) -> Self { - RcBlock { ptr } + /// The given pointer must point to a valid block, the arguments and + /// return type must be correct, and the block must have a +1 reference / + /// retain count from somewhere else. + #[inline] + pub unsafe fn from_raw(ptr: *mut Block) -> Option { + NonNull::new(ptr).map(|ptr| Self { ptr }) } - /// Constructs an `RcBlock` by copying the given block. + /// Construct an `RcBlock` from the given block pointer. + /// + /// The block will be copied, and have its reference-count increased by + /// one. + /// + /// This will return `None` if the pointer is NULL, or if an allocation + /// failure occurred. /// /// # Safety /// - /// The given pointer must point to a valid `Block`. - pub unsafe fn copy(ptr: *mut Block) -> Self { - // SAFETY: The caller ensures the pointer is valid. 
+ /// The given pointer must point to a valid block, and the arguments and
+ /// return type must be correct.
+ #[doc(alias = "Block_copy")]
+ #[doc(alias = "_Block_copy")]
+ #[inline]
+ pub unsafe fn copy(ptr: *mut Block) -> Option {
 let ptr: *mut Block = unsafe { ffi::_Block_copy(ptr.cast()) }.cast();
 // SAFETY: We just copied the block, so the reference count is +1
+ unsafe { Self::from_raw(ptr) }
+ }
+}
+
+impl RcBlock {
+ /// Construct an `RcBlock` with the given closure.
+ ///
+ /// The closure will be copied to the heap on construction.
+ ///
+ /// When the block is called, it will return the value that results from
+ /// calling the closure.
+ //
+ // Note: Unsure if this should be #[inline], but I think it may be able to
+ // benefit from not being so.
+ pub fn new(closure: F) -> Self
+ where
+ // The `F: 'static` bound is required because the `RcBlock` has no way
+ // of tracking a lifetime.
+ F: IntoBlock + 'static,
+ {
+ // SAFETY: The stack block is copied once below.
+ //
+ // Note: We could theoretically use `_NSConcreteMallocBlock`, and use
+ // `malloc` ourselves to put the block on the heap, but that symbol is
+ // not part of the public ABI, and may break in the future.
 //
- // TODO: Does _Block_copy always returns a valid pointer?
- unsafe { Self::new(ptr) }
+ // Clang doesn't do this optimization either.
+ //
+ let block = unsafe { StackBlock::new_no_clone(closure) };
+
+ // Transfer ownership from the stack to the heap.
+ let mut block = ManuallyDrop::new(block);
+ let ptr: *mut StackBlock = &mut *block;
+ let ptr: *mut Block = ptr.cast();
+ // SAFETY: The block will be moved to the heap, and we forget the
+ // original block because the heap block will drop in our dispose
+ // helper.
+ unsafe { Self::copy(ptr) }.unwrap_or_else(|| rc_new_fail())
+ }
 }
 impl Clone for RcBlock {
- fn clone(&self) -> RcBlock {
+ #[inline]
+ fn clone(&self) -> Self {
 // SAFETY: The pointer is valid, since the only way to get an RcBlock
 // in the first place is through unsafe functions.
- unsafe { RcBlock::copy(self.ptr) }
+ unsafe { Self::copy(self.ptr.as_ptr()) }.unwrap_or_else(|| rc_clone_fail())
 }
 }
+// Intentionally not `#[track_caller]`, to keep the code-size smaller (as this
+// error is very unlikely).
+pub(crate) fn rc_new_fail() -> ! {
+ // This likely means the system is out of memory.
+ panic!("failed creating RcBlock")
+}
+
+// Intentionally not `#[track_caller]`, see above.
+fn rc_clone_fail() -> ! {
+ unreachable!("cloning a RcBlock bumps the reference count, which should be infallible")
+}
+
 impl Deref for RcBlock {
 type Target = Block;
+ #[inline]
 fn deref(&self) -> &Block {
- // SAFETY: The pointer is ensured valid by creator functions.
- unsafe { self.ptr.as_ref().unwrap_unchecked() }
+ // SAFETY: The pointer is valid, as ensured by creation methods, and
+ // will be so for as long as the `RcBlock` is, since that holds +1
+ // reference count.
+ unsafe { self.ptr.as_ref() }
 }
 }
 impl Drop for RcBlock {
+ /// Release the block, decreasing the reference-count by 1.
+ ///
+ /// The `Drop` method of the underlying closure will be called once the
+ /// reference-count reaches zero.
+ #[doc(alias = "Block_release")]
+ #[doc(alias = "_Block_release")]
+ #[inline]
 fn drop(&mut self) {
- unsafe { ffi::_Block_release(self.ptr.cast()) };
+ // SAFETY: The pointer has +1 reference count, as ensured by creation
+ // methods.
+ unsafe { ffi::_Block_release(self.ptr.as_ptr().cast()) }; } } impl fmt::Debug for RcBlock { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut f = f.debug_struct("RcBlock"); - let header = unsafe { self.ptr.cast::().as_ref().unwrap() }; + let header = unsafe { self.ptr.cast::().as_ref() }; debug_block_header(header, &mut f); f.finish_non_exhaustive() } diff --git a/crates/block2/src/stack.rs b/crates/block2/src/stack.rs index 80903c101..73da87e32 100644 --- a/crates/block2/src/stack.rs +++ b/crates/block2/src/stack.rs @@ -1,15 +1,18 @@ use core::ffi::c_void; use core::fmt; use core::marker::PhantomData; -use core::mem::{self, ManuallyDrop, MaybeUninit}; +use core::mem::{self, MaybeUninit}; use core::ops::Deref; -use core::ptr; +use core::ptr::{self, NonNull}; use std::os::raw::c_ulong; use objc2::encode::{EncodeArgument, EncodeReturn, Encoding, RefEncode}; -use crate::abi::{BlockDescriptorCopyDispose, BlockDescriptorPtr, BlockFlags, BlockHeader}; +use crate::abi::{ + BlockDescriptor, BlockDescriptorCopyDispose, BlockDescriptorPtr, BlockFlags, BlockHeader, +}; use crate::debug::debug_block_header; +use crate::rc_block::rc_new_fail; use crate::{ffi, Block, BlockArguments, RcBlock}; mod private { @@ -28,7 +31,7 @@ mod private { /// This is a sealed trait, and should not need to be implemented. Open an /// issue if you know a use-case where this restrition should be lifted! pub unsafe trait IntoBlock: private::Sealed + Sized { - /// The return type of the resulting `StackBlock`. + /// The return type of the resulting block. type Output: EncodeReturn; #[doc(hidden)] @@ -48,20 +51,22 @@ macro_rules! into_block_impl { { type Output = R; + #[inline] fn __get_invoke_stack_block() -> unsafe extern "C" fn() { unsafe extern "C" fn invoke<$($t,)* R, X>( - block: &StackBlock<($($t,)*), R, X>, + block: *mut StackBlock<($($t,)*), R, X>, $($a: $t,)* ) -> R where X: Fn($($t,)*) -> R, { - (block.closure)($($a),*) + let closure = unsafe { &*ptr::addr_of!((*block).closure) }; + (closure)($($a),*) } unsafe { mem::transmute::< - unsafe extern "C" fn(&StackBlock<($($t,)*), R, X>, $($a: $t,)*) -> R, + unsafe extern "C" fn(*mut StackBlock<($($t,)*), R, X>, $($a: $t,)*) -> R, unsafe extern "C" fn(), >(invoke) } @@ -154,67 +159,132 @@ into_block_impl!( l: L ); -/// An Objective-C block whose size is known at compile time and may be -/// constructed on the stack. +/// An Objective-C block constructed on the stack. +/// +/// This is a smart pointer that [`Deref`]s to [`Block`]. +/// +/// +/// # Memory layout +/// +/// The memory layout of this type is _not_ guaranteed. +/// +/// That said, it will always be safe to reintepret pointers to this as a +/// pointer to a `Block`. #[repr(C)] pub struct StackBlock { p: PhantomData>, - pub(crate) header: BlockHeader, - pub(crate) closure: F, + header: BlockHeader, + /// The block's closure. + /// + /// The ABI requires this field to come after the header. + /// + /// Note that this is not wrapped in a `ManuallyDrop`; once the + /// `StackBlock` is dropped, the closure will also be dropped. + closure: F, } -unsafe impl RefEncode for StackBlock { +// SAFETY: Pointers to the stack block is always safe to reintepret as an +// ordinary block pointer. +unsafe impl RefEncode for StackBlock { const ENCODING_REF: Encoding = Encoding::Block; } -impl StackBlock -where - A: BlockArguments, - R: EncodeReturn, - F: IntoBlock, -{ - /// Constructs a `StackBlock` with the given closure. 
- /// When the block is called, it will return the value that results from - /// calling the closure. - pub fn new(closure: F) -> Self { - unsafe { Self::with_invoke(F::__get_invoke_stack_block(), closure) } +// Basic constants and helpers. +impl StackBlock { + /// The size of the block header and the trailing closure. + /// + /// This ensures that the closure that the block contains is also moved to + /// the heap in `_Block_copy` operations. + const SIZE: c_ulong = mem::size_of::() as _; + + /// Drop the closure that this block contains. + unsafe extern "C" fn drop_closure(block: *mut c_void) { + let block: *mut Self = block.cast(); + // When this function is called, the block no longer lives on the + // stack, it has been moved to the heap as part of some `_Block_copy` + // operation, including ownership over the block. + // + // It is our task to ensure that the closure's data is properly + // disposed, which we do by calling `Drop`. + + // We use `addr_of_mut` here to not assume anything about the block's + // contents. This is probably overly cautious, `BlockHeader` already + // makes very few assumptions about the validity of the data it + // contains. + // + // SAFETY: The block pointer is valid, and contains the closure. + let closure = unsafe { ptr::addr_of_mut!((*block).closure) }; + // SAFETY: The ownership of the closure was moved into the block as + // part of some `_Block_copy` operation, and as such it is valid to + // drop here. + unsafe { ptr::drop_in_place(closure) }; } -} -impl StackBlock { - // TODO: Use new ABI with BLOCK_HAS_SIGNATURE - const FLAGS: BlockFlags = if mem::needs_drop::() { - BlockFlags::BLOCK_HAS_COPY_DISPOSE - } else { - BlockFlags::EMPTY + const DESCRIPTOR_BASIC: BlockDescriptor = BlockDescriptor { + reserved: 0, + size: Self::SIZE, }; +} + +// `StackBlock::new` +impl StackBlock { + /// Clone the closure from one block to another. + unsafe extern "C" fn clone_closure(dst: *mut c_void, src: *const c_void) { + let dst: *mut Self = dst.cast(); + let src: *const Self = src.cast(); + // When this function is called as part of some `_Block_copy` + // operation, the `dst` block has been constructed on the heap, and + // the `src` block has been `memmove`d to that. + // + // It is our task to ensure that the rest of the closure's data is + // properly copied, which we do by calling `Clone`. This newly cloned + // closure will be dropped in `drop_closure`. + + // We use `addr_of[_mut]` to not assume anything about the block's + // contents, see `drop_closure` for details. + // + // SAFETY: The block pointers are valid, and each contain the closure. + let dst_closure = unsafe { ptr::addr_of_mut!((*dst).closure) }; + let src_closure = unsafe { &*ptr::addr_of!((*src).closure) }; + // SAFETY: `dst` is valid for writes. + // TODO: How do we ensure proper alignment? + // + // Note: This is slightly inefficient, as we're overwriting the + // already `memmove`d data once more, which is unnecessary for closure + // captures that implement `Copy`. 
+ unsafe { ptr::write(dst_closure, src_closure.clone()) }; + } - const DESCRIPTOR: BlockDescriptorCopyDispose = BlockDescriptorCopyDispose { + const DESCRIPTOR_WITH_CLONE: BlockDescriptorCopyDispose = BlockDescriptorCopyDispose { reserved: 0, - size: mem::size_of::() as c_ulong, - copy: if mem::needs_drop::() { - Some(block_context_copy::) - } else { - None - }, - dispose: if mem::needs_drop::() { - Some(block_context_dispose::) - } else { - None - }, + size: Self::SIZE, + copy: Some(Self::clone_closure), + dispose: Some(Self::drop_closure), }; - /// Constructs a `StackBlock` with the given invoke function and closure. - /// Unsafe because the caller must ensure the invoke function takes the - /// correct arguments. - unsafe fn with_invoke(invoke: unsafe extern "C" fn(), closure: F) -> Self { + /// Construct a `StackBlock` with the given closure. + /// + /// Note that this requires [`Clone`], as a C block is generally assumed + /// to be copy-able. If you want to avoid that, put the block directly on + /// the heap using [`RcBlock::new`]. + /// + /// When the block is called, it will return the value that results from + /// calling the closure. + #[inline] + pub fn new(closure: F) -> Self + where + F: IntoBlock, + { let header = BlockHeader { isa: unsafe { ptr::addr_of!(ffi::_NSConcreteStackBlock) }, - flags: Self::FLAGS, + // TODO: Add signature. + flags: BlockFlags::BLOCK_HAS_COPY_DISPOSE, reserved: MaybeUninit::new(0), - invoke: Some(invoke), + invoke: Some(F::__get_invoke_stack_block()), + // TODO: Use `Self::DESCRIPTOR_BASIC` when `F: Copy` + // (probably only possible with specialization). descriptor: BlockDescriptorPtr { - with_copy_dispose: &Self::DESCRIPTOR, + with_copy_dispose: &Self::DESCRIPTOR_WITH_CLONE, }, }; Self { @@ -223,45 +293,125 @@ impl StackBlock { closure, } } + + /// Copy the stack block onto the heap as an `RcBlock`. + /// + /// Most of the time you'll want to use [`RcBlock::new`] directly instead. + // + // TODO: Place this on `Block`, once that carries a lifetime. + #[inline] + pub fn to_rc(&self) -> RcBlock + where + // This bound is required because the `RcBlock` has no way of tracking + // a lifetime. + F: 'static, + { + let ptr: *const Self = self; + let ptr: *const Block = ptr.cast(); + let ptr: *mut Block = ptr as *mut _; + // SAFETY: A pointer to `StackBlock` is safe to convert to a pointer + // to `Block`, and because of the `F: 'static` lifetime bound, the + // block will be alive for the lifetime of the new `RcBlock`. + unsafe { RcBlock::copy(ptr) }.unwrap_or_else(|| rc_new_fail()) + } + + /// No longer necessary, the implementation does this for you when needed. + #[inline] + #[deprecated = "no longer necessary"] + pub fn copy(self) -> RcBlock + where + F: 'static, + { + self.to_rc() + } } -impl StackBlock { - /// Copy self onto the heap as an `RcBlock`. - pub fn copy(self) -> RcBlock { - // Our copy helper will run so the block will be moved to the heap - // and we can forget the original block because the heap block will - // drop in our dispose helper. TODO: Verify this. - let mut block = ManuallyDrop::new(self); - let ptr: *mut Self = &mut *block; - unsafe { RcBlock::copy(ptr.cast()) } +// `RcBlock::new` +impl StackBlock { + unsafe extern "C" fn empty_clone_closure(_dst: *mut c_void, _src: *const c_void) { + // We do nothing, the closure has been `memmove`'d already, and + // ownership will be passed in `RcBlock::new`. 
+ } + + const DESCRIPTOR_WITH_DROP: BlockDescriptorCopyDispose = BlockDescriptorCopyDispose { + reserved: 0, + size: Self::SIZE, + copy: Some(Self::empty_clone_closure), + dispose: Some(Self::drop_closure), + }; + + /// # Safety + /// + /// `_Block_copy` must be called on the resulting stack block only once. + #[inline] + pub(crate) unsafe fn new_no_clone(closure: F) -> Self + where + F: IntoBlock, + { + // Don't need to emit copy and dispose helpers if the closure + // doesn't need it. + // + // TODO: Add signature. + let flags = if mem::needs_drop::() { + BlockFlags::BLOCK_HAS_COPY_DISPOSE + } else { + BlockFlags::EMPTY + }; + let descriptor = if mem::needs_drop::() { + BlockDescriptorPtr { + with_copy_dispose: &Self::DESCRIPTOR_WITH_DROP, + } + } else { + BlockDescriptorPtr { + basic: &Self::DESCRIPTOR_BASIC, + } + }; + + let header = BlockHeader { + isa: unsafe { ptr::addr_of!(ffi::_NSConcreteStackBlock) }, + flags, + reserved: MaybeUninit::new(0), + invoke: Some(F::__get_invoke_stack_block()), + descriptor, + }; + Self { + p: PhantomData, + header, + closure, + } } } impl Clone for StackBlock { + #[inline] fn clone(&self) -> Self { - unsafe { Self::with_invoke(self.header.invoke.unwrap(), self.closure.clone()) } + Self { + p: PhantomData, + header: self.header, + closure: self.closure.clone(), + } } } +impl Copy for StackBlock {} + impl Deref for StackBlock { type Target = Block; + #[inline] fn deref(&self) -> &Self::Target { - let ptr: *const Self = self; - let ptr: *const Block = ptr.cast(); - // TODO: SAFETY - unsafe { ptr.as_ref().unwrap_unchecked() } + let ptr: NonNull = NonNull::from(self); + let ptr: NonNull> = ptr.cast(); + // SAFETY: A pointer to `StackBlock` is always safe to convert to a + // pointer to `Block`, and the reference will be valid as long the + // stack block exists. + // + // Finally, the stack block is implemented correctly, such that it is + // safe to call `_Block_copy` on the returned value. 
+ unsafe { ptr.as_ref() } } } -unsafe extern "C" fn block_context_dispose(block: *mut c_void) { - unsafe { ptr::drop_in_place(block.cast::()) }; -} - -unsafe extern "C" fn block_context_copy(_dst: *mut c_void, _src: *const c_void) { - // The runtime memmoves the src block into the dst block, nothing to do -} - impl fmt::Debug for StackBlock { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut f = f.debug_struct("StackBlock"); @@ -269,3 +419,20 @@ impl fmt::Debug for StackBlock { f.finish_non_exhaustive() } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_size() { + assert_eq!( + mem::size_of::(), + >::SIZE as _, + ); + assert_eq!( + mem::size_of::() + mem::size_of::(), + >::SIZE as _, + ); + } +} diff --git a/crates/icrate/src/additions/Foundation/data.rs b/crates/icrate/src/additions/Foundation/data.rs index ed9b4ab85..5c2f6d51e 100644 --- a/crates/icrate/src/additions/Foundation/data.rs +++ b/crates/icrate/src/additions/Foundation/data.rs @@ -2,12 +2,16 @@ #[cfg(feature = "block")] use alloc::vec::Vec; use core::fmt; +#[cfg(feature = "block")] +use core::mem::ManuallyDrop; use core::ops::Index; #[cfg(feature = "Foundation_NSMutableData")] use core::ops::{IndexMut, Range}; use core::panic::{RefUnwindSafe, UnwindSafe}; use core::slice::{self, SliceIndex}; +#[cfg(feature = "block")] +use block2::{Block, RcBlock}; #[cfg(feature = "block")] use objc2::rc::IdFromIterator; @@ -268,17 +272,12 @@ impl IdFromIterator for NSMutableData { #[cfg(feature = "block")] unsafe fn with_vec(obj: Allocated, bytes: Vec) -> Id { - use core::mem::ManuallyDrop; - - use block2::{Block, StackBlock}; - let capacity = bytes.capacity(); - let dealloc = StackBlock::new(move |bytes: *mut c_void, len: usize| unsafe { + let dealloc = RcBlock::new(move |bytes: *mut c_void, len: usize| { // Recreate the Vec and let it drop - let _ = >::from_raw_parts(bytes.cast(), len, capacity); + let _ = unsafe { >::from_raw_parts(bytes.cast(), len, capacity) }; }); - let dealloc = dealloc.copy(); let dealloc: &Block<(*mut c_void, usize), ()> = &dealloc; let mut bytes = ManuallyDrop::new(bytes); diff --git a/crates/test-assembly/crates/test_block/expected/apple-aarch64.s b/crates/test-assembly/crates/test_block/expected/apple-aarch64.s index 1b3d6c191..d8123eb5c 100644 --- a/crates/test-assembly/crates/test_block/expected/apple-aarch64.s +++ b/crates/test-assembly/crates/test_block/expected/apple-aarch64.s @@ -1,26 +1,4 @@ .section __TEXT,__text,regular,pure_instructions - .p2align 2 -SYM(block2[CRATE_ID]::stack::block_context_copy::>, 0): - ret - - .p2align 2 -SYM(block2[CRATE_ID]::stack::block_context_copy::>, 0): - ret - - .p2align 2 -SYM(block2[CRATE_ID]::stack::block_context_dispose::>, 0): - ldr x0, [x0, #32] - mov w1, #4 - mov w2, #4 - b ___rust_dealloc - - .p2align 2 -SYM(block2[CRATE_ID]::stack::block_context_dispose::>, 0): - ldr x0, [x0, #32] - mov w1, #4 - mov w2, #4 - b ___rust_dealloc - .p2align 2 SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0): ldr x8, [x0, #32] @@ -52,37 +30,104 @@ SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::i .p2align 2 SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0): - add w0, w1, #3 + add w0, w1, #2 + ret + + .p2align 2 +SYM(>::clone_closure, 0): + ret + + .p2align 2 +SYM(>::clone_closure, 0): + ret + + .p2align 2 +SYM(>::clone_closure, 0): + stp x20, x19, [sp, #-32]! 
+ stp x29, x30, [sp, #16] + add x29, sp, #16 + mov x20, x1 + mov x19, x0 +Lloh0: + adrp x8, ___rust_no_alloc_shim_is_unstable@GOTPAGE +Lloh1: + ldr x8, [x8, ___rust_no_alloc_shim_is_unstable@GOTPAGEOFF] + ldrb wzr, [x8] + mov w0, #4 + mov w1, #4 + bl ___rust_alloc + cbz x0, LBB8_2 + ldr x8, [x20, #32] + ldr w8, [x8] + str w8, [x0] + str x0, [x19, #32] + ldp x29, x30, [sp, #16] + ldp x20, x19, [sp], #32 + ret +LBB8_2: + mov w0, #4 + mov w1, #4 + bl SYM(alloc::alloc::handle_alloc_error::GENERATED_ID, 0) + .loh AdrpLdrGot Lloh0, Lloh1 + + .p2align 2 +SYM(>::empty_clone_closure, 0): + ret + + .p2align 2 +SYM(>::drop_closure, 0): + ldr x0, [x0, #32] + mov w1, #4 + mov w2, #4 + b ___rust_dealloc + + .p2align 2 +SYM(>::drop_closure, 0): ret + .p2align 2 +SYM(>::drop_closure, 0): + ret + + .p2align 2 +SYM(>::drop_closure, 0): + ldr x0, [x0, #32] + mov w1, #4 + mov w2, #4 + b ___rust_dealloc + .globl _stack_block_to_rc .p2align 2 _stack_block_to_rc: sub sp, sp, #48 stp x29, x30, [sp, #32] add x29, sp, #32 -Lloh0: - adrp x8, __NSConcreteStackBlock@GOTPAGE -Lloh1: - ldr x8, [x8, __NSConcreteStackBlock@GOTPAGEOFF] - stp x8, xzr, [sp] Lloh2: - adrp x8, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGE + adrp x8, __NSConcreteStackBlock@GOTPAGE Lloh3: - add x8, x8, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGEOFF + ldr x8, [x8, __NSConcreteStackBlock@GOTPAGEOFF] + mov w9, #33554432 + stp x8, x9, [sp] Lloh4: - adrp x9, l_anon.[ID].1@PAGE + adrp x8, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGE Lloh5: - add x9, x9, l_anon.[ID].1@PAGEOFF + add x8, x8, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGEOFF +Lloh6: + adrp x9, l_anon.[ID].2@PAGE +Lloh7: + add x9, x9, l_anon.[ID].2@PAGEOFF stp x8, x9, [sp, #16] mov x0, sp bl __Block_copy + cbz x0, LBB14_2 ldp x29, x30, [sp, #32] add sp, sp, #48 ret +LBB14_2: + bl SYM(block2::rc_block::rc_new_fail::GENERATED_ID, 0) + .loh AdrpAdd Lloh6, Lloh7 .loh AdrpAdd Lloh4, Lloh5 - .loh AdrpAdd Lloh2, Lloh3 - .loh AdrpLdrGot Lloh0, Lloh1 + .loh AdrpLdrGot Lloh2, Lloh3 .globl _rc_block .p2align 2 @@ -90,28 +135,31 @@ _rc_block: sub sp, sp, #48 stp x29, x30, [sp, #32] add x29, sp, #32 -Lloh6: +Lloh8: adrp x8, __NSConcreteStackBlock@GOTPAGE -Lloh7: +Lloh9: ldr x8, [x8, __NSConcreteStackBlock@GOTPAGEOFF] stp x8, xzr, [sp] -Lloh8: +Lloh10: adrp x8, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGE -Lloh9: +Lloh11: add x8, x8, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGEOFF -Lloh10: +Lloh12: adrp x9, l_anon.[ID].1@PAGE -Lloh11: +Lloh13: add x9, x9, l_anon.[ID].1@PAGEOFF stp x8, x9, [sp, #16] mov x0, sp bl __Block_copy + cbz x0, LBB15_2 ldp x29, x30, [sp, #32] add sp, sp, #48 ret +LBB15_2: + bl SYM(block2::rc_block::rc_new_fail::GENERATED_ID, 0) + .loh AdrpAdd Lloh12, Lloh13 .loh AdrpAdd Lloh10, Lloh11 - .loh AdrpAdd Lloh8, Lloh9 - .loh AdrpLdrGot Lloh6, Lloh7 + .loh AdrpLdrGot Lloh8, Lloh9 .globl _rc_block_drop .p2align 2 @@ -119,30 +167,33 @@ _rc_block_drop: sub sp, sp, #64 stp x29, x30, [sp, #48] add x29, sp, #48 -Lloh12: +Lloh14: adrp x8, __NSConcreteStackBlock@GOTPAGE -Lloh13: +Lloh15: ldr x8, [x8, __NSConcreteStackBlock@GOTPAGEOFF] mov w9, #33554432 -Lloh14: +Lloh16: adrp x10, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGE -Lloh15: 
+Lloh17: add x10, x10, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGEOFF stp x8, x9, [sp, #8] -Lloh16: +Lloh18: adrp x8, l_anon.[ID].0@PAGE -Lloh17: +Lloh19: add x8, x8, l_anon.[ID].0@PAGEOFF stp x10, x8, [sp, #24] str x0, [sp, #40] add x0, sp, #8 bl __Block_copy + cbz x0, LBB16_2 ldp x29, x30, [sp, #48] add sp, sp, #64 ret +LBB16_2: + bl SYM(block2::rc_block::rc_new_fail::GENERATED_ID, 0) + .loh AdrpAdd Lloh18, Lloh19 .loh AdrpAdd Lloh16, Lloh17 - .loh AdrpAdd Lloh14, Lloh15 - .loh AdrpLdrGot Lloh12, Lloh13 + .loh AdrpLdrGot Lloh14, Lloh15 .globl _create_and_use_stack_block .p2align 2 @@ -150,28 +201,29 @@ _create_and_use_stack_block: sub sp, sp, #48 stp x29, x30, [sp, #32] add x29, sp, #32 -Lloh18: - adrp x8, __NSConcreteStackBlock@GOTPAGE -Lloh19: - ldr x8, [x8, __NSConcreteStackBlock@GOTPAGEOFF] - stp x8, xzr, [sp] Lloh20: - adrp x8, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGE + adrp x8, __NSConcreteStackBlock@GOTPAGE Lloh21: - add x8, x8, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGEOFF + ldr x8, [x8, __NSConcreteStackBlock@GOTPAGEOFF] + mov w9, #33554432 + stp x8, x9, [sp] Lloh22: - adrp x9, l_anon.[ID].1@PAGE + adrp x8, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGE Lloh23: - add x9, x9, l_anon.[ID].1@PAGEOFF + add x8, x8, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGEOFF +Lloh24: + adrp x9, l_anon.[ID].3@PAGE +Lloh25: + add x9, x9, l_anon.[ID].3@PAGEOFF stp x8, x9, [sp, #16] mov x0, sp bl _needs_block ldp x29, x30, [sp, #32] add sp, sp, #48 ret + .loh AdrpAdd Lloh24, Lloh25 .loh AdrpAdd Lloh22, Lloh23 - .loh AdrpAdd Lloh20, Lloh21 - .loh AdrpLdrGot Lloh18, Lloh19 + .loh AdrpLdrGot Lloh20, Lloh21 .globl _create_and_use_stack_block_drop .p2align 2 @@ -179,20 +231,20 @@ _create_and_use_stack_block_drop: sub sp, sp, #64 stp x29, x30, [sp, #48] add x29, sp, #48 -Lloh24: +Lloh26: adrp x8, __NSConcreteStackBlock@GOTPAGE -Lloh25: +Lloh27: ldr x8, [x8, __NSConcreteStackBlock@GOTPAGEOFF] mov w9, #33554432 -Lloh26: +Lloh28: adrp x10, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGE -Lloh27: +Lloh29: add x10, x10, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGEOFF stp x8, x9, [sp, #8] -Lloh28: - adrp x8, l_anon.[ID].2@PAGE -Lloh29: - add x8, x8, l_anon.[ID].2@PAGEOFF +Lloh30: + adrp x8, l_anon.[ID].4@PAGE +Lloh31: + add x8, x8, l_anon.[ID].4@PAGEOFF stp x10, x8, [sp, #24] str x0, [sp, #40] add x0, sp, #8 @@ -204,9 +256,9 @@ Lloh29: ldp x29, x30, [sp, #48] add sp, sp, #64 ret + .loh AdrpAdd Lloh30, Lloh31 .loh AdrpAdd Lloh28, Lloh29 - .loh AdrpAdd Lloh26, Lloh27 - .loh AdrpLdrGot Lloh24, Lloh25 + .loh AdrpLdrGot Lloh26, Lloh27 .globl _create_and_use_rc_block .p2align 2 @@ -215,22 +267,23 @@ _create_and_use_rc_block: stp x20, x19, [sp, #32] stp x29, x30, [sp, #48] add x29, sp, #48 -Lloh30: +Lloh32: adrp x8, __NSConcreteStackBlock@GOTPAGE -Lloh31: +Lloh33: ldr x8, [x8, __NSConcreteStackBlock@GOTPAGEOFF] stp x8, xzr, [sp] -Lloh32: +Lloh34: adrp x8, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGE -Lloh33: +Lloh35: add x8, x8, SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)@PAGEOFF -Lloh34: +Lloh36: adrp x9, l_anon.[ID].1@PAGE -Lloh35: +Lloh37: add x9, x9, 
l_anon.[ID].1@PAGEOFF stp x8, x9, [sp, #16] mov x0, sp bl __Block_copy + cbz x0, LBB19_2 mov x19, x0 bl _needs_block mov x0, x19 @@ -239,27 +292,41 @@ Lloh35: ldp x20, x19, [sp, #32] add sp, sp, #64 ret +LBB19_2: + bl SYM(block2::rc_block::rc_new_fail::GENERATED_ID, 0) + .loh AdrpAdd Lloh36, Lloh37 .loh AdrpAdd Lloh34, Lloh35 - .loh AdrpAdd Lloh32, Lloh33 - .loh AdrpLdrGot Lloh30, Lloh31 + .loh AdrpLdrGot Lloh32, Lloh33 .section __DATA,__const .p2align 3, 0x0 l_anon.[ID].0: .asciz "\000\000\000\000\000\000\000\000(\000\000\000\000\000\000" - .quad SYM(block2[CRATE_ID]::stack::block_context_copy::>, 0) - .quad SYM(block2[CRATE_ID]::stack::block_context_dispose::>, 0) + .quad SYM(>::empty_clone_closure, 0) + .quad SYM(>::drop_closure, 0) - .section __TEXT,__const + .section __TEXT,__literal16,16byte_literals .p2align 3, 0x0 l_anon.[ID].1: - .asciz "\000\000\000\000\000\000\000\000 \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" + .asciz "\000\000\000\000\000\000\000\000 \000\000\000\000\000\000" .section __DATA,__const .p2align 3, 0x0 l_anon.[ID].2: + .asciz "\000\000\000\000\000\000\000\000 \000\000\000\000\000\000" + .quad SYM(>::clone_closure, 0) + .quad SYM(>::drop_closure, 0) + + .p2align 3, 0x0 +l_anon.[ID].3: + .asciz "\000\000\000\000\000\000\000\000 \000\000\000\000\000\000" + .quad SYM(>::clone_closure, 0) + .quad SYM(>::drop_closure, 0) + + .p2align 3, 0x0 +l_anon.[ID].4: .asciz "\000\000\000\000\000\000\000\000(\000\000\000\000\000\000" - .quad SYM(block2[CRATE_ID]::stack::block_context_copy::>, 0) - .quad SYM(block2[CRATE_ID]::stack::block_context_dispose::>, 0) + .quad SYM(>::clone_closure, 0) + .quad SYM(>::drop_closure, 0) .subsections_via_symbols diff --git a/crates/test-assembly/crates/test_block/expected/apple-x86_64.s b/crates/test-assembly/crates/test_block/expected/apple-x86_64.s index 991db6eaf..d19ae0045 100644 --- a/crates/test-assembly/crates/test_block/expected/apple-x86_64.s +++ b/crates/test-assembly/crates/test_block/expected/apple-x86_64.s @@ -1,41 +1,41 @@ .section __TEXT,__text,regular,pure_instructions .intel_syntax noprefix .p2align 4, 0x90 -SYM(block2[CRATE_ID]::stack::block_context_copy::>, 0): +SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0): push rbp mov rbp, rsp + mov eax, esi + mov rcx, qword ptr [rdi + 32] + add eax, dword ptr [rcx] pop rbp ret .p2align 4, 0x90 -SYM(block2[CRATE_ID]::stack::block_context_copy::>, 0): +SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0): push rbp mov rbp, rsp + lea eax, [rsi + 2] pop rbp ret .p2align 4, 0x90 -SYM(block2[CRATE_ID]::stack::block_context_dispose::>, 0): +SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0): push rbp mov rbp, rsp - mov rdi, qword ptr [rdi + 32] - mov esi, 4 - mov edx, 4 + lea eax, [rsi + 2] pop rbp - jmp ___rust_dealloc + ret .p2align 4, 0x90 -SYM(block2[CRATE_ID]::stack::block_context_dispose::>, 0): +SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0): push rbp mov rbp, rsp - mov rdi, qword ptr [rdi + 32] - mov esi, 4 - mov edx, 4 + lea eax, [rsi + 2] pop rbp - jmp ___rust_dealloc + ret .p2align 4, 0x90 -SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0): +SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0): push rbp mov rbp, rsp mov eax, esi @@ -45,7 +45,7 @@ SYM(<_ as 
block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::i ret .p2align 4, 0x90 -SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0): +SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0): push rbp mov rbp, rsp lea eax, [rsi + 2] @@ -53,39 +53,88 @@ SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::i ret .p2align 4, 0x90 -SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0): +SYM(>::clone_closure, 0): push rbp mov rbp, rsp - lea eax, [rsi + 2] pop rbp ret .p2align 4, 0x90 -SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0): +SYM(>::clone_closure, 0): push rbp mov rbp, rsp - lea eax, [rsi + 2] pop rbp ret .p2align 4, 0x90 -SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0): +SYM(>::clone_closure, 0): push rbp mov rbp, rsp - mov eax, esi - mov rcx, qword ptr [rdi + 32] - add eax, dword ptr [rcx] + push r14 + push rbx + mov r14, rsi + mov rbx, rdi + mov rax, qword ptr [rip + ___rust_no_alloc_shim_is_unstable@GOTPCREL] + movzx eax, byte ptr [rax] + mov edi, 4 + mov esi, 4 + call ___rust_alloc + test rax, rax + je LBB8_2 + mov rcx, qword ptr [r14 + 32] + mov ecx, dword ptr [rcx] + mov dword ptr [rax], ecx + mov qword ptr [rbx + 32], rax + pop rbx + pop r14 pop rbp ret +LBB8_2: + mov edi, 4 + mov esi, 4 + call SYM(alloc::alloc::handle_alloc_error::GENERATED_ID, 0) .p2align 4, 0x90 -SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0): +SYM(>::empty_clone_closure, 0): push rbp mov rbp, rsp - lea eax, [rsi + 3] pop rbp ret + .p2align 4, 0x90 +SYM(>::drop_closure, 0): + push rbp + mov rbp, rsp + mov rdi, qword ptr [rdi + 32] + mov esi, 4 + mov edx, 4 + pop rbp + jmp ___rust_dealloc + + .p2align 4, 0x90 +SYM(>::drop_closure, 0): + push rbp + mov rbp, rsp + pop rbp + ret + + .p2align 4, 0x90 +SYM(>::drop_closure, 0): + push rbp + mov rbp, rsp + pop rbp + ret + + .p2align 4, 0x90 +SYM(>::drop_closure, 0): + push rbp + mov rbp, rsp + mov rdi, qword ptr [rdi + 32] + mov esi, 4 + mov edx, 4 + pop rbp + jmp ___rust_dealloc + .globl _stack_block_to_rc .p2align 4, 0x90 _stack_block_to_rc: @@ -94,16 +143,20 @@ _stack_block_to_rc: sub rsp, 32 mov rax, qword ptr [rip + __NSConcreteStackBlock@GOTPCREL] mov qword ptr [rbp - 32], rax - mov qword ptr [rbp - 24], 0 + mov qword ptr [rbp - 24], 33554432 lea rax, [rip + SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)] mov qword ptr [rbp - 16], rax - lea rax, [rip + l_anon.[ID].1] + lea rax, [rip + l_anon.[ID].2] mov qword ptr [rbp - 8], rax lea rdi, [rbp - 32] call __Block_copy + test rax, rax + je LBB14_2 add rsp, 32 pop rbp ret +LBB14_2: + call SYM(block2::rc_block::rc_new_fail::GENERATED_ID, 0) .globl _rc_block .p2align 4, 0x90 @@ -116,13 +169,17 @@ _rc_block: mov qword ptr [rbp - 24], 0 lea rax, [rip + SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)] mov qword ptr [rbp - 16], rax - lea rax, [rip + l_anon.[ID].1] + lea rax, [rip + L_anon.[ID].1] mov qword ptr [rbp - 8], rax lea rdi, [rbp - 32] call __Block_copy + test rax, rax + je LBB15_2 add rsp, 32 pop rbp ret +LBB15_2: + call SYM(block2::rc_block::rc_new_fail::GENERATED_ID, 0) .globl _rc_block_drop .p2align 4, 0x90 @@ -140,9 +197,13 @@ _rc_block_drop: mov qword ptr [rbp - 8], rdi lea rdi, [rbp - 40] call __Block_copy + test rax, rax + je LBB16_2 add 
rsp, 48 pop rbp ret +LBB16_2: + call SYM(block2::rc_block::rc_new_fail::GENERATED_ID, 0) .globl _create_and_use_stack_block .p2align 4, 0x90 @@ -152,10 +213,10 @@ _create_and_use_stack_block: sub rsp, 32 mov rax, qword ptr [rip + __NSConcreteStackBlock@GOTPCREL] mov qword ptr [rbp - 32], rax - mov qword ptr [rbp - 24], 0 + mov qword ptr [rbp - 24], 33554432 lea rax, [rip + SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)] mov qword ptr [rbp - 16], rax - lea rax, [rip + l_anon.[ID].1] + lea rax, [rip + l_anon.[ID].3] mov qword ptr [rbp - 8], rax lea rdi, [rbp - 32] call _needs_block @@ -174,7 +235,7 @@ _create_and_use_stack_block_drop: mov qword ptr [rbp - 32], 33554432 lea rax, [rip + SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)] mov qword ptr [rbp - 24], rax - lea rax, [rip + l_anon.[ID].2] + lea rax, [rip + l_anon.[ID].4] mov qword ptr [rbp - 16], rax mov qword ptr [rbp - 8], rdi lea rdi, [rbp - 40] @@ -199,10 +260,12 @@ _create_and_use_rc_block: mov qword ptr [rbp - 32], 0 lea rax, [rip + SYM(<_ as block2[CRATE_ID]::stack::IntoBlock<(_,)>>::__get_invoke_stack_block::invoke::, 0)] mov qword ptr [rbp - 24], rax - lea rax, [rip + l_anon.[ID].1] + lea rax, [rip + L_anon.[ID].1] mov qword ptr [rbp - 16], rax lea rdi, [rbp - 40] call __Block_copy + test rax, rax + je LBB19_2 mov rbx, rax mov rdi, rax call _needs_block @@ -212,24 +275,38 @@ _create_and_use_rc_block: pop rbx pop rbp ret +LBB19_2: + call SYM(block2::rc_block::rc_new_fail::GENERATED_ID, 0) .section __DATA,__const .p2align 3, 0x0 l_anon.[ID].0: .asciz "\000\000\000\000\000\000\000\000(\000\000\000\000\000\000" - .quad SYM(block2[CRATE_ID]::stack::block_context_copy::>, 0) - .quad SYM(block2[CRATE_ID]::stack::block_context_dispose::>, 0) + .quad SYM(>::empty_clone_closure, 0) + .quad SYM(>::drop_closure, 0) - .section __TEXT,__const + .section __TEXT,__literal16,16byte_literals .p2align 3, 0x0 -l_anon.[ID].1: - .asciz "\000\000\000\000\000\000\000\000 \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" +L_anon.[ID].1: + .asciz "\000\000\000\000\000\000\000\000 \000\000\000\000\000\000" .section __DATA,__const .p2align 3, 0x0 l_anon.[ID].2: + .asciz "\000\000\000\000\000\000\000\000 \000\000\000\000\000\000" + .quad SYM(>::clone_closure, 0) + .quad SYM(>::drop_closure, 0) + + .p2align 3, 0x0 +l_anon.[ID].3: + .asciz "\000\000\000\000\000\000\000\000 \000\000\000\000\000\000" + .quad SYM(>::clone_closure, 0) + .quad SYM(>::drop_closure, 0) + + .p2align 3, 0x0 +l_anon.[ID].4: .asciz "\000\000\000\000\000\000\000\000(\000\000\000\000\000\000" - .quad SYM(block2[CRATE_ID]::stack::block_context_copy::>, 0) - .quad SYM(block2[CRATE_ID]::stack::block_context_dispose::>, 0) + .quad SYM(>::clone_closure, 0) + .quad SYM(>::drop_closure, 0) .subsections_via_symbols diff --git a/crates/test-assembly/crates/test_block/lib.rs b/crates/test-assembly/crates/test_block/lib.rs index 54911e517..5469efa0e 100644 --- a/crates/test-assembly/crates/test_block/lib.rs +++ b/crates/test-assembly/crates/test_block/lib.rs @@ -6,17 +6,17 @@ use block2::{Block, RcBlock, StackBlock}; #[no_mangle] fn stack_block_to_rc() -> RcBlock<(i32,), i32> { - StackBlock::new(|x| x + 2).copy() + StackBlock::new(|x| x + 2).to_rc() } #[no_mangle] fn rc_block() -> RcBlock<(i32,), i32> { - StackBlock::new(|x| x + 3).copy() + RcBlock::new(|x| x + 2) } #[no_mangle] fn rc_block_drop(b: Box) -> RcBlock<(i32,), i32> { - StackBlock::new(move |x| x + *b).copy() 
+ RcBlock::new(move |x| x + *b) } extern "C" { @@ -37,6 +37,6 @@ fn create_and_use_stack_block_drop(b: Box) { #[no_mangle] fn create_and_use_rc_block() { - let block = StackBlock::new(|x| x + 2).copy(); + let block = RcBlock::new(|x| x + 2); unsafe { needs_block(&block) }; } diff --git a/crates/tests/src/block.rs b/crates/tests/src/block.rs new file mode 100644 index 000000000..aa6a95f10 --- /dev/null +++ b/crates/tests/src/block.rs @@ -0,0 +1,220 @@ +use core::cell::RefCell; +use std::thread_local; + +use block2::{Block, RcBlock, StackBlock}; +use objc2::{rc::Id, runtime::AnyObject}; + +#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)] +struct Count { + new: usize, + clone: usize, + drop: usize, +} + +thread_local! { + static COUNT: RefCell = RefCell::default(); +} + +impl Count { + fn current() -> Self { + COUNT.with_borrow(|data| data.clone()) + } + + #[track_caller] + fn assert_current(&self) { + let current = Self::current(); + if current != *self { + panic!( + "got differing amounts of calls: + current: `{current:?}`, + expected: `{self:?}`" + ) + } + } +} + +struct CloneDropTracker(()); + +impl CloneDropTracker { + fn new() -> Self { + COUNT.with_borrow_mut(|count| { + count.new += 1; + }); + Self(()) + } +} + +impl Clone for CloneDropTracker { + fn clone(&self) -> Self { + COUNT.with_borrow_mut(|count| { + count.clone += 1; + }); + Self(()) + } +} + +impl Drop for CloneDropTracker { + fn drop(&mut self) { + COUNT.with_borrow_mut(|count| { + count.drop += 1; + }); + } +} + +#[test] +fn stack_new_clone_drop() { + let mut expected = Count::current(); + + let counter = CloneDropTracker::new(); + expected.new += 1; + expected.assert_current(); + + let block = StackBlock::new(move || { + let _ = &counter; + }); + expected.assert_current(); + + let clone = block.clone(); + expected.clone += 1; + expected.assert_current(); + + drop(clone); + expected.drop += 1; + expected.assert_current(); + + drop(block); + expected.drop += 1; + expected.assert_current(); +} + +#[test] +fn rc_new_clone_drop() { + let mut expected = Count::current(); + + let counter = CloneDropTracker::new(); + expected.new += 1; + expected.assert_current(); + + let block = RcBlock::new(move || { + let _ = &counter; + }); + expected.assert_current(); + + let clone = block.clone(); + expected.assert_current(); + + drop(clone); + expected.assert_current(); + + drop(block); + expected.drop += 1; + expected.assert_current(); +} + +#[test] +fn stack_to_rc() { + let mut expected = Count::current(); + + let counter = CloneDropTracker::new(); + expected.new += 1; + expected.assert_current(); + + let stack = StackBlock::new(move || { + let _ = &counter; + }); + expected.assert_current(); + + let rc1 = stack.to_rc(); + expected.clone += 1; + expected.assert_current(); + + let rc2 = stack.to_rc(); + expected.clone += 1; + expected.assert_current(); + + let clone2 = rc2.clone(); + expected.assert_current(); + + drop(rc2); + expected.assert_current(); + + drop(stack); + expected.drop += 1; + expected.assert_current(); + + drop(rc1); + expected.drop += 1; + expected.assert_current(); + + drop(clone2); + expected.drop += 1; + expected.assert_current(); +} + +#[test] +fn retain_release_rc_block() { + let mut expected = Count::current(); + + let counter = CloneDropTracker::new(); + expected.new += 1; + expected.assert_current(); + + let block = RcBlock::new(move || { + let _ = &counter; + }); + expected.assert_current(); + + let ptr = &*block as *const Block<_, _> as *mut AnyObject; + let obj = unsafe { Id::retain(ptr) }.unwrap(); + 
expected.assert_current(); + + drop(block); + expected.assert_current(); + + drop(obj); + expected.drop += 1; + expected.assert_current(); +} + +/// Retaining/releasing stack blocks is kinda weird and unsupported. +/// +/// As an example, the reference count is not increased for stack blocks on +/// Apple's runtime, while on GNUStep, the `-retain` returns the new block, +/// which is generally very unexpected behaviour. +#[test] +fn retain_release_stack_block() { + let mut expected = Count::current(); + + let counter = CloneDropTracker::new(); + expected.new += 1; + expected.assert_current(); + + let block = StackBlock::new(move || { + let _ = &counter; + }); + expected.assert_current(); + + let ptr = &*block as *const Block<_, _> as *mut AnyObject; + // Don't use `Id::retain`, as that has debug assertions against the kind + // of things GNUStep is doing. + let obj = if cfg!(feature = "gnustep-1-7") { + let ptr = unsafe { objc2::ffi::objc_retain(ptr.cast()).cast() }; + unsafe { Id::from_raw(ptr) }.unwrap() + } else { + unsafe { Id::retain(ptr) }.unwrap() + }; + if cfg!(feature = "gnustep-1-7") { + expected.clone += 1; + } + expected.assert_current(); + + drop(obj); + if cfg!(feature = "gnustep-1-7") { + expected.drop += 1; + } + expected.assert_current(); + + drop(block); + expected.drop += 1; + expected.assert_current(); +} diff --git a/crates/tests/src/ffi.rs b/crates/tests/src/ffi.rs index def91c157..5c4325ce2 100644 --- a/crates/tests/src/ffi.rs +++ b/crates/tests/src/ffi.rs @@ -3,19 +3,6 @@ use std::os::raw::c_void; use block2::Block; use objc2::{Encode, Encoding}; -/// A block that takes no arguments and returns an integer: `int32_t (^)()`. -#[repr(C)] -pub struct IntBlock { - _priv: [u8; 0], -} - -/// A block that takes one integer argument, adds to it, and returns the sum: -/// `int32_t (^)(int32_t)`. -#[repr(C)] -pub struct AddBlock { - _priv: [u8; 0], -} - #[repr(C)] #[derive(Debug, Clone, Copy, PartialEq)] pub struct LargeStruct { @@ -42,28 +29,26 @@ unsafe impl Encode for LargeStruct { Encoding::Struct("LargeStruct", &[f32::ENCODING, <[u8; 100]>::ENCODING]); } -#[repr(C)] -pub struct LargeStructBlock { - _priv: [u8; 0], -} - extern "C" { - /// Returns a pointer to a global `IntBlock` that returns 7. - pub fn get_int_block() -> *mut IntBlock; - /// Returns a pointer to a copied `IntBlock` that returns `i`. - pub fn get_int_block_with(i: i32) -> *mut IntBlock; - /// Returns a pointer to a global `AddBlock` that returns its argument + 7. - pub fn get_add_block() -> *mut AddBlock; - /// Returns a pointer to a copied `AddBlock` that returns its argument + `i`. - pub fn get_add_block_with(i: i32) -> *mut AddBlock; - /// Invokes an `IntBlock` and returns its result. - pub fn invoke_int_block(block: *mut IntBlock) -> i32; - /// Invokes an `AddBlock` with `a` and returns the result. - pub fn invoke_add_block(block: *mut AddBlock, a: i32) -> i32; - - pub fn get_large_struct_block() -> *mut LargeStructBlock; - pub fn get_large_struct_block_with(i: LargeStruct) -> *mut LargeStructBlock; - pub fn invoke_large_struct_block(block: *mut LargeStructBlock, s: LargeStruct) -> LargeStruct; + /// Returns a pointer to a global block that returns 7. + pub fn get_int_block() -> *mut Block<(), i32>; + /// Returns a pointer to a copied block that returns `i`. + pub fn get_int_block_with(i: i32) -> *mut Block<(), i32>; + /// Returns a pointer to a global block that returns its argument + 7. 
+ pub fn get_add_block() -> *mut Block<(i32,), i32>; + /// Returns a pointer to a copied block that returns its argument + `i`. + pub fn get_add_block_with(i: i32) -> *mut Block<(i32,), i32>; + /// Invokes a block and returns its result. + pub fn invoke_int_block(block: &Block<(), i32>) -> i32; + /// Invokes a block with `a` and returns the result. + pub fn invoke_add_block(block: &Block<(i32,), i32>, a: i32) -> i32; + + pub fn get_large_struct_block() -> *mut Block<(LargeStruct,), LargeStruct>; + pub fn get_large_struct_block_with(i: LargeStruct) -> *mut Block<(LargeStruct,), LargeStruct>; + pub fn invoke_large_struct_block( + block: &Block<(LargeStruct,), LargeStruct>, + s: LargeStruct, + ) -> LargeStruct; pub fn try_block_debugging(x: i32); } @@ -88,16 +73,16 @@ mod tests { #[test] fn test_int_block() { unsafe { - assert_eq!(invoke_int_block(get_int_block()), 7); - assert_eq!(invoke_int_block(get_int_block_with(13)), 13); + assert_eq!(invoke_int_block(&*get_int_block()), 7); + assert_eq!(invoke_int_block(&*get_int_block_with(13)), 13); } } #[test] fn test_add_block() { unsafe { - assert_eq!(invoke_add_block(get_add_block(), 5), 12); - assert_eq!(invoke_add_block(get_add_block_with(3), 5), 8); + assert_eq!(invoke_add_block(&*get_add_block(), 5), 12); + assert_eq!(invoke_add_block(&*get_add_block_with(3), 5), 8); } } @@ -108,11 +93,11 @@ mod tests { expected.mutate(); assert_eq!( - unsafe { invoke_large_struct_block(get_large_struct_block(), data) }, + unsafe { invoke_large_struct_block(&*get_large_struct_block(), data) }, expected ); assert_eq!( - unsafe { invoke_large_struct_block(get_large_struct_block_with(expected), data) }, + unsafe { invoke_large_struct_block(&*get_large_struct_block_with(expected), data) }, expected ); } diff --git a/crates/tests/src/lib.rs b/crates/tests/src/lib.rs index d1dae961b..ea1add68f 100644 --- a/crates/tests/src/lib.rs +++ b/crates/tests/src/lib.rs @@ -7,6 +7,8 @@ use block2::{Block, RcBlock}; extern crate alloc; extern crate std; +#[cfg(test)] +mod block; #[cfg(all(test, feature = "exception"))] mod exception; pub mod ffi; @@ -24,40 +26,37 @@ use crate::ffi::LargeStruct; pub fn get_int_block_with(i: i32) -> RcBlock<(), i32> { unsafe { let ptr = ffi::get_int_block_with(i); - RcBlock::new(ptr as *mut _) + RcBlock::from_raw(ptr).unwrap() } } pub fn get_add_block_with(i: i32) -> RcBlock<(i32,), i32> { unsafe { let ptr = ffi::get_add_block_with(i); - RcBlock::new(ptr as *mut _) + RcBlock::from_raw(ptr).unwrap() } } pub fn invoke_int_block(block: &Block<(), i32>) -> i32 { - let ptr = block as *const _; - unsafe { ffi::invoke_int_block(ptr as *mut _) } + unsafe { ffi::invoke_int_block(block) } } pub fn invoke_add_block(block: &Block<(i32,), i32>, a: i32) -> i32 { - let ptr = block as *const _; - unsafe { ffi::invoke_add_block(ptr as *mut _, a) } + unsafe { ffi::invoke_add_block(block, a) } } pub fn invoke_large_struct_block( block: &Block<(LargeStruct,), LargeStruct>, x: LargeStruct, ) -> LargeStruct { - let ptr = block as *const _; - unsafe { ffi::invoke_large_struct_block(ptr as *mut _, x) } + unsafe { ffi::invoke_large_struct_block(block, x) } } #[cfg(test)] mod tests { use super::*; use alloc::string::ToString; - use block2::{global_block, RcBlock, StackBlock}; + use block2::{global_block, StackBlock}; global_block! 
{ /// Test `global_block` in an external crate @@ -108,16 +107,15 @@ mod tests { let block = StackBlock::new(move || s.len() as i32); assert_eq!(invoke_int_block(&block), expected_len); - let copied = block.copy(); + let copied = block.to_rc(); assert_eq!(invoke_int_block(&copied), expected_len); } #[test] - fn test_block_stack_copy() { - fn make_block() -> RcBlock<(), i32> { + fn test_block_stack_move() { + fn make_block() -> StackBlock<(), i32, impl Fn() -> i32> { let x = 7; - let block = StackBlock::new(move || x); - block.copy() + StackBlock::new(move || x) } let block = make_block(); @@ -146,7 +144,7 @@ mod tests { x }); assert_eq!(invoke_large_struct_block(&block, data), new_data); - let block = block.copy(); + let block = block.to_rc(); assert_eq!(invoke_large_struct_block(&block, data), new_data); } }
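A minimal sketch of the block-creation API after this diff, assuming the `RcBlock::new` and `StackBlock::to_rc` additions above and the tuple-based `Block<A, R>` signature used throughout (requires linking against a blocks runtime, as `block2` normally does):

```rust
use block2::{RcBlock, StackBlock};

fn main() {
    // New `RcBlock::new`: the closure is moved straight to the heap, so it
    // does not have to be `Clone`.
    let add = RcBlock::new(|a: i32, b: i32| a + b);
    assert_eq!(unsafe { add.call((5, 8)) }, 13);

    // `StackBlock::new` now requires the closure to be `Clone`, since any
    // callee is allowed to copy the block to the heap.
    let stack = StackBlock::new(|a: i32, b: i32| a + b);
    assert_eq!(unsafe { stack.call((5, 8)) }, 13);

    // Explicit move to the heap; replaces the deprecated `StackBlock::copy`.
    let on_heap: RcBlock<(i32, i32), i32> = stack.to_rc();
    assert_eq!(unsafe { on_heap.call((5, 8)) }, 13);
}
```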
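The raw-pointer constructors change shape as well: `RcBlock::from_raw` (formerly `RcBlock::new(ptr)`) and `RcBlock::copy` now return `Option`. A sketch of wrapping a +1 block received over FFI, modeled on the `get_int_block_with` helper in `crates/tests`; the extern declaration below is illustrative, not part of this diff:

```rust
use block2::{Block, RcBlock};

extern "C" {
    // Assumed to return an already-copied (+1) `int32_t (^)()` block, like
    // the helper of the same name in the test suite.
    fn get_int_block_with(i: i32) -> *mut Block<(), i32>;
}

fn int_block(i: i32) -> RcBlock<(), i32> {
    // SAFETY: The helper hands over ownership of a valid block whose
    // argument and return types match the declaration above.
    let ptr = unsafe { get_int_block_with(i) };
    // `from_raw` now rejects NULL instead of silently accepting it.
    unsafe { RcBlock::from_raw(ptr) }.expect("got NULL block pointer")
}

fn main() {
    let block = int_block(42);
    assert_eq!(unsafe { block.call(()) }, 42);
}
```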