linux-imx/rust/alloc/alloc.rs
Alice Ryhl bd197c334a ANDROID: revert Rust Binder changes in android15-6.6
This reverts the following commits:
0347be8c14 ANDROID: rust: disable floating point target features
013c5ddc64 ANDROID: rust_binder: fix leak of name in binderfs
2f4e6b1def ANDROID: Disable warning about new bindgen
3396c2131d ANDROID: rust: use target.json for aarch64
1656e8d99d ANDROID: rust: rustfmt scripts/generate_rust_target.rs
42eb612b25 ANDROID: Enable Rust Binder Module
cb0bef6d8e ANDROID: rust_binder: Switch Rust Binder to GKI Module
b511e79bc5 ANDROID: Only warn on long symbols
4a571c32cb ANDROID: Re-enable Rust symbol export
08dde7cab7 ANDROID: rust_binder: properly handle critical refcount increments
39285e9a6e ANDROID: rust_binder: split out logic for zero-to-one refcount increment
76784ba7b8 ANDROID: rust_binder: don't use boolean to schedule nodes
64018a934a ANDROID: rust_binder: don't pr_warn on benign errors
9ff51354fd ANDROID: use 1.73.0b compiler prebuilt
ed1360bdfa ANDROID: rust_binder: fix CFI failure in rust_shrink_free_page
741b6ae073 ANDROID: rust_binder: don't use default implementations
bee37573f9 ANDROID: rust_binder: add missing void arguments to binderfs files
8ae6dec97f ANDROID: rust_binder: fix ioctl_freeze
c9370a3cbb ANDROID: rust_binder: return pending info for frozen async txns
f6d2f5ebcb ANDROID: rust_binder: various minor improvements
d27c2c39fd ANDROID: rust_binder: add elapsed to binder_logs/state
9ef1712688 FROMLIST: rust: time: add Ktime
306e7bafe8 FROMLIST: rust: don't select CONSTRUCTORS
5424a7cb43 FROMLIST: rust: add flags for shadow call stack sanitizer
c42560464f ANDROID: binder: update Kconfig for choosing between implementations
87f0a459c0 ANDROID: rust: uaccess: fix length usage in read_all
c6c6be80a8 ANDROID: rust: use Rust atomics for Arc refcount
282b78baf0 ANDROID: rust_binder: choose driver with kcmdline flag
01ac18bd09 ANDROID: rust_binder: add binder_logs/state
ec1855352b ANDROID: rust_binder: fail if objects are out of order
ac68e23d38 ANDROID: rust_binder: add vma shrinker
2e4f09314c ANDROID: rust_binder: add priority inheritance
691f0f1682 ANDROID: rust_binder: add oneway spam detection
6249f06992 ANDROID: rust_binder: add TF_UPDATE_TXN support
50259804b1 ANDROID: rust_binder: add process freezing
571343fd97 ANDROID: rust_binder: add BINDER_TYPE_FDA support
b427bc70cf ANDROID: rust_binder: add BINDER_TYPE_FD support
451e930fc0 ANDROID: rust_binder: add BINDER_TYPE_PTR support
ae1d247379 ANDROID: rust_binder: send nodes in transactions
892df033dc ANDROID: rust_binder: add death notifications
fe5dea5fc7 ANDROID: rust_binder: serialize oneway transactions
b67f39008f ANDROID: rust_binder: add non-oneway transactions
98cf4f1b76 ANDROID: rust_binder: add epoll support
bb1d504151 ANDROID: rust_binder: add oneway transactions
0ccb57c72d ANDROID: rust_binder: add nodes and context managers
6feafb413a ANDROID: rust_binder: add work lists
046eb3d735 ANDROID: rust_binder: add threading support
0d512d37b0 ANDROID: rust_binder: add binderfs support to Rust binder
adfa541d88 ANDROID: rust_binder: define a Rust binder driver
b1661c9e45 ANDROID: rust: add linked list implementation
ec656ad4c9 ANDROID: rust: sync: add `Arc::into_unique_or_drop`
b57704d237 ANDROID: rust: sync: add `ArcBorrow::from_raw`
a1bb998934 ANDROID: rust: mm: add bindings for memory management
67fe8ee749 FROMLIST: rust: add abstraction for `struct page`
6896fba16f FROMLIST: rust: uaccess: add typed accessors for userspace pointers
881c1e2f85 FROMLIST: uaccess: always export _copy_[from|to]_user with CONFIG_RUST
3629eb70a6 FROMLIST: rust: uaccess: add userspace pointers
84e0b6c8ef FROMLIST: rust: rbtree: add `RBTree::entry`
c226168bb5 FROMLIST: rust: rbtree: add `RBTreeCursor`
a3e1a10677 FROMLIST: rust: rbtree: add `RBTreeIteratorMut`
97cbf6e271 FROMLIST: rust: rbtree: add `RBTreeIterator`
adfa8383a5 FROMLIST: rust: rbtree: add red-black tree implementation backed by the C version
4cb6d81d02 FROMLIST: rust: add `container_of!` macro
879f437248 FROMLIST: rust: file: add abstraction for `poll_table`
4aa358bcc5 FROMLIST: rust: file: add `DeferredFdCloser`
8abb138ec8 FROMLIST: rust: file: add `Kuid` wrapper
f764ce9b2b FROMLIST: rust: file: add `FileDescriptorReservation`
a34dbbde7c FROMLIST: rust: security: add abstraction for secctx
c0b96a7765 FROMLIST: rust: cred: add Rust abstraction for `struct cred`
40eecc8bdf FROMLIST: rust: file: add Rust abstraction for `struct file`
c8ae5646c9 FROMLIST: rust: task: add `Task::current_raw`
6b4006d2d7 FROMLIST: rust: types: add `NotThreadSafe`
6bb35a6e52 FROMGIT: rust: sync: update integer types in CondVar
bd6e290154 FROMGIT: rust: sync: add `CondVar::wait_timeout`
117298b362 FROMGIT: rust: time: add msecs to jiffies conversion
d6ce2b7bd7 FROMGIT: rust: sync: add `CondVar::notify_sync`
495235c0f3 FROMGIT: rust: sync: `CondVar` rename "wait_list" to "wait_queue_head"
4c5e3e9f01 UPSTREAM: binder: use enum for binder ioctls
fbb9dbf62c UPSTREAM: rust: support `srctree`-relative links
0f40dd3c65 UPSTREAM: rust: sync: Makes `CondVar::wait()` an uninterruptible wait
5e2edfbc03 UPSTREAM: rust: macros: improve `#[vtable]` documentation
8bd5093c30 UPSTREAM: rust: macros: update 'paste!' macro to accept string literals
63a42cf758 UPSTREAM: rust: bindings: rename const binding using sed
412965714b UPSTREAM: rust: replace <linux/module.h> with <linux/export.h> in rust/exports.c
1765f2a273 UPSTREAM: rust: kernel: str: Implement Debug for CString
fe69811059 UPSTREAM: rust: task: remove redundant explicit link
f2497a4bb0 UPSTREAM: rust: kernel: remove `#[allow(clippy::new_ret_no_self)]`
a6ab1c4b59 UPSTREAM: rust: workqueue: add examples
3525914f07 UPSTREAM: rust: workqueue: add `try_spawn` helper method
7b431db132 UPSTREAM: rust: workqueue: implement `WorkItemPointer` for pointer types
c5e9e92b74 UPSTREAM: rust: workqueue: add helper for defining work_struct fields
fef46ccba9 UPSTREAM: rust: workqueue: define built-in queues
37fe02321c UPSTREAM: rust: workqueue: add low-level workqueue bindings
a0c639eadc UPSTREAM: rust: sync: add `Arc::{from_raw, into_raw}`
37e90c8aef ANDROID: rust: Enable KCFI support when available
6be7b55440 ANDROID: rust: Use From instances for JSON encoder
2b93c38ece ANDROID: rust: Support arrays in target JSON
4085063afb ANDROID: rust: allow MODVERSIONS by no longer exporting symbols
e019515a16 ANDROID: rust: Select rustc-1.73.0
ae876716fc FROMLIST: x86: rust: Disable entry padding with Rust
b802a765fb FROMLIST: arm64: rust: Enable Rust support for AArch64
433a394399 FROMLIST: rust: Refactor the build target to allow the use of builtin targets
5eb6b43401 UPSTREAM: rust: Suppress searching builtin sysroot
9b8f3fdda6 UPSTREAM: rust: Ignore preserve-most functions
5341e12502 UPSTREAM: x86: Enable IBT in Rust if enabled in C
37481c8f5a UPSTREAM: rust: upgrade to Rust 1.73.0
42092eccd9 UPSTREAM: rust: upgrade to Rust 1.72.1
30043aaafb UPSTREAM: rust: arc: add explicit `drop()` around `Box::from_raw()`
3365cb02fe UPSTREAM: rust: Use grep -Ev rather than relying on GNU grep
5dd966b03a UPSTREAM: rust: Use awk instead of recent xargs
8a38f60333 UPSTREAM: rust: Respect HOSTCC when linking for host

Fixes: adfa541d88 ("ANDROID: rust_binder: define a Rust binder driver")
Change-Id: Id70e8c42c0b2b31753998c9b5d12e5c449aeafd7
Signed-off-by: Alice Ryhl <aliceryhl@google.com>
2024-08-14 09:00:02 +00:00

450 lines
16 KiB
Rust
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

// SPDX-License-Identifier: Apache-2.0 OR MIT
//! Memory allocation APIs
#![stable(feature = "alloc_module", since = "1.28.0")]
#[cfg(not(test))]
use core::intrinsics;
use core::intrinsics::{min_align_of_val, size_of_val};
use core::ptr::Unique;
#[cfg(not(test))]
use core::ptr::{self, NonNull};
#[stable(feature = "alloc_module", since = "1.28.0")]
#[doc(inline)]
pub use core::alloc::*;
#[cfg(test)]
mod tests;
extern "Rust" {
// These are the magic symbols to call the global allocator. rustc generates
// them to call `__rg_alloc` etc. if there is a `#[global_allocator]` attribute
// (the code expanding that attribute macro generates those functions), or to call
// the default implementations in std (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
// otherwise.
// The rustc fork of LLVM 14 and earlier also special-cases these function names to be able to optimize them
// like `malloc`, `realloc`, and `free`, respectively.
#[rustc_allocator]
#[rustc_nounwind]
fn __rust_alloc(size: usize, align: usize) -> *mut u8;
#[rustc_deallocator]
#[rustc_nounwind]
fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
#[rustc_reallocator]
#[rustc_nounwind]
fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
#[rustc_allocator_zeroed]
#[rustc_nounwind]
fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
// Read (volatilely, in `alloc` below) so that stable code cannot silently
// omit the allocator shim while the `Allocator` API is still unstable.
#[cfg(not(bootstrap))]
static __rust_no_alloc_shim_is_unstable: u8;
}
/// The global memory allocator.
///
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate's default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](self#functions).
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Copy, Clone, Default, Debug)]
#[cfg(not(test))]
pub struct Global;
// When building for tests, re-export std's `Global` instead of defining our own.
#[cfg(test)]
pub use std::alloc::Global;
/// Allocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate's default.
///
/// This function is expected to be deprecated in favor of the `alloc` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     *(ptr as *mut u16) = 42;
///     assert_eq!(*(ptr as *mut u16), 42);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
unsafe {
// Make sure we don't accidentally allow omitting the allocator shim in
// stable code until it is actually stabilized.
#[cfg(not(bootstrap))]
core::ptr::read_volatile(&__rust_no_alloc_shim_is_unstable);
__rust_alloc(layout.size(), layout.align())
}
}
/// Deallocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate's default.
///
/// This function is expected to be deprecated in favor of the `dealloc` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::dealloc`].
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
}
/// Reallocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::realloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate's default.
///
/// This function is expected to be deprecated in favor of the `realloc` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::realloc`].
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
}
/// Allocate zero-initialized memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate's default.
///
/// This function is expected to be deprecated in favor of the `alloc_zeroed` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc_zeroed`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc_zeroed, dealloc, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc_zeroed(layout);
///
///     assert_eq!(*(ptr as *mut u16), 0);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
}
#[cfg(not(test))]
impl Global {
// Shared implementation of `allocate` and `allocate_zeroed`: a zero-sized
// request never reaches the global allocator and gets a dangling (but
// well-aligned) pointer instead, per the `Allocator` contract.
#[inline]
fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
match layout.size() {
0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
// SAFETY: `layout` is non-zero in size,
size => unsafe {
let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
Ok(NonNull::slice_from_raw_parts(ptr, size))
},
}
}
// Shared implementation of `grow` and `grow_zeroed`; `zeroed` selects whether
// the newly added tail bytes get zero-filled.
// SAFETY: Same as `Allocator::grow`
#[inline]
unsafe fn grow_impl(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
zeroed: bool,
) -> Result<NonNull<[u8]>, AllocError> {
debug_assert!(
new_layout.size() >= old_layout.size(),
"`new_layout.size()` must be greater than or equal to `old_layout.size()`"
);
match old_layout.size() {
// Growing a zero-sized block is just a fresh allocation.
0 => self.alloc_impl(new_layout, zeroed),
// SAFETY: `new_size` is non-zero, because this arm only runs when
// `old_size` is non-zero and the safety conditions require
// `new_size` to be greater than or equal to `old_size`.
// Other conditions must be upheld by the caller
old_size if old_layout.align() == new_layout.align() => unsafe {
let new_size = new_layout.size();
// `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
intrinsics::assume(new_size >= old_layout.size());
let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
if zeroed {
// Only the newly added tail needs zeroing; the first `old_size`
// bytes keep their previous contents.
raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
}
Ok(NonNull::slice_from_raw_parts(ptr, new_size))
},
// SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
// both the old and new memory allocation are valid for reads and writes for `old_size`
// bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
// `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
// for `dealloc` must be upheld by the caller.
old_size => unsafe {
let new_ptr = self.alloc_impl(new_layout, zeroed)?;
ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
self.deallocate(ptr, old_layout);
Ok(new_ptr)
},
}
}
}
#[unstable(feature = "allocator_api", issue = "32838")]
#[cfg(not(test))]
unsafe impl Allocator for Global {
#[inline]
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.alloc_impl(layout, false)
}
#[inline]
fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.alloc_impl(layout, true)
}
#[inline]
unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
// Zero-sized allocations were never passed to the global allocator
// (`alloc_impl` hands out a dangling pointer for them), so there is
// nothing to free in that case.
if layout.size() != 0 {
// SAFETY: `layout` is non-zero in size,
// other conditions must be upheld by the caller
unsafe { dealloc(ptr.as_ptr(), layout) }
}
}
#[inline]
unsafe fn grow(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
// SAFETY: all conditions must be upheld by the caller
unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
}
#[inline]
unsafe fn grow_zeroed(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
// SAFETY: all conditions must be upheld by the caller
unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
}
#[inline]
unsafe fn shrink(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
debug_assert!(
new_layout.size() <= old_layout.size(),
"`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
);
match new_layout.size() {
// Shrinking to zero: free the old block and return a dangling (but
// aligned) zero-length slice pointer, mirroring `alloc_impl`.
// SAFETY: conditions must be upheld by the caller
0 => unsafe {
self.deallocate(ptr, old_layout);
Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
},
// Same alignment: let the global allocator's `realloc` do the shrink.
// SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
new_size if old_layout.align() == new_layout.align() => unsafe {
// `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
intrinsics::assume(new_size <= old_layout.size());
let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
Ok(NonNull::slice_from_raw_parts(ptr, new_size))
},
// Alignment changed (`realloc` keeps the old alignment): allocate a
// fresh block, copy the surviving prefix, then free the old block.
// SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
// both the old and new memory allocation are valid for reads and writes for `new_size`
// bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
// `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
// for `dealloc` must be upheld by the caller.
new_size => unsafe {
let new_ptr = self.allocate(new_layout)?;
ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
self.deallocate(ptr, old_layout);
Ok(new_ptr)
},
}
}
}
/// The allocator for unique pointers.
// This is the `exchange_malloc` lang item: the compiler emits calls to it for
// box allocations, passing the size and alignment of the boxed type.
#[cfg(all(not(no_global_oom_handling), not(test)))]
#[lang = "exchange_malloc"]
#[inline]
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
// SAFETY: the caller (the compiler-generated call site) must supply a valid
// size/align pair, as required by `from_size_align_unchecked`.
let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
match Global.allocate(layout) {
Ok(ptr) => ptr.as_mut_ptr(),
// On failure, divert to the (diverging) allocation error handler; this
// function never returns null.
Err(_) => handle_alloc_error(layout),
}
}
// Deallocates the memory backing a `Box` (lang item `box_free`). Note this
// only frees memory — it does not run the pointee's destructor; presumably the
// compiler-generated caller drops the value first (TODO confirm at call sites).
#[cfg_attr(not(test), lang = "box_free")]
#[inline]
// This signature has to be the same as `Box`, otherwise an ICE will happen.
// When an additional parameter to `Box` is added (like `A: Allocator`), this has to be added here as
// well.
// For example if `Box` is changed to `struct Box<T: ?Sized, A: Allocator>(Unique<T>, A)`,
// this function has to be changed to `fn box_free<T: ?Sized, A: Allocator>(Unique<T>, A)` as well.
pub(crate) unsafe fn box_free<T: ?Sized, A: Allocator>(ptr: Unique<T>, alloc: A) {
unsafe {
// `T: ?Sized`, so the size/alignment must be recovered dynamically from
// the value itself rather than from the type.
let size = size_of_val(ptr.as_ref());
let align = min_align_of_val(ptr.as_ref());
let layout = Layout::from_size_align_unchecked(size, align);
alloc.deallocate(From::from(ptr.cast()), layout)
}
}
// # Allocation error handler
#[cfg(not(no_global_oom_handling))]
extern "Rust" {
// This is the magic symbol to call the global alloc error handler. rustc generates
// it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
// default implementations below (`__rdl_oom`) otherwise.
// Diverges (`-> !`): allocation-error handling never returns to the caller.
fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
}
/// Abort on memory allocation error or failure.
///
/// Callers of memory allocation APIs wishing to abort computation
/// in response to an allocation error are encouraged to call this function,
/// rather than directly invoking `panic!` or similar.
///
/// The default behavior of this function is to print a message to standard error
/// and abort the process.
/// It can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
///
/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
#[stable(feature = "global_alloc", since = "1.28.0")]
#[rustc_const_unstable(feature = "const_alloc_error", issue = "92523")]
#[cfg(all(not(no_global_oom_handling), not(test)))]
#[cold]
pub const fn handle_alloc_error(layout: Layout) -> ! {
// Compile-time path: no registered handler exists during const evaluation,
// so the failure is reported as a plain const panic.
const fn ct_error(_: Layout) -> ! {
panic!("allocation failed");
}
// Runtime path: forward to the registered (or default) error handler.
fn rt_error(layout: Layout) -> ! {
unsafe {
__rust_alloc_error_handler(layout.size(), layout.align());
}
}
// SAFETY: `const_eval_select` picks `ct_error` in const contexts and
// `rt_error` at runtime; both diverge for every input.
unsafe { core::intrinsics::const_eval_select((layout,), ct_error, rt_error) }
}
// For alloc test `std::alloc::handle_alloc_error` can be used directly.
#[cfg(all(not(no_global_oom_handling), test))]
pub use std::alloc::handle_alloc_error;
// Default allocation-error handler, used when no `#[alloc_error_handler]` is
// registered by the program.
#[cfg(all(not(no_global_oom_handling), not(test)))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
pub mod __alloc_error_handler {
// called via generated `__rust_alloc_error_handler` if there is no
// `#[alloc_error_handler]`.
#[rustc_std_internal_symbol]
pub unsafe fn __rdl_oom(size: usize, _align: usize) -> ! {
extern "Rust" {
// This symbol is emitted by rustc next to __rust_alloc_error_handler.
// Its value depends on the -Zoom={panic,abort} compiler option.
static __rust_alloc_error_handler_should_panic: u8;
}
#[allow(unused_unsafe)]
if unsafe { __rust_alloc_error_handler_should_panic != 0 } {
// -Zoom=panic: raise an ordinary (unwinding) panic.
panic!("memory allocation of {size} bytes failed")
} else {
// Otherwise panic without unwinding.
core::panicking::panic_nounwind_fmt(format_args!(
"memory allocation of {size} bytes failed"
))
}
}
}
/// Specialize clones into pre-allocated, uninitialized memory.
/// Used by `Box::clone` and `Rc`/`Arc::make_mut`.
pub(crate) trait WriteCloneIntoRaw: Sized {
// Writes a clone of `self` into `target`. The impls below require `target`
// to be valid for writes of `Self` and properly aligned; it may point to
// uninitialized memory.
unsafe fn write_clone_into_raw(&self, target: *mut Self);
}
// Base (specializable) impl for all `Clone` types: clone directly into the
// destination. The `Copy` impl below overrides this with a bitwise copy.
impl<T: Clone> WriteCloneIntoRaw for T {
#[inline]
default unsafe fn write_clone_into_raw(&self, target: *mut Self) {
// Having allocated *first* may allow the optimizer to create
// the cloned value in-place, skipping the local and move.
unsafe { target.write(self.clone()) };
}
}
// Specialization for `Copy` types: a bitwise copy, with no `clone` call.
impl<T: Copy> WriteCloneIntoRaw for T {
#[inline]
unsafe fn write_clone_into_raw(&self, target: *mut Self) {
// We can always copy in-place, without ever involving a local value.
unsafe { target.copy_from_nonoverlapping(self, 1) };
}
}