]>
Commit | Line | Data |
---|---|---|
94b46f34 XL |
1 | //! Memory allocation APIs |
2 | ||
3 | #![stable(feature = "alloc_module", since = "1.28.0")] | |
83c7162d | 4 | |
29967ef6 XL |
5 | #[cfg(not(test))] |
6 | use core::intrinsics; | |
29967ef6 | 7 | |
29967ef6 XL |
8 | #[cfg(not(test))] |
9 | use core::ptr::{self, NonNull}; | |
83c7162d | 10 | |
94b46f34 | 11 | #[stable(feature = "alloc_module", since = "1.28.0")] |
83c7162d XL |
12 | #[doc(inline)] |
13 | pub use core::alloc::*; | |
14 | ||
416331ca XL |
15 | #[cfg(test)] |
16 | mod tests; | |
17 | ||
extern "Rust" {
    // These are the magic symbols to call the global allocator. rustc generates
    // them to call `__rg_alloc` etc. if there is a `#[global_allocator]` attribute
    // (the code expanding that attribute macro generates those functions), or to call
    // the default implementations in std (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
    // otherwise.
    // The rustc fork of LLVM 14 and earlier also special-cases these function names to be able to optimize them
    // like `malloc`, `realloc`, and `free`, respectively.
    #[rustc_allocator]
    #[rustc_nounwind]
    fn __rust_alloc(size: usize, align: usize) -> *mut u8;
    #[rustc_deallocator]
    #[rustc_nounwind]
    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
    #[rustc_reallocator]
    #[rustc_nounwind]
    fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
    #[rustc_allocator_zeroed]
    #[rustc_nounwind]
    fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;

    // Read (volatile) by `alloc` below so that stable code cannot link without
    // the allocator shim while the shim-omission mechanism is still unstable.
    static __rust_no_alloc_shim_is_unstable: u8;
}
41 | ||
/// The global memory allocator.
///
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](self#functions).
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Copy, Clone, Default, Debug)]
#[cfg(not(test))]
pub struct Global;

// In test builds this crate links against `std`, so re-export std's `Global`
// instead of defining a second, conflicting one.
#[cfg(test)]
pub use std::alloc::Global;
/// Allocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `alloc` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     *(ptr as *mut u16) = 42;
///     assert_eq!(*(ptr as *mut u16), 42);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        // The volatile read cannot be optimized away, so it forces a link-time
        // reference to the `__rust_no_alloc_shim_is_unstable` symbol.
        core::ptr::read_volatile(&__rust_no_alloc_shim_is_unstable);

        __rust_alloc(layout.size(), layout.align())
    }
}
83c7162d | 101 | |
/// Deallocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `dealloc` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::dealloc`].
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
    // SAFETY: the caller upholds the `GlobalAlloc::dealloc` contract
    // (`ptr` was allocated by this allocator with this `layout`).
    unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
}
83c7162d | 119 | |
/// Reallocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::realloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `realloc` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::realloc`].
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
    // SAFETY: the caller upholds the `GlobalAlloc::realloc` contract.
    unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
}
83c7162d | 138 | |
/// Allocate zero-initialized memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `alloc_zeroed` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc_zeroed`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc_zeroed, dealloc, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc_zeroed(layout);
///
///     assert_eq!(*(ptr as *mut u16), 0);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
    // SAFETY: the caller upholds the `GlobalAlloc::alloc_zeroed` contract.
    unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
}
172 | ||
#[cfg(not(test))]
impl Global {
    // Shared implementation of `allocate` / `allocate_zeroed`: zero-sized
    // layouts get a dangling (but well-aligned) pointer without touching the
    // allocator; everything else goes through the global allocation functions.
    #[inline]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    // Shared implementation of `grow` / `grow_zeroed`.
    // SAFETY: Same as `Allocator::grow`
    #[inline]
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // Growing a zero-sized allocation is just a fresh allocation.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero as `new_size` is greater than or equal to
            // `old_size` (required by the safety conditions) and `old_size` is non-zero
            // in this arm. Other conditions must be upheld by the caller
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                intrinsics::assume(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    // `realloc` preserves the old contents but leaves the tail
                    // uninitialized; zero it by hand when growing zeroed.
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // Alignment changed: `realloc` cannot be used, so allocate fresh,
            // copy, and free the old block.
            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
235 | ||
#[unstable(feature = "allocator_api", issue = "32838")]
#[cfg(not(test))]
unsafe impl Allocator for Global {
    #[inline]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Zero-sized allocations were never given to the underlying allocator
        // (see `alloc_impl`), so there is nothing to free for them.
        if layout.size() != 0 {
            // SAFETY: `layout` is non-zero in size,
            // other conditions must be upheld by the caller
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    #[inline]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // Shrinking to zero size: free the block and hand back a dangling
            // pointer aligned for the new layout.
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                intrinsics::assume(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // Alignment changed: allocate fresh, copy the surviving prefix,
            // and free the old block.
            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
323 | ||
324 | /// The allocator for unique pointers. | |
17df50a5 | 325 | #[cfg(all(not(no_global_oom_handling), not(test)))] |
83c7162d XL |
326 | #[lang = "exchange_malloc"] |
327 | #[inline] | |
328 | unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { | |
f035d41b | 329 | let layout = unsafe { Layout::from_size_align_unchecked(size, align) }; |
fc512014 | 330 | match Global.allocate(layout) { |
1b1a35ee | 331 | Ok(ptr) => ptr.as_mut_ptr(), |
ba9703b0 | 332 | Err(_) => handle_alloc_error(layout), |
83c7162d XL |
333 | } |
334 | } | |
335 | ||
// # Allocation error handler

#[cfg(not(no_global_oom_handling))]
extern "Rust" {
    // This is the magic symbol to call the global alloc error handler. rustc generates
    // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
    // default implementations below (`__rdl_oom`) otherwise.
    fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
}
345 | ||
/// Signal a memory allocation error.
///
/// Callers of memory allocation APIs wishing to cease execution
/// in response to an allocation error are encouraged to call this function,
/// rather than directly invoking [`panic!`] or similar.
///
/// This function is guaranteed to diverge (not return normally with a value), but depending on
/// global configuration, it may either panic (resulting in unwinding or aborting as per
/// configuration for all panics), or abort the process (with no unwinding).
///
/// The default behavior is:
///
/// * If the binary links against `std` (typically the case), then
///   print a message to standard error and abort the process.
///   This behavior can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
///   Future versions of Rust may panic by default instead.
///
/// * If the binary does not link against `std` (all of its crates are marked
///   [`#![no_std]`][no_std]), then call [`panic!`] with a message.
///   [The panic handler] applies as to any panic.
///
/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
/// [The panic handler]: https://doc.rust-lang.org/reference/runtime.html#the-panic_handler-attribute
/// [no_std]: https://doc.rust-lang.org/reference/names/preludes.html#the-no_std-attribute
#[stable(feature = "global_alloc", since = "1.28.0")]
#[rustc_const_unstable(feature = "const_alloc_error", issue = "92523")]
#[cfg(all(not(no_global_oom_handling), not(test)))]
#[cold]
pub const fn handle_alloc_error(layout: Layout) -> ! {
    // Compile-time path: a plain const panic (the extern handler cannot be
    // called during const evaluation).
    const fn ct_error(_: Layout) -> ! {
        panic!("allocation failed");
    }

    // Runtime path: dispatch to the registered (or default) error handler.
    #[inline]
    fn rt_error(layout: Layout) -> ! {
        unsafe {
            __rust_alloc_error_handler(layout.size(), layout.align());
        }
    }

    #[cfg(not(feature = "panic_immediate_abort"))]
    // SAFETY: `ct_error` and `rt_error` behave equivalently for the purposes
    // of const-eval-vs-runtime selection: both diverge on allocation failure.
    unsafe {
        core::intrinsics::const_eval_select((layout,), ct_error, rt_error)
    }

    #[cfg(feature = "panic_immediate_abort")]
    ct_error(layout)
}
395 | ||
// For alloc test `std::alloc::handle_alloc_error` can be used directly.
#[cfg(all(not(no_global_oom_handling), test))]
pub use std::alloc::handle_alloc_error;

#[cfg(all(not(no_global_oom_handling), not(test)))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
pub mod __alloc_error_handler {
    // called via generated `__rust_alloc_error_handler` if there is no
    // `#[alloc_error_handler]`.
    #[rustc_std_internal_symbol]
    pub unsafe fn __rdl_oom(size: usize, _align: usize) -> ! {
        extern "Rust" {
            // This symbol is emitted by rustc next to __rust_alloc_error_handler.
            // Its value depends on the -Zoom={panic,abort} compiler option.
            static __rust_alloc_error_handler_should_panic: u8;
        }

        if unsafe { __rust_alloc_error_handler_should_panic != 0 } {
            // -Zoom=panic: raise an ordinary (unwinding) panic.
            panic!("memory allocation of {size} bytes failed")
        } else {
            // -Zoom=abort: panic without unwinding, which aborts.
            core::panicking::panic_nounwind_fmt(
                format_args!("memory allocation of {size} bytes failed"),
                /* force_no_backtrace */ false,
            )
        }
    }
}
5869c6ff | 425 | |
#[cfg(not(no_global_oom_handling))]
/// Specialize clones into pre-allocated, uninitialized memory.
/// Used by `Box::clone` and `Rc`/`Arc::make_mut`.
pub(crate) trait WriteCloneIntoRaw: Sized {
    // Safety contract (upheld by callers): `target` must be valid for writes
    // of `Self` and properly aligned; it may be uninitialized.
    unsafe fn write_clone_into_raw(&self, target: *mut Self);
}

#[cfg(not(no_global_oom_handling))]
impl<T: Clone> WriteCloneIntoRaw for T {
    #[inline]
    // `default` marks this as the specializable base impl (unstable
    // `min_specialization`); the `T: Copy` impl below overrides it.
    default unsafe fn write_clone_into_raw(&self, target: *mut Self) {
        // Having allocated *first* may allow the optimizer to create
        // the cloned value in-place, skipping the local and move.
        unsafe { target.write(self.clone()) };
    }
}

#[cfg(not(no_global_oom_handling))]
impl<T: Copy> WriteCloneIntoRaw for T {
    #[inline]
    unsafe fn write_clone_into_raw(&self, target: *mut Self) {
        // We can always copy in-place, without ever involving a local value.
        unsafe { target.copy_from_nonoverlapping(self, 1) };
    }
}