]> git.proxmox.com Git - rustc.git/blame - library/alloc/src/alloc.rs
New upstream version 1.48.0~beta.8+dfsg1
[rustc.git] / library / alloc / src / alloc.rs
CommitLineData
94b46f34
XL
1//! Memory allocation APIs
2
3#![stable(feature = "alloc_module", since = "1.28.0")]
83c7162d 4
ba9703b0 5use core::intrinsics::{self, min_align_of_val, size_of_val};
1b1a35ee 6use core::ptr::{self, NonNull, Unique};
83c7162d 7
94b46f34 8#[stable(feature = "alloc_module", since = "1.28.0")]
83c7162d
XL
9#[doc(inline)]
10pub use core::alloc::*;
11
416331ca
XL
12#[cfg(test)]
13mod tests;
14
extern "Rust" {
    // These are the magic symbols to call the global allocator. rustc generates
    // them from the `#[global_allocator]` attribute if there is one, or uses the
    // default implementations in libstd (`__rdl_alloc` etc in `src/libstd/alloc.rs`)
    // otherwise.
    // NOTE(review): `#[rustc_allocator_nounwind]` presumably marks these as
    // never-unwinding for codegen — confirm against rustc attribute docs.
    #[rustc_allocator]
    #[rustc_allocator_nounwind]
    fn __rust_alloc(size: usize, align: usize) -> *mut u8;
    #[rustc_allocator_nounwind]
    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
    #[rustc_allocator_nounwind]
    fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
    #[rustc_allocator_nounwind]
    fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
}
30
94b46f34
XL
/// The global memory allocator.
///
/// This type implements the [`AllocRef`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](index.html#functions).
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Copy, Clone, Default, Debug)]
// A zero-sized unit struct: all state lives in the registered global allocator.
pub struct Global;
42
94b46f34
XL
/// Allocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `alloc` method
/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc, dealloc, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc(layout);
///
///     *(ptr as *mut u16) = 42;
///     assert_eq!(*(ptr as *mut u16), 42);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
    // SAFETY: the safety contract for `__rust_alloc` is the same as this
    // function's (see `GlobalAlloc::alloc`); it must be upheld by the caller.
    unsafe { __rust_alloc(layout.size(), layout.align()) }
}
83c7162d 76
94b46f34
XL
/// Deallocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `dealloc` method
/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::dealloc`].
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
    // SAFETY: the safety contract for `__rust_dealloc` is the same as this
    // function's (see `GlobalAlloc::dealloc`); it must be upheld by the caller.
    unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
}
83c7162d 94
94b46f34
XL
/// Reallocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::realloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `realloc` method
/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::realloc`].
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
    // SAFETY: the safety contract for `__rust_realloc` is the same as this
    // function's (see `GlobalAlloc::realloc`); it must be upheld by the caller.
    unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
}
83c7162d 112
94b46f34
XL
/// Allocate zero-initialized memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `alloc_zeroed` method
/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc_zeroed`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc_zeroed, dealloc, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc_zeroed(layout);
///
///     assert_eq!(*(ptr as *mut u16), 0);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
    // SAFETY: the safety contract for `__rust_alloc_zeroed` is the same as this
    // function's (see `GlobalAlloc::alloc_zeroed`); it must be upheld by the caller.
    unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
}
145
impl Global {
    /// Shared implementation behind `AllocRef::alloc` and `alloc_zeroed`:
    /// allocates `layout` via the global allocator, optionally zeroed.
    ///
    /// Zero-sized layouts never hit the allocator; they return a dangling
    /// (well-aligned, non-null) pointer with length 0, as `AllocRef` permits.
    #[inline]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
                // A null return from the raw allocator becomes `AllocError`.
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    // SAFETY: Same as `AllocRef::grow`
    #[inline]
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // Growing a zero-sized allocation is just a fresh allocation; the
            // old dangling pointer needs no deallocation.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero, as it is greater than or equal to
            // `old_size` (which is non-zero in this arm) as required by the safety
            // conditions. Other conditions must be upheld by the caller.
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                intrinsics::assume(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                // `realloc` preserves the first `old_size` bytes but leaves the
                // tail uninitialized; zero it here when a zeroed grow was requested.
                if zeroed {
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                // Alignment changed: `realloc` can't be used, so allocate fresh,
                // copy, and free the old block.
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
                self.dealloc(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
207
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl AllocRef for Global {
    #[inline]
    fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    fn alloc_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout) {
        // Zero-sized allocations were never given to the raw allocator
        // (see `alloc_impl`), so they must not be passed back to it.
        if layout.size() != 0 {
            // SAFETY: `layout` is non-zero in size,
            // other conditions must be upheld by the caller
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    #[inline]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // Shrinking to zero size: free the old block and hand back a
            // dangling pointer, mirroring `alloc_impl`'s zero-size convention.
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                self.dealloc(ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                intrinsics::assume(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                // Alignment changed: allocate fresh, copy the surviving prefix,
                // then free the old block.
                let new_ptr = self.alloc(new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
                self.dealloc(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
294
295/// The allocator for unique pointers.
94b46f34 296// This function must not unwind. If it does, MIR codegen will fail.
83c7162d
XL
297#[cfg(not(test))]
298#[lang = "exchange_malloc"]
299#[inline]
300unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
f035d41b 301 let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
3dfed10e 302 match Global.alloc(layout) {
1b1a35ee 303 Ok(ptr) => ptr.as_mut_ptr(),
ba9703b0 304 Err(_) => handle_alloc_error(layout),
83c7162d
XL
305 }
306}
307
#[cfg_attr(not(test), lang = "box_free")]
#[inline]
// This signature has to be the same as `Box`, otherwise an ICE will happen.
// When an additional parameter to `Box` is added (like `A: AllocRef`), this has to be added here as
// well.
// For example if `Box` is changed to `struct Box<T: ?Sized, A: AllocRef>(Unique<T>, A)`,
// this function has to be changed to `fn box_free<T: ?Sized, A: AllocRef>(Unique<T>, A)` as well.
pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
    // SAFETY: the caller must pass a pointer to a live value allocated by
    // `Global`; its layout is recomputed from the value itself (`size_of_val`/
    // `min_align_of_val` handle unsized `T`).
    unsafe {
        let size = size_of_val(ptr.as_ref());
        let align = min_align_of_val(ptr.as_ref());
        let layout = Layout::from_size_align_unchecked(size, align);
        Global.dealloc(ptr.cast().into(), layout)
    }
}
323
94b46f34
XL
/// Abort on memory allocation error or failure.
///
/// Callers of memory allocation APIs wishing to abort computation
/// in response to an allocation error are encouraged to call this function,
/// rather than directly invoking `panic!` or similar.
///
/// The default behavior of this function is to print a message to standard error
/// and abort the process.
/// It can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
///
/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
#[stable(feature = "global_alloc", since = "1.28.0")]
#[rustc_allocator_nounwind]
pub fn handle_alloc_error(layout: Layout) -> ! {
    extern "Rust" {
        // The `oom` lang item is the actual out-of-memory handler; it diverges.
        #[lang = "oom"]
        fn oom_impl(layout: Layout) -> !;
    }
    // SAFETY: `oom_impl` is an extern fn, so calling it is unsafe; the lang
    // item guarantees it exists and never returns.
    unsafe { oom_impl(layout) }
}