]> git.proxmox.com Git - rustc.git/blame - src/liballoc/heap.rs
New upstream version 1.26.0+dfsg1
[rustc.git] / src / liballoc / heap.rs
CommitLineData
1a4d82fc
JJ
1// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
2// file at the top-level directory of this distribution and at
3// http://rust-lang.org/COPYRIGHT.
4//
5// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8// option. This file may not be copied, modified, or distributed
9// except according to those terms.
10
041b39d2 11#![unstable(feature = "allocator_api",
62682a34
SL
12 reason = "the precise API and guarantees it provides may be tweaked \
13 slightly, especially to possibly take into account the \
14 types being stored to make room for a future \
e9174d1e 15 tracing garbage collector",
041b39d2 16 issue = "32838")]
62682a34 17
476ff2be 18use core::intrinsics::{min_align_of_val, size_of_val};
041b39d2
XL
19use core::mem::{self, ManuallyDrop};
20use core::usize;
d9579d0f 21
0531ce1d 22pub use core::heap::*;
041b39d2
XL
#[doc(hidden)]
pub mod __core {
    // Hidden wholesale re-export of `core`. Presumably provides a stable
    // `alloc::heap::__core::*` path for compiler/macro-generated code —
    // TODO(review): confirm against the actual users of this path.
    pub use core::*;
}
27
041b39d2
XL
// Unmangled shims the compiler resolves against the crate-graph's selected
// allocator at link time. They traffic in raw (size, align) pairs rather
// than `Layout` so the symbol ABI stays plain.
//
// Error convention (grounded in the `Alloc for Heap` impl below):
// * fallible allocators take `err`, a pointer to caller-provided storage
//   for an `AllocErr`; on failure they return null after filling `err` in;
// * `__rust_grow_in_place`/`__rust_shrink_in_place` return a `u8` used as
//   a boolean — nonzero means success.
extern "Rust" {
    // `#[allocator]` marks the canonical allocation entry point;
    // `#[rustc_allocator_nounwind]` asserts these shims never unwind.
    #[allocator]
    #[rustc_allocator_nounwind]
    fn __rust_alloc(size: usize, align: usize, err: *mut u8) -> *mut u8;
    // `#[cold]`: OOM reporting is the unlikely path; keep it out of the
    // hot code layout. Diverges — never returns to the caller.
    #[cold]
    #[rustc_allocator_nounwind]
    fn __rust_oom(err: *const u8) -> !;
    #[rustc_allocator_nounwind]
    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
    // Writes the usable (min, max) byte range for the `Layout` behind the
    // type-erased `layout` pointer through the two out-parameters.
    #[rustc_allocator_nounwind]
    fn __rust_usable_size(layout: *const u8,
                          min: *mut usize,
                          max: *mut usize);
    #[rustc_allocator_nounwind]
    fn __rust_realloc(ptr: *mut u8,
                      old_size: usize,
                      old_align: usize,
                      new_size: usize,
                      new_align: usize,
                      err: *mut u8) -> *mut u8;
    #[rustc_allocator_nounwind]
    fn __rust_alloc_zeroed(size: usize, align: usize, err: *mut u8) -> *mut u8;
    // `excess` (out-param): actual number of usable bytes allocated, which
    // may exceed the requested `size`.
    #[rustc_allocator_nounwind]
    fn __rust_alloc_excess(size: usize,
                           align: usize,
                           excess: *mut usize,
                           err: *mut u8) -> *mut u8;
    #[rustc_allocator_nounwind]
    fn __rust_realloc_excess(ptr: *mut u8,
                             old_size: usize,
                             old_align: usize,
                             new_size: usize,
                             new_align: usize,
                             excess: *mut usize,
                             err: *mut u8) -> *mut u8;
    // In-place resize attempts: return nonzero on success, 0 when the
    // block could not be resized without moving.
    #[rustc_allocator_nounwind]
    fn __rust_grow_in_place(ptr: *mut u8,
                            old_size: usize,
                            old_align: usize,
                            new_size: usize,
                            new_align: usize) -> u8;
    #[rustc_allocator_nounwind]
    fn __rust_shrink_in_place(ptr: *mut u8,
                              old_size: usize,
                              old_align: usize,
                              new_size: usize,
                              new_align: usize) -> u8;
}
76
041b39d2
XL
/// Zero-sized handle for the default (global) heap allocator.
///
/// Carries no state of its own: every `Alloc` method on it simply forwards
/// to the corresponding `__rust_*` shim.
#[derive(Copy, Clone, Default, Debug)]
pub struct Heap;
1a4d82fc 79
041b39d2
XL
// Safety: this impl merely forwards to the linked allocator shims, which
// are trusted to uphold the `Alloc` contract.
//
// Shared error-slot pattern used throughout: the shims report failure by
// writing an `AllocErr` through a raw out-pointer, so each fallible method
// reserves *uninitialized* stack space for one (`mem::uninitialized`),
// wrapped in `ManuallyDrop` so the possibly-uninitialized value is never
// dropped. The slot is only read back (`ManuallyDrop::into_inner`) on the
// failure path, after the shim has initialized it.
unsafe impl Alloc for Heap {
    #[inline]
    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
        // Error slot for the shim to fill in; see pattern note above.
        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
        let ptr = __rust_alloc(layout.size(),
                               layout.align(),
                               &mut *err as *mut AllocErr as *mut u8);
        if ptr.is_null() {
            // Null return means the shim wrote a valid `AllocErr` into the
            // slot; it is now safe to move it out.
            Err(ManuallyDrop::into_inner(err))
        } else {
            Ok(ptr)
        }
    }

    /// Reports an allocation failure and aborts/diverges via the linked
    /// `__rust_oom` handler; `err` is passed by type-erased pointer.
    #[inline]
    #[cold]
    fn oom(&mut self, err: AllocErr) -> ! {
        unsafe {
            __rust_oom(&err as *const AllocErr as *const u8)
        }
    }

    #[inline]
    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
        __rust_dealloc(ptr, layout.size(), layout.align())
    }

    /// Queries the (min, max) usable size for `layout` through the shim's
    /// two out-parameters.
    #[inline]
    fn usable_size(&self, layout: &Layout) -> (usize, usize) {
        let mut min = 0;
        let mut max = 0;
        unsafe {
            __rust_usable_size(layout as *const Layout as *const u8,
                               &mut min,
                               &mut max);
        }
        (min, max)
    }

    #[inline]
    unsafe fn realloc(&mut self,
                      ptr: *mut u8,
                      layout: Layout,
                      new_layout: Layout)
                      -> Result<*mut u8, AllocErr>
    {
        // Error slot for the shim to fill in; see pattern note above.
        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
        let ptr = __rust_realloc(ptr,
                                 layout.size(),
                                 layout.align(),
                                 new_layout.size(),
                                 new_layout.align(),
                                 &mut *err as *mut AllocErr as *mut u8);
        if ptr.is_null() {
            Err(ManuallyDrop::into_inner(err))
        } else {
            // NOTE(review): redundant — `err` is already `ManuallyDrop`, so
            // it would never be dropped anyway; `alloc`/`alloc_zeroed` omit
            // this `forget` for the same pattern. Harmless, kept as-is.
            mem::forget(err);
            Ok(ptr)
        }
    }

    #[inline]
    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
        // Error slot for the shim to fill in; see pattern note above.
        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
        let ptr = __rust_alloc_zeroed(layout.size(),
                                      layout.align(),
                                      &mut *err as *mut AllocErr as *mut u8);
        if ptr.is_null() {
            Err(ManuallyDrop::into_inner(err))
        } else {
            Ok(ptr)
        }
    }

    #[inline]
    unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
        // `size` receives the actual usable byte count (>= requested).
        let mut size = 0;
        let ptr = __rust_alloc_excess(layout.size(),
                                      layout.align(),
                                      &mut size,
                                      &mut *err as *mut AllocErr as *mut u8);
        if ptr.is_null() {
            Err(ManuallyDrop::into_inner(err))
        } else {
            Ok(Excess(ptr, size))
        }
    }

    #[inline]
    unsafe fn realloc_excess(&mut self,
                             ptr: *mut u8,
                             layout: Layout,
                             new_layout: Layout) -> Result<Excess, AllocErr> {
        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
        // `size` receives the actual usable byte count (>= requested).
        let mut size = 0;
        let ptr = __rust_realloc_excess(ptr,
                                        layout.size(),
                                        layout.align(),
                                        new_layout.size(),
                                        new_layout.align(),
                                        &mut size,
                                        &mut *err as *mut AllocErr as *mut u8);
        if ptr.is_null() {
            Err(ManuallyDrop::into_inner(err))
        } else {
            Ok(Excess(ptr, size))
        }
    }

    /// Attempts to grow the block without moving it; the shim returns a
    /// nonzero `u8` on success.
    #[inline]
    unsafe fn grow_in_place(&mut self,
                            ptr: *mut u8,
                            layout: Layout,
                            new_layout: Layout)
                            -> Result<(), CannotReallocInPlace>
    {
        // Contract checks (debug-only): grow must not shrink, and the
        // alignment must be unchanged.
        debug_assert!(new_layout.size() >= layout.size());
        debug_assert!(new_layout.align() == layout.align());
        let ret = __rust_grow_in_place(ptr,
                                       layout.size(),
                                       layout.align(),
                                       new_layout.size(),
                                       new_layout.align());
        if ret != 0 {
            Ok(())
        } else {
            Err(CannotReallocInPlace)
        }
    }

    /// Attempts to shrink the block without moving it; mirror image of
    /// `grow_in_place`.
    #[inline]
    unsafe fn shrink_in_place(&mut self,
                              ptr: *mut u8,
                              layout: Layout,
                              new_layout: Layout) -> Result<(), CannotReallocInPlace> {
        debug_assert!(new_layout.size() <= layout.size());
        debug_assert!(new_layout.align() == layout.align());
        let ret = __rust_shrink_in_place(ptr,
                                        layout.size(),
                                        layout.align(),
                                        new_layout.size(),
                                        new_layout.align());
        if ret != 0 {
            Ok(())
        } else {
            Err(CannotReallocInPlace)
        }
    }
}
230
/// The allocator for unique pointers.
// This function must not unwind. If it does, MIR trans will fail.
#[cfg(not(test))]
#[lang = "exchange_malloc"]
#[inline]
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
    if size == 0 {
        // Zero-size allocations never touch the allocator: any non-null,
        // well-aligned address works, and `align` itself is one (it is
        // nonzero and trivially a multiple of itself). `box_free` below
        // matches this by skipping dealloc when size == 0.
        align as *mut u8
    } else {
        // Caller-supplied size/align are trusted; skip `Layout` validation.
        let layout = Layout::from_size_align_unchecked(size, align);
        // Allocation failure here is not recoverable by the `box` caller:
        // route it to the OOM handler, which diverges.
        Heap.alloc(layout).unwrap_or_else(|err| {
            Heap.oom(err)
        })
    }
}
246
// Lang item the compiler calls to free a `Box`'s backing allocation after
// the boxed value itself has been dropped.
#[cfg_attr(not(test), lang = "box_free")]
#[inline]
pub(crate) unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
    // Recover the layout from the pointed-to value itself; the `*_of_val`
    // intrinsics handle unsized `T` (slices, trait objects) via the
    // pointer's metadata.
    let size = size_of_val(&*ptr);
    let align = min_align_of_val(&*ptr);
    // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
    if size != 0 {
        let layout = Layout::from_size_align_unchecked(size, align);
        Heap.dealloc(ptr as *mut u8, layout);
    }
}
258
1a4d82fc 259#[cfg(test)]
d9579d0f 260mod tests {
1a4d82fc
JJ
261 extern crate test;
262 use self::test::Bencher;
c34b1796 263 use boxed::Box;
041b39d2 264 use heap::{Heap, Alloc, Layout};
1a4d82fc 265
cc61c64b
XL
266 #[test]
267 fn allocate_zeroed() {
268 unsafe {
041b39d2
XL
269 let layout = Layout::from_size_align(1024, 1).unwrap();
270 let ptr = Heap.alloc_zeroed(layout.clone())
271 .unwrap_or_else(|e| Heap.oom(e));
cc61c64b 272
041b39d2 273 let end = ptr.offset(layout.size() as isize);
cc61c64b
XL
274 let mut i = ptr;
275 while i < end {
276 assert_eq!(*i, 0);
277 i = i.offset(1);
278 }
041b39d2 279 Heap.dealloc(ptr, layout);
1a4d82fc
JJ
280 }
281 }
282
283 #[bench]
284 fn alloc_owned_small(b: &mut Bencher) {
285 b.iter(|| {
c34b1796 286 let _: Box<_> = box 10;
1a4d82fc
JJ
287 })
288 }
289}