src/test/ui/drop/dynamic-drop-async.rs
// Test that values are not leaked in async functions, even in the cases where:
// * Dropping one of the values panics while running the future.
// * The future is dropped at one of its suspend points.
// * Dropping one of the values panics while dropping the future.

// run-pass
// edition:2018
// ignore-wasm32-bare compiled with panic=abort by default

#![allow(unused)]

use std::{
    cell::{Cell, RefCell},
    future::Future,
    marker::Unpin,
    panic,
    pin::Pin,
    ptr,
    rc::Rc,
    task::{Context, Poll, RawWaker, RawWakerVTable, Waker},
};

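// Zero-sized panic payload thrown by `Allocator::fallible_operation`; the test
// harness downcasts caught panics to this type to tell injected failures apart
// from real bugs.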
struct InjectedFailure;

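/// Future that is `Pending` on its first poll and `Ready` with its value on
/// the second, giving every `alloc` call a suspension point.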
struct Defer<T> {
    ready: bool,
    value: Option<T>,
}

impl<T: Unpin> Future for Defer<T> {
    type Output = T;
    fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
        if self.ready {
            Poll::Ready(self.value.take().unwrap())
        } else {
            self.ready = true;
            Poll::Pending
        }
    }
}

/// Allocator tracks the creation and destruction of `Ptr`s.
/// The `failing_op`-th operation will panic.
struct Allocator {
    data: RefCell<Vec<bool>>,
    failing_op: usize,
    cur_ops: Cell<usize>,
}

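// Declare the allocator unwind safe so it can be used across the
// `catch_unwind` calls in the test harness below.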
impl panic::UnwindSafe for Allocator {}
impl panic::RefUnwindSafe for Allocator {}

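// When the allocator itself is dropped, verify that every `Ptr` it handed out
// has been freed; a still-live entry means a value was leaked.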
impl Drop for Allocator {
    fn drop(&mut self) {
        let data = self.data.borrow();
        if data.iter().any(|d| *d) {
            panic!("missing free: {:?}", data);
        }
    }
}

impl Allocator {
    fn new(failing_op: usize) -> Self {
        Allocator { failing_op, cur_ops: Cell::new(0), data: RefCell::new(vec![]) }
    }
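    /// Counts one operation (which may panic), records a new live allocation,
    /// and returns a future that suspends once before yielding the `Ptr`.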
    fn alloc(&self) -> impl Future<Output = Ptr<'_>> + '_ {
        self.fallible_operation();

        let mut data = self.data.borrow_mut();

        let addr = data.len();
        data.push(true);
        Defer { ready: false, value: Some(Ptr(addr, self)) }
    }
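    /// Increments the operation counter and panics with `InjectedFailure` once
    /// the configured `failing_op` is reached.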
    fn fallible_operation(&self) {
        self.cur_ops.set(self.cur_ops.get() + 1);

        if self.cur_ops.get() == self.failing_op {
            panic!(InjectedFailure);
        }
    }
}

// Type that tracks whether it was dropped and can panic when it's created or
// destroyed.
struct Ptr<'a>(usize, &'a Allocator);
impl<'a> Drop for Ptr<'a> {
    fn drop(&mut self) {
        match self.1.data.borrow_mut()[self.0] {
            false => panic!("double free at index {:?}", self.0),
            ref mut d => *d = false,
        }

        self.1.fallible_operation();
    }
}

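// Each async fn below exercises a different initialization / drop pattern.
// `run_test` drives them repeatedly with a panic injected at every possible
// operation and checks that no allocation is leaked.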
async fn dynamic_init(a: Rc<Allocator>, c: bool) {
    let _x;
    if c {
        _x = Some(a.alloc().await);
    }
}

async fn dynamic_drop(a: Rc<Allocator>, c: bool) {
    let x = a.alloc().await;
    if c {
        Some(x)
    } else {
        None
    };
}

struct TwoPtrs<'a>(Ptr<'a>, Ptr<'a>);
async fn struct_dynamic_drop(a: Rc<Allocator>, c0: bool, c1: bool, c: bool) {
    for i in 0..2 {
        let x;
        let y;
        if (c0 && i == 0) || (c1 && i == 1) {
            x = (a.alloc().await, a.alloc().await, a.alloc().await);
            y = TwoPtrs(a.alloc().await, a.alloc().await);
            if c {
                drop(x.1);
                a.alloc().await;
                drop(y.0);
                a.alloc().await;
            }
        }
    }
}

async fn field_assignment(a: Rc<Allocator>, c0: bool) {
    let mut x = (TwoPtrs(a.alloc().await, a.alloc().await), a.alloc().await);

    x.1 = a.alloc().await;
    x.1 = a.alloc().await;

    let f = (x.0).0;
    a.alloc().await;
    if c0 {
        (x.0).0 = f;
    }
    a.alloc().await;
}

async fn assignment(a: Rc<Allocator>, c0: bool, c1: bool) {
    let mut _v = a.alloc().await;
    let mut _w = a.alloc().await;
    if c0 {
        drop(_v);
    }
    _v = _w;
    if c1 {
        _w = a.alloc().await;
    }
}

async fn array_simple(a: Rc<Allocator>) {
    let _x = [a.alloc().await, a.alloc().await, a.alloc().await, a.alloc().await];
}

async fn vec_simple(a: Rc<Allocator>) {
    let _x = vec![a.alloc().await, a.alloc().await, a.alloc().await, a.alloc().await];
}

async fn mixed_drop_and_nondrop(a: Rc<Allocator>) {
    // check that destructor panics handle drop
    // and non-drop blocks in the same scope correctly.
    //
    // Surprisingly enough, this used to not work.
    let (x, y, z);
    x = a.alloc().await;
    y = 5;
    z = a.alloc().await;
}

#[allow(unreachable_code)]
async fn vec_unreachable(a: Rc<Allocator>) {
    let _x = vec![a.alloc().await, a.alloc().await, a.alloc().await, return];
}

async fn slice_pattern_one_of(a: Rc<Allocator>, i: usize) {
    let array = [a.alloc().await, a.alloc().await, a.alloc().await, a.alloc().await];
    let _x = match i {
        0 => {
            let [a, ..] = array;
            a
        }
        1 => {
            let [_, a, ..] = array;
            a
        }
        2 => {
            let [_, _, a, _] = array;
            a
        }
        3 => {
            let [_, _, _, a] = array;
            a
        }
        _ => panic!("unmatched"),
    };
    a.alloc().await;
}

async fn subslice_pattern_from_end_with_drop(a: Rc<Allocator>, arg: bool, arg2: bool) {
    let arr = [a.alloc().await, a.alloc().await, a.alloc().await, a.alloc().await, a.alloc().await];
    if arg2 {
        drop(arr);
        return;
    }

    if arg {
        let [.., _x, _] = arr;
    } else {
        let [_, _y @ ..] = arr;
    }
    a.alloc().await;
}

async fn subslice_pattern_reassign(a: Rc<Allocator>) {
    let mut ar = [a.alloc().await, a.alloc().await, a.alloc().await];
    let [_, _, _x] = ar;
    ar = [a.alloc().await, a.alloc().await, a.alloc().await];
    let [_, _y @ ..] = ar;
    a.alloc().await;
}

async fn move_ref_pattern(a: Rc<Allocator>) {
    let mut tup = (a.alloc().await, a.alloc().await, a.alloc().await, a.alloc().await);
    let (ref _a, ref mut _b, _c, mut _d) = tup;
    a.alloc().await;
}

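/// Drives `f` with an increasing number of polls until the future completes.
/// For each poll count, it first runs without panics to count the operations
/// performed, then replays the run with an `InjectedFailure` panic injected at
/// each operation that happens after the penultimate poll (including during
/// the drop of the partially-run future), relying on `Allocator`'s destructor
/// to detect leaks.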
fn run_test<F, G>(cx: &mut Context<'_>, ref f: F)
where
    F: Fn(Rc<Allocator>) -> G,
    G: Future<Output = ()>,
{
    for polls in 0.. {
        // Run without any panics to find which operations happen after the
        // penultimate `poll`.
        let first_alloc = Rc::new(Allocator::new(usize::MAX));
        let mut fut = Box::pin(f(first_alloc.clone()));
        let mut ops_before_last_poll = 0;
        let mut completed = false;
        for _ in 0..polls {
            ops_before_last_poll = first_alloc.cur_ops.get();
            if let Poll::Ready(()) = fut.as_mut().poll(cx) {
                completed = true;
            }
        }
        drop(fut);

        // Start at `ops_before_last_poll` so that we will always be able to
        // `poll` the expected number of times.
        for failing_op in ops_before_last_poll..first_alloc.cur_ops.get() {
            let alloc = Rc::new(Allocator::new(failing_op + 1));
            let f = &f;
            let cx = &mut *cx;
            let result = panic::catch_unwind(panic::AssertUnwindSafe(move || {
                let mut fut = Box::pin(f(alloc));
                for _ in 0..polls {
                    let _ = fut.as_mut().poll(cx);
                }
                drop(fut);
            }));
            match result {
                Ok(..) => panic!("test executed more ops on first call"),
                Err(e) => {
                    if e.downcast_ref::<InjectedFailure>().is_none() {
                        panic::resume_unwind(e);
                    }
                }
            }
        }

        if completed {
            break;
        }
    }
}

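// Vtable for a no-op waker: cloning produces another no-op `RawWaker`, and
// wake, wake_by_ref, and drop all do nothing.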
fn clone_waker(data: *const ()) -> RawWaker {
    RawWaker::new(data, &RawWakerVTable::new(clone_waker, drop, drop, drop))
}

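// Build a context around the no-op waker and run every test case under it.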
fn main() {
    let waker = unsafe { Waker::from_raw(clone_waker(ptr::null())) };
    let context = &mut Context::from_waker(&waker);

    run_test(context, |a| dynamic_init(a, false));
    run_test(context, |a| dynamic_init(a, true));
    run_test(context, |a| dynamic_drop(a, false));
    run_test(context, |a| dynamic_drop(a, true));

    run_test(context, |a| assignment(a, false, false));
    run_test(context, |a| assignment(a, false, true));
    run_test(context, |a| assignment(a, true, false));
    run_test(context, |a| assignment(a, true, true));

    run_test(context, |a| array_simple(a));
    run_test(context, |a| vec_simple(a));
    run_test(context, |a| vec_unreachable(a));

    run_test(context, |a| struct_dynamic_drop(a, false, false, false));
    run_test(context, |a| struct_dynamic_drop(a, false, false, true));
    run_test(context, |a| struct_dynamic_drop(a, false, true, false));
    run_test(context, |a| struct_dynamic_drop(a, false, true, true));
    run_test(context, |a| struct_dynamic_drop(a, true, false, false));
    run_test(context, |a| struct_dynamic_drop(a, true, false, true));
    run_test(context, |a| struct_dynamic_drop(a, true, true, false));
    run_test(context, |a| struct_dynamic_drop(a, true, true, true));

    run_test(context, |a| field_assignment(a, false));
    run_test(context, |a| field_assignment(a, true));

    run_test(context, |a| mixed_drop_and_nondrop(a));

    run_test(context, |a| slice_pattern_one_of(a, 0));
    run_test(context, |a| slice_pattern_one_of(a, 1));
    run_test(context, |a| slice_pattern_one_of(a, 2));
    run_test(context, |a| slice_pattern_one_of(a, 3));

    run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, true));
    run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, false));
    run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, true));
    run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, false));
    run_test(context, |a| subslice_pattern_reassign(a));

    run_test(context, |a| move_ref_pattern(a));
}