/* ceph/src/civetweb/src/third_party/duktape-1.8.0/src-separate/duk_heap_memory.c */

/*
 *  Memory allocation handling.
 */

#include "duk_internal.h"

/*
 *  Helpers
 *
 *  The fast path checks are done within a macro to ensure "inlining"
 *  while the slow path actions use a helper (which won't typically be
 *  inlined in size optimized builds).
 */

#if defined(DUK_USE_MARK_AND_SWEEP) && defined(DUK_USE_VOLUNTARY_GC)
#define DUK__VOLUNTARY_PERIODIC_GC(heap)  do { \
		(heap)->mark_and_sweep_trigger_counter--; \
		if ((heap)->mark_and_sweep_trigger_counter <= 0) { \
			duk__run_voluntary_gc(heap); \
		} \
	} while (0)

DUK_LOCAL void duk__run_voluntary_gc(duk_heap *heap) {
	if (DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap)) {
		DUK_DD(DUK_DDPRINT("mark-and-sweep in progress -> skip voluntary mark-and-sweep now"));
	} else {
		duk_small_uint_t flags;
		duk_bool_t rc;

		DUK_D(DUK_DPRINT("triggering voluntary mark-and-sweep"));
		flags = 0;
		rc = duk_heap_mark_and_sweep(heap, flags);
		DUK_UNREF(rc);
	}
}
#else
#define DUK__VOLUNTARY_PERIODIC_GC(heap)  /* no voluntary gc */
#endif  /* DUK_USE_MARK_AND_SWEEP && DUK_USE_VOLUNTARY_GC */
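
/*
 *  Illustration (not part of the original file): the functions below always
 *  dispatch through heap->alloc_func / heap->realloc_func / heap->free_func
 *  with heap->heap_udata as the first argument.  A minimal sketch of the
 *  allocator triple an embedding application might register, assuming the
 *  public Duktape duk_create_heap() API; the my__* names are illustrative.
 */
#if 0
#include <stdlib.h>
#include "duktape.h"

static void *my__alloc(void *udata, duk_size_t size) {
	(void) udata;  /* arrives here as heap->heap_udata; a custom pool could live behind it */
	return malloc(size);  /* returning NULL for size == 0 is acceptable to the callers below */
}

static void *my__realloc(void *udata, void *ptr, duk_size_t size) {
	(void) udata;
	return realloc(ptr, size);
}

static void my__free(void *udata, void *ptr) {
	(void) udata;
	free(ptr);  /* must tolerate NULL, which free() does */
}

/* duk_context *ctx = duk_create_heap(my__alloc, my__realloc, my__free, NULL, NULL); */
#endif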

/*
 *  Allocate memory with garbage collection
 */

#ifdef DUK_USE_MARK_AND_SWEEP
DUK_INTERNAL void *duk_heap_mem_alloc(duk_heap *heap, duk_size_t size) {
	void *res;
	duk_bool_t rc;
	duk_small_int_t i;

	DUK_ASSERT(heap != NULL);
	DUK_ASSERT_DISABLE(size >= 0);

	/*
	 *  Voluntary periodic GC (if enabled)
	 */

	DUK__VOLUNTARY_PERIODIC_GC(heap);

	/*
	 *  First attempt
	 */

#ifdef DUK_USE_GC_TORTURE
	/* simulate alloc failure on every alloc (except when mark-and-sweep is running) */
	if (!DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap)) {
		DUK_DDD(DUK_DDDPRINT("gc torture enabled, pretend that first alloc attempt fails"));
		res = NULL;
		DUK_UNREF(res);
		goto skip_attempt;
	}
#endif
	res = heap->alloc_func(heap->heap_udata, size);
	if (res || size == 0) {
		/* for zero size allocations NULL is allowed */
		return res;
	}
#ifdef DUK_USE_GC_TORTURE
 skip_attempt:
#endif

	DUK_D(DUK_DPRINT("first alloc attempt failed, attempt to gc and retry"));

	/*
	 *  Avoid a GC if GC is already running.  This can happen at a late
	 *  stage in a GC when we try to e.g. resize the stringtable
	 *  or compact objects.
	 */

	if (DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap)) {
		DUK_D(DUK_DPRINT("duk_heap_mem_alloc() failed, gc in progress (gc skipped), alloc size %ld", (long) size));
		return NULL;
	}

	/*
	 *  Retry with several GC attempts.  Initial attempts are made without
	 *  emergency mode; later attempts use emergency mode which minimizes
	 *  memory allocations forcibly.
	 */

	for (i = 0; i < DUK_HEAP_ALLOC_FAIL_MARKANDSWEEP_LIMIT; i++) {
		duk_small_uint_t flags;

		flags = 0;
		if (i >= DUK_HEAP_ALLOC_FAIL_MARKANDSWEEP_EMERGENCY_LIMIT - 1) {
			flags |= DUK_MS_FLAG_EMERGENCY;
		}

		rc = duk_heap_mark_and_sweep(heap, flags);
		DUK_UNREF(rc);

		res = heap->alloc_func(heap->heap_udata, size);
		if (res) {
			DUK_D(DUK_DPRINT("duk_heap_mem_alloc() succeeded after gc (pass %ld), alloc size %ld",
			                 (long) (i + 1), (long) size));
			return res;
		}
	}

	DUK_D(DUK_DPRINT("duk_heap_mem_alloc() failed even after gc, alloc size %ld", (long) size));
	return NULL;
}
#else  /* DUK_USE_MARK_AND_SWEEP */
/*
 *  Compared to a direct macro expansion this wrapper saves a few
 *  instructions because no heap dereferencing is required.
 */
DUK_INTERNAL void *duk_heap_mem_alloc(duk_heap *heap, duk_size_t size) {
	DUK_ASSERT(heap != NULL);
	DUK_ASSERT_DISABLE(size >= 0);

	return heap->alloc_func(heap->heap_udata, size);
}
#endif  /* DUK_USE_MARK_AND_SWEEP */

DUK_INTERNAL void *duk_heap_mem_alloc_zeroed(duk_heap *heap, duk_size_t size) {
	void *res;

	DUK_ASSERT(heap != NULL);
	DUK_ASSERT_DISABLE(size >= 0);

	res = DUK_ALLOC(heap, size);
	if (res) {
		/* assume memset with zero size is OK */
		DUK_MEMZERO(res, size);
	}
	return res;
}
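
/*
 *  Illustration (not part of the original file): a sketch of how an internal
 *  call site typically consumes these allocators.  It assumes the
 *  DUK_ALLOC_ZEROED() wrapper maps to duk_heap_mem_alloc_zeroed() as in the
 *  rest of this source tree; example__make_table() is hypothetical.
 */
#if 0
static duk_uint32_t *example__make_table(duk_heap *heap, duk_size_t count) {
	duk_uint32_t *table;

	table = (duk_uint32_t *) DUK_ALLOC_ZEROED(heap, count * sizeof(duk_uint32_t));
	if (table == NULL && count > 0) {
		/* allocation failed even after the GC retries above */
		return NULL;
	}
	return table;  /* may be NULL for count == 0, which callers must allow */
}
#endif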

/*
 *  Reallocate memory with garbage collection
 */

#ifdef DUK_USE_MARK_AND_SWEEP
DUK_INTERNAL void *duk_heap_mem_realloc(duk_heap *heap, void *ptr, duk_size_t newsize) {
	void *res;
	duk_bool_t rc;
	duk_small_int_t i;

	DUK_ASSERT(heap != NULL);
	/* ptr may be NULL */
	DUK_ASSERT_DISABLE(newsize >= 0);

	/*
	 *  Voluntary periodic GC (if enabled)
	 */

	DUK__VOLUNTARY_PERIODIC_GC(heap);

	/*
	 *  First attempt
	 */

#ifdef DUK_USE_GC_TORTURE
	/* simulate alloc failure on every realloc (except when mark-and-sweep is running) */
	if (!DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap)) {
		DUK_DDD(DUK_DDDPRINT("gc torture enabled, pretend that first realloc attempt fails"));
		res = NULL;
		DUK_UNREF(res);
		goto skip_attempt;
	}
#endif
	res = heap->realloc_func(heap->heap_udata, ptr, newsize);
	if (res || newsize == 0) {
		/* for zero size allocations NULL is allowed */
		return res;
	}
#ifdef DUK_USE_GC_TORTURE
 skip_attempt:
#endif

	DUK_D(DUK_DPRINT("first realloc attempt failed, attempt to gc and retry"));

	/*
	 *  Avoid a GC if GC is already running.  See duk_heap_mem_alloc().
	 */

	if (DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap)) {
		DUK_D(DUK_DPRINT("duk_heap_mem_realloc() failed, gc in progress (gc skipped), alloc size %ld", (long) newsize));
		return NULL;
	}

	/*
	 *  Retry with several GC attempts.  Initial attempts are made without
	 *  emergency mode; later attempts use emergency mode which minimizes
	 *  memory allocations forcibly.
	 */

	for (i = 0; i < DUK_HEAP_ALLOC_FAIL_MARKANDSWEEP_LIMIT; i++) {
		duk_small_uint_t flags;

		flags = 0;
		if (i >= DUK_HEAP_ALLOC_FAIL_MARKANDSWEEP_EMERGENCY_LIMIT - 1) {
			flags |= DUK_MS_FLAG_EMERGENCY;
		}

		rc = duk_heap_mark_and_sweep(heap, flags);
		DUK_UNREF(rc);

		res = heap->realloc_func(heap->heap_udata, ptr, newsize);
		if (res || newsize == 0) {
			DUK_D(DUK_DPRINT("duk_heap_mem_realloc() succeeded after gc (pass %ld), alloc size %ld",
			                 (long) (i + 1), (long) newsize));
			return res;
		}
	}

	DUK_D(DUK_DPRINT("duk_heap_mem_realloc() failed even after gc, alloc size %ld", (long) newsize));
	return NULL;
}
#else  /* DUK_USE_MARK_AND_SWEEP */
/* saves a few instructions to have this wrapper (see comment on duk_heap_mem_alloc) */
DUK_INTERNAL void *duk_heap_mem_realloc(duk_heap *heap, void *ptr, duk_size_t newsize) {
	DUK_ASSERT(heap != NULL);
	/* ptr may be NULL */
	DUK_ASSERT_DISABLE(newsize >= 0);

	return heap->realloc_func(heap->heap_udata, ptr, newsize);
}
#endif  /* DUK_USE_MARK_AND_SWEEP */
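
/*
 *  Illustration (not part of the original file): duk_heap_mem_realloc() keeps
 *  ordinary realloc() semantics, i.e. on failure the old pointer is untouched
 *  and still owned by the caller.  A minimal sketch of the usual safe-grow
 *  pattern; example__grow_buffer() is hypothetical.
 */
#if 0
static duk_bool_t example__grow_buffer(duk_heap *heap, duk_uint8_t **buf, duk_size_t new_size) {
	duk_uint8_t *new_buf;

	new_buf = (duk_uint8_t *) duk_heap_mem_realloc(heap, (void *) *buf, new_size);
	if (new_buf == NULL && new_size > 0) {
		return 0;  /* *buf is still valid; don't overwrite it with NULL */
	}
	*buf = new_buf;
	return 1;
}
#endif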

/*
 *  Reallocate memory with garbage collection, using a callback to provide
 *  the current allocated pointer.  This variant is used when a mark-and-sweep
 *  (e.g. finalizers) might change the original pointer.
 */

#ifdef DUK_USE_MARK_AND_SWEEP
DUK_INTERNAL void *duk_heap_mem_realloc_indirect(duk_heap *heap, duk_mem_getptr cb, void *ud, duk_size_t newsize) {
	void *res;
	duk_bool_t rc;
	duk_small_int_t i;

	DUK_ASSERT(heap != NULL);
	DUK_ASSERT_DISABLE(newsize >= 0);

	/*
	 *  Voluntary periodic GC (if enabled)
	 */

	DUK__VOLUNTARY_PERIODIC_GC(heap);

	/*
	 *  First attempt
	 */

#ifdef DUK_USE_GC_TORTURE
	/* simulate alloc failure on every realloc (except when mark-and-sweep is running) */
	if (!DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap)) {
		DUK_DDD(DUK_DDDPRINT("gc torture enabled, pretend that first indirect realloc attempt fails"));
		res = NULL;
		DUK_UNREF(res);
		goto skip_attempt;
	}
#endif
	res = heap->realloc_func(heap->heap_udata, cb(heap, ud), newsize);
	if (res || newsize == 0) {
		/* for zero size allocations NULL is allowed */
		return res;
	}
#ifdef DUK_USE_GC_TORTURE
 skip_attempt:
#endif

	DUK_D(DUK_DPRINT("first indirect realloc attempt failed, attempt to gc and retry"));

	/*
	 *  Avoid a GC if GC is already running.  See duk_heap_mem_alloc().
	 */

	if (DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap)) {
		DUK_D(DUK_DPRINT("duk_heap_mem_realloc_indirect() failed, gc in progress (gc skipped), alloc size %ld", (long) newsize));
		return NULL;
	}

	/*
	 *  Retry with several GC attempts.  Initial attempts are made without
	 *  emergency mode; later attempts use emergency mode which minimizes
	 *  memory allocations forcibly.
	 */

	for (i = 0; i < DUK_HEAP_ALLOC_FAIL_MARKANDSWEEP_LIMIT; i++) {
		duk_small_uint_t flags;

#ifdef DUK_USE_ASSERTIONS
		void *ptr_pre;  /* ptr before mark-and-sweep */
		void *ptr_post;
#endif

#ifdef DUK_USE_ASSERTIONS
		ptr_pre = cb(heap, ud);
#endif
		flags = 0;
		if (i >= DUK_HEAP_ALLOC_FAIL_MARKANDSWEEP_EMERGENCY_LIMIT - 1) {
			flags |= DUK_MS_FLAG_EMERGENCY;
		}

		rc = duk_heap_mark_and_sweep(heap, flags);
		DUK_UNREF(rc);
#ifdef DUK_USE_ASSERTIONS
		ptr_post = cb(heap, ud);
		if (ptr_pre != ptr_post) {
			/* useful for debugging */
			DUK_DD(DUK_DDPRINT("note: base pointer changed by mark-and-sweep: %p -> %p",
			                   (void *) ptr_pre, (void *) ptr_post));
		}
#endif

		/* Note: key issue here is to re-lookup the base pointer on every attempt.
		 * The pointer being reallocated may change after every mark-and-sweep.
		 */

		res = heap->realloc_func(heap->heap_udata, cb(heap, ud), newsize);
		if (res || newsize == 0) {
			DUK_D(DUK_DPRINT("duk_heap_mem_realloc_indirect() succeeded after gc (pass %ld), alloc size %ld",
			                 (long) (i + 1), (long) newsize));
			return res;
		}
	}

	DUK_D(DUK_DPRINT("duk_heap_mem_realloc_indirect() failed even after gc, alloc size %ld", (long) newsize));
	return NULL;
}
#else  /* DUK_USE_MARK_AND_SWEEP */
/* saves a few instructions to have this wrapper (see comment on duk_heap_mem_alloc) */
DUK_INTERNAL void *duk_heap_mem_realloc_indirect(duk_heap *heap, duk_mem_getptr cb, void *ud, duk_size_t newsize) {
	return heap->realloc_func(heap->heap_udata, cb(heap, ud), newsize);
}
#endif  /* DUK_USE_MARK_AND_SWEEP */
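
/*
 *  Illustration (not part of the original file): a sketch of a duk_mem_getptr
 *  style callback.  The indirect variant exists because a mark-and-sweep pass
 *  (e.g. through finalizers) may relocate the allocation being resized, so the
 *  current base pointer is re-read from a stable owner structure before every
 *  attempt.  The example__owner type and helper are hypothetical.
 */
#if 0
typedef struct {
	void *data;       /* current allocation; may be replaced by GC side effects */
	duk_size_t size;
} example__owner;

static void *example__get_data_ptr(duk_heap *heap, void *ud) {
	example__owner *owner = (example__owner *) ud;
	DUK_UNREF(heap);
	return owner->data;
}

/* usage sketch:
 *     p = duk_heap_mem_realloc_indirect(heap, example__get_data_ptr, (void *) &owner, new_size);
 */
#endif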

/*
 *  Free memory
 */

#ifdef DUK_USE_MARK_AND_SWEEP
DUK_INTERNAL void duk_heap_mem_free(duk_heap *heap, void *ptr) {
	DUK_ASSERT(heap != NULL);
	/* ptr may be NULL */

	/* Must behave like a no-op with NULL and any pointer returned from
	 * malloc/realloc with zero size.
	 */
	heap->free_func(heap->heap_udata, ptr);

	/* Count free operations toward triggering a GC but never actually trigger
	 * a GC from a free.  Otherwise code which frees internal structures would
	 * need to put in NULLs at every turn to ensure the object is always in
	 * consistent state for a mark-and-sweep.
	 */
#ifdef DUK_USE_VOLUNTARY_GC
	heap->mark_and_sweep_trigger_counter--;
#endif
}
#else
/* saves a few instructions to have this wrapper (see comment on duk_heap_mem_alloc) */
DUK_INTERNAL void duk_heap_mem_free(duk_heap *heap, void *ptr) {
	DUK_ASSERT(heap != NULL);
	/* ptr may be NULL */

	/* Note: must behave like a no-op with NULL and any pointer
	 * returned from malloc/realloc with zero size.
	 */
	heap->free_func(heap->heap_udata, ptr);
}
#endif
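
/*
 *  Illustration (not part of the original file): because duk_heap_mem_free()
 *  never runs a GC pass itself (it only decrements the voluntary GC trigger
 *  counter), teardown code can free the members of an internal structure one
 *  by one without NULLing each field to keep the object consistent for
 *  mark-and-sweep.  The example__thing type and helper are hypothetical.
 */
#if 0
typedef struct {
	void *part_a;
	void *part_b;
} example__thing;

static void example__free_thing(duk_heap *heap, example__thing *t) {
	duk_heap_mem_free(heap, t->part_a);  /* no GC can run between these frees */
	duk_heap_mem_free(heap, t->part_b);
	duk_heap_mem_free(heap, (void *) t);
}
#endif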