/*
 *  Reference counting implementation.
 */

#include "duk_internal.h"

#ifdef DUK_USE_REFERENCE_COUNTING

#ifndef DUK_USE_DOUBLE_LINKED_HEAP
#error internal error, reference counting requires a double linked heap
#endif

/*
 *  Misc
 */

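/* Queue a heaphdr whose refcount has reached zero onto the refzero work
 * list.  The element is appended at the tail so that the list head is never
 * disturbed: duk__refzero_free_pending() may be processing the list from the
 * head at the same time.
 */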
DUK_LOCAL void duk__queue_refzero(duk_heap *heap, duk_heaphdr *hdr) {
	/* tail insert: don't disturb head in case refzero is running */

	if (heap->refzero_list != NULL) {
		duk_heaphdr *hdr_prev;

		hdr_prev = heap->refzero_list_tail;
		DUK_ASSERT(hdr_prev != NULL);
		DUK_ASSERT(DUK_HEAPHDR_GET_NEXT(heap, hdr_prev) == NULL);

		DUK_HEAPHDR_SET_NEXT(heap, hdr, NULL);
		DUK_HEAPHDR_SET_PREV(heap, hdr, hdr_prev);
		DUK_HEAPHDR_SET_NEXT(heap, hdr_prev, hdr);
		DUK_ASSERT_HEAPHDR_LINKS(heap, hdr);
		DUK_ASSERT_HEAPHDR_LINKS(heap, hdr_prev);
		heap->refzero_list_tail = hdr;
	} else {
		DUK_ASSERT(heap->refzero_list_tail == NULL);
		DUK_HEAPHDR_SET_NEXT(heap, hdr, NULL);
		DUK_HEAPHDR_SET_PREV(heap, hdr, NULL);
		DUK_ASSERT_HEAPHDR_LINKS(heap, hdr);
		heap->refzero_list = hdr;
		heap->refzero_list_tail = hdr;
	}
}

/*
 *  Heap object refcount finalization.
 *
 *  When an object is about to be freed, all other objects it refers to must
 *  be decref'd.  Refcount finalization does NOT free the object or its inner
 *  allocations (mark-and-sweep shares these helpers); it just updates the
 *  refcounts.
 *
 *  Note that any of the decrefs may cause a refcount to drop to zero, BUT
 *  the object will not be processed inline; because refzero is already
 *  running, such objects are simply queued onto the refzero list and
 *  processed later.  This eliminates C recursion.
 */
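
/* Illustration: suppose object A holds the only reference to object B and A
 * is being freed.  Decref'ing B drops its refcount to zero; duk_heaphdr_refzero()
 * then just removes B from heap_allocated and appends it to the tail of
 * heap->refzero_list (the nested duk__refzero_free_pending() call returns
 * immediately because REFZERO_FREE_RUNNING is set).  The outer free loop later
 * picks B up from the list head, so arbitrarily deep reference chains are
 * freed with constant C stack.
 */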

DUK_LOCAL void duk__refcount_finalize_hobject(duk_hthread *thr, duk_hobject *h) {
	duk_uint_fast32_t i;

	DUK_ASSERT(h);
	DUK_ASSERT(DUK_HEAPHDR_GET_TYPE((duk_heaphdr *) h) == DUK_HTYPE_OBJECT);

	/* XXX: better to get base and walk forwards? */

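	/* Entry part: decref the key and either the accessor getter/setter pair
	 * or the plain value of each used slot.
	 */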
	for (i = 0; i < (duk_uint_fast32_t) DUK_HOBJECT_GET_ENEXT(h); i++) {
		duk_hstring *key = DUK_HOBJECT_E_GET_KEY(thr->heap, h, i);
		if (!key) {
			continue;
		}
		duk_heaphdr_decref(thr, (duk_heaphdr *) key);
		if (DUK_HOBJECT_E_SLOT_IS_ACCESSOR(thr->heap, h, i)) {
			duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) DUK_HOBJECT_E_GET_VALUE_GETTER(thr->heap, h, i));
			duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) DUK_HOBJECT_E_GET_VALUE_SETTER(thr->heap, h, i));
		} else {
			duk_tval_decref(thr, DUK_HOBJECT_E_GET_VALUE_TVAL_PTR(thr->heap, h, i));
		}
	}

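	/* Array part: plain values only, no keys. */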
	for (i = 0; i < (duk_uint_fast32_t) DUK_HOBJECT_GET_ASIZE(h); i++) {
		duk_tval_decref(thr, DUK_HOBJECT_A_GET_VALUE_PTR(thr->heap, h, i));
	}

	/* hash part is a 'weak reference' and does not contribute */

	duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) DUK_HOBJECT_GET_PROTOTYPE(thr->heap, h));

	if (DUK_HOBJECT_IS_COMPILEDFUNCTION(h)) {
		duk_hcompiledfunction *f = (duk_hcompiledfunction *) h;
		duk_tval *tv, *tv_end;
		duk_hobject **funcs, **funcs_end;

		if (DUK_HCOMPILEDFUNCTION_GET_DATA(thr->heap, f) != NULL) {
			tv = DUK_HCOMPILEDFUNCTION_GET_CONSTS_BASE(thr->heap, f);
			tv_end = DUK_HCOMPILEDFUNCTION_GET_CONSTS_END(thr->heap, f);
			while (tv < tv_end) {
				duk_tval_decref(thr, tv);
				tv++;
			}

			funcs = DUK_HCOMPILEDFUNCTION_GET_FUNCS_BASE(thr->heap, f);
			funcs_end = DUK_HCOMPILEDFUNCTION_GET_FUNCS_END(thr->heap, f);
			while (funcs < funcs_end) {
				duk_heaphdr_decref(thr, (duk_heaphdr *) *funcs);
				funcs++;
			}
		} else {
			/* May happen in some out-of-memory corner cases. */
			DUK_D(DUK_DPRINT("duk_hcompiledfunction 'data' is NULL, skipping decref"));
		}

		/* The 'data' buffer may be NULL in the out-of-memory case above. */
		duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) DUK_HCOMPILEDFUNCTION_GET_DATA(thr->heap, f));
	} else if (DUK_HOBJECT_IS_NATIVEFUNCTION(h)) {
		duk_hnativefunction *f = (duk_hnativefunction *) h;
		DUK_UNREF(f);
		/* nothing to finalize */
	} else if (DUK_HOBJECT_IS_BUFFEROBJECT(h)) {
		duk_hbufferobject *b = (duk_hbufferobject *) h;
		if (b->buf) {
			duk_heaphdr_decref(thr, (duk_heaphdr *) b->buf);
		}
	} else if (DUK_HOBJECT_IS_THREAD(h)) {
		duk_hthread *t = (duk_hthread *) h;
		duk_tval *tv;

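		/* Value stack: decref all values below the stack top. */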
		tv = t->valstack;
		while (tv < t->valstack_top) {
			duk_tval_decref(thr, tv);
			tv++;
		}

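		/* Callstack activations: decref the function, the variable and
		 * lexical environments, and, when DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
		 * is enabled, the previous caller reference.
		 */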
		for (i = 0; i < (duk_uint_fast32_t) t->callstack_top; i++) {
			duk_activation *act = t->callstack + i;
			duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) DUK_ACT_GET_FUNC(act));
			duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) act->var_env);
			duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) act->lex_env);
#ifdef DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
			duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) act->prev_caller);
#endif
		}

#if 0 /* nothing now */
		for (i = 0; i < (duk_uint_fast32_t) t->catchstack_top; i++) {
			duk_catcher *cat = t->catchstack + i;
		}
#endif

		for (i = 0; i < DUK_NUM_BUILTINS; i++) {
			duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) t->builtins[i]);
		}

		duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) t->resumer);
	}
}

DUK_INTERNAL void duk_heaphdr_refcount_finalize(duk_hthread *thr, duk_heaphdr *hdr) {
	DUK_ASSERT(hdr);

	switch ((int) DUK_HEAPHDR_GET_TYPE(hdr)) {
	case DUK_HTYPE_OBJECT:
		duk__refcount_finalize_hobject(thr, (duk_hobject *) hdr);
		break;
	case DUK_HTYPE_BUFFER:
		/* nothing to finalize */
		break;
	case DUK_HTYPE_STRING:
		/* cannot happen: strings are not put into refzero list (they don't even have the next/prev pointers) */
	default:
		DUK_UNREACHABLE();
	}
}

#if defined(DUK_USE_REFZERO_FINALIZER_TORTURE)
DUK_LOCAL duk_ret_t duk__refcount_fake_finalizer(duk_context *ctx) {
	DUK_UNREF(ctx);
	DUK_D(DUK_DPRINT("fake refcount torture finalizer executed"));
#if 0
	DUK_DD(DUK_DDPRINT("fake torture finalizer for: %!T", duk_get_tval(ctx, 0)));
#endif
	/* Require a lot of stack to force a value stack grow/shrink. */
	duk_require_stack(ctx, 100000);

	/* XXX: do something to force a callstack grow/shrink, perhaps
	 * just a manual forced resize?
	 */
	return 0;
}

DUK_LOCAL void duk__refcount_run_torture_finalizer(duk_hthread *thr, duk_hobject *obj) {
	duk_context *ctx;
	duk_int_t rc;

	DUK_ASSERT(thr != NULL);
	DUK_ASSERT(obj != NULL);
	ctx = (duk_context *) thr;

	/* Avoid fake finalization for the duk__refcount_fake_finalizer function
	 * itself; otherwise we'd end up in infinite recursion.
	 */
	if (DUK_HOBJECT_HAS_NATIVEFUNCTION(obj)) {
		if (((duk_hnativefunction *) obj)->func == duk__refcount_fake_finalizer) {
			DUK_DD(DUK_DDPRINT("avoid fake torture finalizer for duk__refcount_fake_finalizer itself"));
			return;
		}
	}
	/* Avoid fake finalization when the callstack limit has been reached.
	 * Otherwise a callstack limit error would be created, then refzero'ed,
	 * and we'd be in an endless loop.
	 */
	if (thr->heap->call_recursion_depth >= thr->heap->call_recursion_limit ||
	    thr->callstack_size + 2 * DUK_CALLSTACK_GROW_STEP >= thr->callstack_max /*approximate*/) {
		DUK_D(DUK_DPRINT("call recursion depth reached, avoid fake torture finalizer"));
		return;
	}

	/* Run the fake finalizer.  Avoid creating new refzero queue entries
	 * so that we are not forced into an endless loop.
	 */
	duk_push_c_function(ctx, duk__refcount_fake_finalizer, 1 /*nargs*/);
	duk_push_hobject(ctx, obj);
	rc = duk_pcall(ctx, 1);
	DUK_UNREF(rc); /* ignored */
	duk_pop(ctx);
}
#endif /* DUK_USE_REFZERO_FINALIZER_TORTURE */

/*
 *  Refcount memory freeing loop.
 *
 *  Frees objects on the refzero_list until the list becomes empty.  When an
 *  object is freed, the objects it references get decref'd, which may cause
 *  further objects to be queued for freeing.
 *
 *  This could be expanded to allow incremental freeing: just bail out
 *  early and resume at a future alloc/decref/refzero.
 */

DUK_LOCAL void duk__refzero_free_pending(duk_hthread *thr) {
	duk_heaphdr *h1, *h2;
	duk_heap *heap;
	duk_int_t count = 0;

	DUK_ASSERT(thr != NULL);
	DUK_ASSERT(thr->heap != NULL);
	heap = thr->heap;
	DUK_ASSERT(heap != NULL);

	/*
	 *  Detect recursive invocation
	 */

	if (DUK_HEAP_HAS_REFZERO_FREE_RUNNING(heap)) {
		DUK_DDD(DUK_DDDPRINT("refzero free running, skip run"));
		return;
	}

	/*
	 *  Churn refzero_list until empty
	 */

	DUK_HEAP_SET_REFZERO_FREE_RUNNING(heap);
	while (heap->refzero_list) {
		duk_hobject *obj;
		duk_bool_t rescued = 0;

		/*
		 *  Pick an object from the head (don't remove yet).
		 */

		h1 = heap->refzero_list;
		obj = (duk_hobject *) h1;
		DUK_DD(DUK_DDPRINT("refzero processing %p: %!O", (void *) h1, (duk_heaphdr *) h1));
		DUK_ASSERT(DUK_HEAPHDR_GET_PREV(heap, h1) == NULL);
		DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(h1) == DUK_HTYPE_OBJECT); /* currently, always the case */

#if defined(DUK_USE_REFZERO_FINALIZER_TORTURE)
		/* Torture option to shake out finalizer side effect issues:
		 * make a bogus function call for every finalizable object,
		 * essentially simulating the case where everything has a
		 * finalizer.
		 */
		DUK_DD(DUK_DDPRINT("refzero torture enabled, fake finalizer"));
		DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(h1) == 0);
		DUK_HEAPHDR_PREINC_REFCOUNT(h1); /* bump refcount to prevent refzero during finalizer processing */
		duk__refcount_run_torture_finalizer(thr, obj); /* must never longjmp */
		DUK_HEAPHDR_PREDEC_REFCOUNT(h1); /* remove artificial bump */
		DUK_ASSERT_DISABLE(h1->h_refcount >= 0); /* refcount is unsigned, so always true */
#endif

		/*
		 *  Finalizer check.
		 *
		 *  Note: running a finalizer may have arbitrary side effects, e.g.
		 *  queue more objects on refzero_list (tail), or even trigger a
		 *  mark-and-sweep.
		 *
		 *  Note: the quick reject check should match the vast majority of
		 *  objects and must be safe (never throw any errors).
		 */

		/* An object may already have FINALIZED set here if it was finalized
		 * by mark-and-sweep on a previous run and its refcount then dropped
		 * to zero.  The finalizer is not run again here.
		 */

		/* The finalizer is looked up from the object and up its prototype
		 * chain (which allows inherited finalizers).
		 */
		if (duk_hobject_hasprop_raw(thr, obj, DUK_HTHREAD_STRING_INT_FINALIZER(thr))) {
			DUK_DDD(DUK_DDDPRINT("object has a finalizer, run it"));

			DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(h1) == 0);
			DUK_HEAPHDR_PREINC_REFCOUNT(h1); /* bump refcount to prevent refzero during finalizer processing */

			duk_hobject_run_finalizer(thr, obj); /* must never longjmp */
			DUK_ASSERT(DUK_HEAPHDR_HAS_FINALIZED(h1)); /* duk_hobject_run_finalizer() sets */

			DUK_HEAPHDR_PREDEC_REFCOUNT(h1); /* remove artificial bump */
			DUK_ASSERT_DISABLE(h1->h_refcount >= 0); /* refcount is unsigned, so always true */

			if (DUK_HEAPHDR_GET_REFCOUNT(h1) != 0) {
				DUK_DDD(DUK_DDDPRINT("-> object refcount after finalization non-zero, object will be rescued"));
				rescued = 1;
			} else {
				DUK_DDD(DUK_DDDPRINT("-> object refcount still zero after finalization, object will be freed"));
			}
		}

		/* The refzero list head is still the same.  This holds even if the
		 * finalizer queued more refzero objects; they are inserted at the tail.
		 */
		DUK_ASSERT(h1 == heap->refzero_list);

		/*
		 *  Remove the object from the refzero list.  This cannot be done
		 *  before a possible finalizer has been executed; the finalizer
		 *  may trigger a mark-and-sweep, and mark-and-sweep must be able
		 *  to traverse a complete refzero_list.
		 */

		h2 = DUK_HEAPHDR_GET_NEXT(heap, h1);
		if (h2) {
			DUK_HEAPHDR_SET_PREV(heap, h2, NULL); /* not strictly necessary */
			heap->refzero_list = h2;
		} else {
			heap->refzero_list = NULL;
			heap->refzero_list_tail = NULL;
		}

		/*
		 *  Rescue or free.
		 */

		if (rescued) {
			/* yes -> move back to heap allocated */
			DUK_DD(DUK_DDPRINT("object rescued during refcount finalization: %p", (void *) h1));
			DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(h1));
			DUK_ASSERT(DUK_HEAPHDR_HAS_FINALIZED(h1));
			DUK_HEAPHDR_CLEAR_FINALIZED(h1);
			h2 = heap->heap_allocated;
			DUK_HEAPHDR_SET_PREV(heap, h1, NULL);
			if (h2) {
				DUK_HEAPHDR_SET_PREV(heap, h2, h1);
			}
			DUK_HEAPHDR_SET_NEXT(heap, h1, h2);
			DUK_ASSERT_HEAPHDR_LINKS(heap, h1);
			DUK_ASSERT_HEAPHDR_LINKS(heap, h2);
			heap->heap_allocated = h1;
		} else {
			/* no -> decref members, then free */
			duk__refcount_finalize_hobject(thr, obj);
			duk_heap_free_heaphdr_raw(heap, h1);
		}

		count++;
	}
	DUK_HEAP_CLEAR_REFZERO_FREE_RUNNING(heap);

	DUK_DDD(DUK_DDDPRINT("refzero processed %ld objects", (long) count));

	/*
	 *  Once the whole refzero cascade has been freed, check for
	 *  a voluntary mark-and-sweep.
	 */

#if defined(DUK_USE_MARK_AND_SWEEP) && defined(DUK_USE_VOLUNTARY_GC)
	/* 'count' is more or less comparable to the normal trigger counter
	 * update which happens on memory block (re)allocation.
	 */
	heap->mark_and_sweep_trigger_counter -= count;
	if (heap->mark_and_sweep_trigger_counter <= 0) {
		if (DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap)) {
			DUK_D(DUK_DPRINT("mark-and-sweep in progress -> skip voluntary mark-and-sweep now"));
		} else {
			duk_bool_t rc;
			duk_small_uint_t flags = 0; /* not emergency */
			DUK_D(DUK_DPRINT("refcount triggering mark-and-sweep"));
			rc = duk_heap_mark_and_sweep(heap, flags);
			DUK_UNREF(rc);
			DUK_D(DUK_DPRINT("refcount triggered mark-and-sweep => rc %ld", (long) rc));
		}
	}
#endif /* DUK_USE_MARK_AND_SWEEP && DUK_USE_VOLUNTARY_GC */
}

/*
 *  Incref and decref functions.
 *
 *  Decref may trigger immediate refzero handling, which may free and finalize
 *  an arbitrary number of objects.
 */
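
/* Illustrative sketch only (kept out of the build): the usual ordering when a
 * duk_tval slot is overwritten with refcounting enabled.  The new value is
 * incref'd before the old value is decref'd, because the decref may trigger
 * refzero handling (finalizers, possibly a mark-and-sweep) with arbitrary side
 * effects.  The helper name below is hypothetical; real call sites normally go
 * through the UPDREF-style helper macros elsewhere in the Duktape sources
 * instead of open-coding this.
 */
#if 0
DUK_LOCAL void duk__example_update_slot(duk_hthread *thr, duk_tval *tv_slot, duk_tval *tv_new) {
	duk_tval tv_old;

	DUK_TVAL_SET_TVAL(&tv_old, tv_slot);  /* stash old value */
	DUK_TVAL_SET_TVAL(tv_slot, tv_new);   /* overwrite slot */
	duk_tval_incref(tv_new);              /* incref new value first */
	duk_tval_decref(thr, &tv_old);        /* decref old value last; may have side effects */
}
#endif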

DUK_INTERNAL void duk_heaphdr_refzero(duk_hthread *thr, duk_heaphdr *h) {
	duk_heap *heap;

	DUK_ASSERT(thr != NULL);
	DUK_ASSERT(h != NULL);

	heap = thr->heap;
	DUK_DDD(DUK_DDDPRINT("refzero %p: %!O", (void *) h, (duk_heaphdr *) h));

	/*
	 *  Refzero handling is skipped entirely if (1) mark-and-sweep is
	 *  running or (2) execution is paused in the debugger.  The objects
	 *  are left in the heap, and will be freed by mark-and-sweep or
	 *  eventual heap destruction.
	 *
	 *  This is necessary during mark-and-sweep because refcounts are also
	 *  updated during the sweep phase (otherwise objects referenced by a
	 *  swept object would have incorrect refcounts), and those updates end
	 *  up calling this function.  This could be avoided by using separate
	 *  decref macros in mark-and-sweep; however, mark-and-sweep also calls
	 *  finalizers, which would use the ordinary decref macros anyway and
	 *  still end up here.
	 *
	 *  This check must remain enabled even when mark-and-sweep support has
	 *  been disabled: the flag is also used during heap destruction when
	 *  running finalizers for remaining objects, and it prevents objects
	 *  from being moved around in the heap linked lists.
	 */

	/* XXX: ideally this would be just one flag (maybe a derived one) so
	 * that a single bit test is sufficient to check the condition.
	 */
#if defined(DUK_USE_DEBUGGER_SUPPORT)
	if (DUK_UNLIKELY(DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap) || DUK_HEAP_IS_PAUSED(heap))) {
#else
	if (DUK_UNLIKELY(DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap))) {
#endif
		DUK_DDD(DUK_DDDPRINT("refzero handling suppressed when mark-and-sweep running, object: %p", (void *) h));
		return;
	}

	switch ((duk_small_int_t) DUK_HEAPHDR_GET_TYPE(h)) {
	case DUK_HTYPE_STRING:
		/*
		 *  Strings have no internal references but do have "weak"
		 *  references in the string cache.  Also note that strings
		 *  are not on the heap_allocated list like other heap
		 *  elements.
		 */

		duk_heap_strcache_string_remove(heap, (duk_hstring *) h);
		duk_heap_string_remove(heap, (duk_hstring *) h);
		duk_heap_free_heaphdr_raw(heap, h);
		break;

	case DUK_HTYPE_OBJECT:
		/*
		 *  Objects have internal references.  Must finalize through
		 *  the "refzero" work list.
		 */

		duk_heap_remove_any_from_heap_allocated(heap, h);
		duk__queue_refzero(heap, h);
		duk__refzero_free_pending(thr);
		break;

	case DUK_HTYPE_BUFFER:
		/*
		 *  Buffers have no internal references.  However, a dynamic
		 *  buffer has a separate allocation for the buffer.  This is
		 *  freed by duk_heap_free_heaphdr_raw().
		 */

		duk_heap_remove_any_from_heap_allocated(heap, h);
		duk_heap_free_heaphdr_raw(heap, h);
		break;

	default:
		DUK_D(DUK_DPRINT("invalid heap type in decref: %ld", (long) DUK_HEAPHDR_GET_TYPE(h)));
		DUK_UNREACHABLE();
	}
}

#if !defined(DUK_USE_FAST_REFCOUNT_DEFAULT)
DUK_INTERNAL void duk_tval_incref(duk_tval *tv) {
	DUK_ASSERT(tv != NULL);

	if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(tv)) {
		duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
		DUK_ASSERT(h != NULL);
		DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
		DUK_ASSERT_DISABLE(h->h_refcount >= 0);
		DUK_HEAPHDR_PREINC_REFCOUNT(h);
	}
}
#endif

#if 0 /* unused */
DUK_INTERNAL void duk_tval_incref_allownull(duk_tval *tv) {
	if (tv == NULL) {
		return;
	}
	if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(tv)) {
		duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
		DUK_ASSERT(h != NULL);
		DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
		DUK_ASSERT_DISABLE(h->h_refcount >= 0);
		DUK_HEAPHDR_PREINC_REFCOUNT(h);
	}
}
#endif

DUK_INTERNAL void duk_tval_decref(duk_hthread *thr, duk_tval *tv) {
	DUK_ASSERT(thr != NULL);
	DUK_ASSERT(tv != NULL);

	if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(tv)) {
		duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
		DUK_ASSERT(h != NULL);
		DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
		duk_heaphdr_decref(thr, h);
	}
}

#if 0 /* unused */
DUK_INTERNAL void duk_tval_decref_allownull(duk_hthread *thr, duk_tval *tv) {
	DUK_ASSERT(thr != NULL);

	if (tv == NULL) {
		return;
	}
	if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(tv)) {
		duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
		DUK_ASSERT(h != NULL);
		DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
		duk_heaphdr_decref(thr, h);
	}
}
#endif

#if !defined(DUK_USE_FAST_REFCOUNT_DEFAULT)
DUK_INTERNAL void duk_heaphdr_incref(duk_heaphdr *h) {
	DUK_ASSERT(h != NULL);
	DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
	DUK_ASSERT_DISABLE(DUK_HEAPHDR_GET_REFCOUNT(h) >= 0);

#if defined(DUK_USE_ROM_OBJECTS)
	if (DUK_HEAPHDR_HAS_READONLY(h)) {
		return;
	}
#endif

	DUK_HEAPHDR_PREINC_REFCOUNT(h);
}
#endif

#if 0 /* unused */
DUK_INTERNAL void duk_heaphdr_incref_allownull(duk_heaphdr *h) {
	if (h == NULL) {
		return;
	}
	DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
	DUK_ASSERT_DISABLE(DUK_HEAPHDR_GET_REFCOUNT(h) >= 0);

	DUK_HEAPHDR_PREINC_REFCOUNT(h);
}
#endif

DUK_INTERNAL void duk_heaphdr_decref(duk_hthread *thr, duk_heaphdr *h) {
	DUK_ASSERT(thr != NULL);
	DUK_ASSERT(thr->heap != NULL);
	DUK_ASSERT(h != NULL);
	DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
	DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(h) >= 1);

#if defined(DUK_USE_ROM_OBJECTS)
	if (DUK_HEAPHDR_HAS_READONLY(h)) {
		return;
	}
#endif
	if (DUK_HEAPHDR_PREDEC_REFCOUNT(h) != 0) {
		return;
	}
	duk_heaphdr_refzero(thr, h);
}

DUK_INTERNAL void duk_heaphdr_decref_allownull(duk_hthread *thr, duk_heaphdr *h) {
	DUK_ASSERT(thr != NULL);
	DUK_ASSERT(thr->heap != NULL);

	if (h == NULL) {
		return;
	}
	DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));

#if defined(DUK_USE_ROM_OBJECTS)
	if (DUK_HEAPHDR_HAS_READONLY(h)) {
		return;
	}
#endif
	DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(h) >= 1);
	if (DUK_HEAPHDR_PREDEC_REFCOUNT(h) != 0) {
		return;
	}
	duk_heaphdr_refzero(thr, h);
}

#else

/* no refcounting */

#endif /* DUK_USE_REFERENCE_COUNTING */