/*
 *  Provenance: ceph/src/civetweb/src/third_party/duktape-1.5.2/src-separate/duk_heap_refcount.c
 *  (from ceph.git, commit "update sources to ceph Nautilus 14.2.1"; mirrored via git.proxmox.com)
 */
1 /*
2 * Reference counting implementation.
3 */
4
5 #include "duk_internal.h"
6
7 #ifdef DUK_USE_REFERENCE_COUNTING
8
9 #ifndef DUK_USE_DOUBLE_LINKED_HEAP
10 #error internal error, reference counting requires a double linked heap
11 #endif
12
13 /*
14 * Misc
15 */
16
17 DUK_LOCAL void duk__queue_refzero(duk_heap *heap, duk_heaphdr *hdr) {
18 /* tail insert: don't disturb head in case refzero is running */
19
20 if (heap->refzero_list != NULL) {
21 duk_heaphdr *hdr_prev;
22
23 hdr_prev = heap->refzero_list_tail;
24 DUK_ASSERT(hdr_prev != NULL);
25 DUK_ASSERT(DUK_HEAPHDR_GET_NEXT(heap, hdr_prev) == NULL);
26
27 DUK_HEAPHDR_SET_NEXT(heap, hdr, NULL);
28 DUK_HEAPHDR_SET_PREV(heap, hdr, hdr_prev);
29 DUK_HEAPHDR_SET_NEXT(heap, hdr_prev, hdr);
30 DUK_ASSERT_HEAPHDR_LINKS(heap, hdr);
31 DUK_ASSERT_HEAPHDR_LINKS(heap, hdr_prev);
32 heap->refzero_list_tail = hdr;
33 } else {
34 DUK_ASSERT(heap->refzero_list_tail == NULL);
35 DUK_HEAPHDR_SET_NEXT(heap, hdr, NULL);
36 DUK_HEAPHDR_SET_PREV(heap, hdr, NULL);
37 DUK_ASSERT_HEAPHDR_LINKS(heap, hdr);
38 heap->refzero_list = hdr;
39 heap->refzero_list_tail = hdr;
40 }
41 }
42
43 /*
44 * Heap object refcount finalization.
45 *
46 * When an object is about to be freed, all other objects it refers to must
47 * be decref'd. Refcount finalization does NOT free the object or its inner
48 * allocations (mark-and-sweep shares these helpers), it just manipulates
49 * the refcounts.
50 *
51 * Note that any of the decref's may cause a refcount to drop to zero, BUT
52 * it will not be processed inline; instead, because refzero is already
53 * running, the objects will just be queued to refzero list and processed
54 * later. This eliminates C recursion.
55 */
56
57 DUK_LOCAL void duk__refcount_finalize_hobject(duk_hthread *thr, duk_hobject *h) {
58 duk_uint_fast32_t i;
59
60 DUK_ASSERT(h);
61 DUK_ASSERT(DUK_HEAPHDR_GET_TYPE((duk_heaphdr *) h) == DUK_HTYPE_OBJECT);
62
63 /* XXX: better to get base and walk forwards? */
64
65 for (i = 0; i < (duk_uint_fast32_t) DUK_HOBJECT_GET_ENEXT(h); i++) {
66 duk_hstring *key = DUK_HOBJECT_E_GET_KEY(thr->heap, h, i);
67 if (!key) {
68 continue;
69 }
70 duk_heaphdr_decref(thr, (duk_heaphdr *) key);
71 if (DUK_HOBJECT_E_SLOT_IS_ACCESSOR(thr->heap, h, i)) {
72 duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) DUK_HOBJECT_E_GET_VALUE_GETTER(thr->heap, h, i));
73 duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) DUK_HOBJECT_E_GET_VALUE_SETTER(thr->heap, h, i));
74 } else {
75 duk_tval_decref(thr, DUK_HOBJECT_E_GET_VALUE_TVAL_PTR(thr->heap, h, i));
76 }
77 }
78
79 for (i = 0; i < (duk_uint_fast32_t) DUK_HOBJECT_GET_ASIZE(h); i++) {
80 duk_tval_decref(thr, DUK_HOBJECT_A_GET_VALUE_PTR(thr->heap, h, i));
81 }
82
83 /* hash part is a 'weak reference' and does not contribute */
84
85 duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) DUK_HOBJECT_GET_PROTOTYPE(thr->heap, h));
86
87 if (DUK_HOBJECT_IS_COMPILEDFUNCTION(h)) {
88 duk_hcompiledfunction *f = (duk_hcompiledfunction *) h;
89 duk_tval *tv, *tv_end;
90 duk_hobject **funcs, **funcs_end;
91
92 if (DUK_HCOMPILEDFUNCTION_GET_DATA(thr->heap, f) != NULL) {
93 tv = DUK_HCOMPILEDFUNCTION_GET_CONSTS_BASE(thr->heap, f);
94 tv_end = DUK_HCOMPILEDFUNCTION_GET_CONSTS_END(thr->heap, f);
95 while (tv < tv_end) {
96 duk_tval_decref(thr, tv);
97 tv++;
98 }
99
100 funcs = DUK_HCOMPILEDFUNCTION_GET_FUNCS_BASE(thr->heap, f);
101 funcs_end = DUK_HCOMPILEDFUNCTION_GET_FUNCS_END(thr->heap, f);
102 while (funcs < funcs_end) {
103 duk_heaphdr_decref(thr, (duk_heaphdr *) *funcs);
104 funcs++;
105 }
106 } else {
107 /* May happen in some out-of-memory corner cases. */
108 DUK_D(DUK_DPRINT("duk_hcompiledfunction 'data' is NULL, skipping decref"));
109 }
110
111 duk_heaphdr_decref(thr, (duk_heaphdr *) DUK_HCOMPILEDFUNCTION_GET_DATA(thr->heap, f));
112 } else if (DUK_HOBJECT_IS_NATIVEFUNCTION(h)) {
113 duk_hnativefunction *f = (duk_hnativefunction *) h;
114 DUK_UNREF(f);
115 /* nothing to finalize */
116 } else if (DUK_HOBJECT_IS_BUFFEROBJECT(h)) {
117 duk_hbufferobject *b = (duk_hbufferobject *) h;
118 if (b->buf) {
119 duk_heaphdr_decref(thr, (duk_heaphdr *) b->buf);
120 }
121 } else if (DUK_HOBJECT_IS_THREAD(h)) {
122 duk_hthread *t = (duk_hthread *) h;
123 duk_tval *tv;
124
125 tv = t->valstack;
126 while (tv < t->valstack_top) {
127 duk_tval_decref(thr, tv);
128 tv++;
129 }
130
131 for (i = 0; i < (duk_uint_fast32_t) t->callstack_top; i++) {
132 duk_activation *act = t->callstack + i;
133 duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) DUK_ACT_GET_FUNC(act));
134 duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) act->var_env);
135 duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) act->lex_env);
136 #ifdef DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
137 duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) act->prev_caller);
138 #endif
139 }
140
141 #if 0 /* nothing now */
142 for (i = 0; i < (duk_uint_fast32_t) t->catchstack_top; i++) {
143 duk_catcher *cat = t->catchstack + i;
144 }
145 #endif
146
147 for (i = 0; i < DUK_NUM_BUILTINS; i++) {
148 duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) t->builtins[i]);
149 }
150
151 duk_heaphdr_decref_allownull(thr, (duk_heaphdr *) t->resumer);
152 }
153 }
154
155 DUK_INTERNAL void duk_heaphdr_refcount_finalize(duk_hthread *thr, duk_heaphdr *hdr) {
156 DUK_ASSERT(hdr);
157
158 switch ((int) DUK_HEAPHDR_GET_TYPE(hdr)) {
159 case DUK_HTYPE_OBJECT:
160 duk__refcount_finalize_hobject(thr, (duk_hobject *) hdr);
161 break;
162 case DUK_HTYPE_BUFFER:
163 /* nothing to finalize */
164 break;
165 case DUK_HTYPE_STRING:
166 /* cannot happen: strings are not put into refzero list (they don't even have the next/prev pointers) */
167 default:
168 DUK_UNREACHABLE();
169 }
170 }
171
172 #if defined(DUK_USE_REFZERO_FINALIZER_TORTURE)
/* Fake finalizer used only in refzero torture mode: performs no real work
 * but forces value stack growth/shrink side effects so that latent finalizer
 * side effect bugs get shaken out.
 */
DUK_LOCAL duk_ret_t duk__refcount_fake_finalizer(duk_context *ctx) {
	DUK_UNREF(ctx);
	DUK_D(DUK_DPRINT("fake refcount torture finalizer executed"));
#if 0
	DUK_DD(DUK_DDPRINT("fake torture finalizer for: %!T", duk_get_tval(ctx, 0)));
#endif
	/* Require a lot of stack to force a value stack grow/shrink. */
	duk_require_stack(ctx, 100000);

	/* XXX: do something to force a callstack grow/shrink, perhaps
	 * just a manual forced resize?
	 */
	return 0;
}
187
188 DUK_LOCAL void duk__refcount_run_torture_finalizer(duk_hthread *thr, duk_hobject *obj) {
189 duk_context *ctx;
190 duk_int_t rc;
191
192 DUK_ASSERT(thr != NULL);
193 DUK_ASSERT(obj != NULL);
194 ctx = (duk_context *) thr;
195
196 /* Avoid fake finalization for the duk__refcount_fake_finalizer function
197 * itself, otherwise we're in infinite recursion.
198 */
199 if (DUK_HOBJECT_HAS_NATIVEFUNCTION(obj)) {
200 if (((duk_hnativefunction *) obj)->func == duk__refcount_fake_finalizer) {
201 DUK_DD(DUK_DDPRINT("avoid fake torture finalizer for duk__refcount_fake_finalizer itself"));
202 return;
203 }
204 }
205 /* Avoid fake finalization when callstack limit has been reached.
206 * Otherwise a callstack limit error will be created, then refzero'ed,
207 * and we're in an infinite loop.
208 */
209 if (thr->heap->call_recursion_depth >= thr->heap->call_recursion_limit ||
210 thr->callstack_size + 2 * DUK_CALLSTACK_GROW_STEP >= thr->callstack_max /*approximate*/) {
211 DUK_D(DUK_DPRINT("call recursion depth reached, avoid fake torture finalizer"));
212 return;
213 }
214
215 /* Run fake finalizer. Avoid creating new refzero queue entries
216 * so that we are not forced into a forever loop.
217 */
218 duk_push_c_function(ctx, duk__refcount_fake_finalizer, 1 /*nargs*/);
219 duk_push_hobject(ctx, obj);
220 rc = duk_pcall(ctx, 1);
221 DUK_UNREF(rc); /* ignored */
222 duk_pop(ctx);
223 }
224 #endif /* DUK_USE_REFZERO_FINALIZER_TORTURE */
225
226 /*
227 * Refcount memory freeing loop.
228 *
229 * Frees objects in the refzero_pending list until the list becomes
230 * empty. When an object is freed, its references get decref'd and
231 * may cause further objects to be queued for freeing.
232 *
233 * This could be expanded to allow incremental freeing: just bail out
234 * early and resume at a future alloc/decref/refzero.
235 */
236
/*
 *  Process the refzero_list until it becomes empty: run finalizers, rescue
 *  objects whose refcount rises again during finalization, and free the rest.
 *  Guarded against recursive invocation with the REFZERO_FREE_RUNNING flag,
 *  so decrefs triggered here only queue more work instead of recursing in C.
 */
DUK_LOCAL void duk__refzero_free_pending(duk_hthread *thr) {
	duk_heaphdr *h1, *h2;
	duk_heap *heap;
	duk_int_t count = 0;  /* objects processed; feeds the voluntary GC trigger below */

	DUK_ASSERT(thr != NULL);
	DUK_ASSERT(thr->heap != NULL);
	heap = thr->heap;
	DUK_ASSERT(heap != NULL);

	/*
	 *  Detect recursive invocation
	 */

	if (DUK_HEAP_HAS_REFZERO_FREE_RUNNING(heap)) {
		DUK_DDD(DUK_DDDPRINT("refzero free running, skip run"));
		return;
	}

	/*
	 *  Churn refzero_list until empty
	 */

	DUK_HEAP_SET_REFZERO_FREE_RUNNING(heap);
	while (heap->refzero_list) {
		duk_hobject *obj;
		duk_bool_t rescued = 0;

		/*
		 *  Pick an object from the head (don't remove yet).
		 */

		h1 = heap->refzero_list;
		obj = (duk_hobject *) h1;
		DUK_DD(DUK_DDPRINT("refzero processing %p: %!O", (void *) h1, (duk_heaphdr *) h1));
		DUK_ASSERT(DUK_HEAPHDR_GET_PREV(heap, h1) == NULL);
		DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(h1) == DUK_HTYPE_OBJECT);  /* currently, always the case */

#if defined(DUK_USE_REFZERO_FINALIZER_TORTURE)
		/* Torture option to shake out finalizer side effect issues:
		 * make a bogus function call for every finalizable object,
		 * essentially simulating the case where everything has a
		 * finalizer.
		 */
		DUK_DD(DUK_DDPRINT("refzero torture enabled, fake finalizer"));
		DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(h1) == 0);
		DUK_HEAPHDR_PREINC_REFCOUNT(h1);  /* bump refcount to prevent refzero during finalizer processing */
		duk__refcount_run_torture_finalizer(thr, obj);  /* must never longjmp */
		DUK_HEAPHDR_PREDEC_REFCOUNT(h1);  /* remove artificial bump */
		DUK_ASSERT_DISABLE(h1->h_refcount >= 0);  /* refcount is unsigned, so always true */
#endif

		/*
		 *  Finalizer check.
		 *
		 *  Note: running a finalizer may have arbitrary side effects, e.g.
		 *  queue more objects on refzero_list (tail), or even trigger a
		 *  mark-and-sweep.
		 *
		 *  Note: quick reject check should match vast majority of
		 *  objects and must be safe (not throw any errors, ever).
		 */

		/* An object may have FINALIZED here if it was finalized by mark-and-sweep
		 * on a previous run and refcount then decreased to zero. We won't run the
		 * finalizer again here.
		 */

		/* A finalizer is looked up from the object and up its prototype chain
		 * (which allows inherited finalizers).
		 */
		if (duk_hobject_hasprop_raw(thr, obj, DUK_HTHREAD_STRING_INT_FINALIZER(thr))) {
			DUK_DDD(DUK_DDDPRINT("object has a finalizer, run it"));

			DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(h1) == 0);
			DUK_HEAPHDR_PREINC_REFCOUNT(h1);  /* bump refcount to prevent refzero during finalizer processing */

			duk_hobject_run_finalizer(thr, obj);  /* must never longjmp */
			DUK_ASSERT(DUK_HEAPHDR_HAS_FINALIZED(h1));  /* duk_hobject_run_finalizer() sets */

			DUK_HEAPHDR_PREDEC_REFCOUNT(h1);  /* remove artificial bump */
			DUK_ASSERT_DISABLE(h1->h_refcount >= 0);  /* refcount is unsigned, so always true */

			/* If the finalizer stored a new reference to the object, its
			 * refcount is now non-zero and the object must be rescued
			 * (moved back to heap_allocated) instead of freed.
			 */
			if (DUK_HEAPHDR_GET_REFCOUNT(h1) != 0) {
				DUK_DDD(DUK_DDDPRINT("-> object refcount after finalization non-zero, object will be rescued"));
				rescued = 1;
			} else {
				DUK_DDD(DUK_DDDPRINT("-> object refcount still zero after finalization, object will be freed"));
			}
		}

		/* Refzero head is still the same. This is the case even if finalizer
		 * inserted more refzero objects; they are inserted to the tail.
		 */
		DUK_ASSERT(h1 == heap->refzero_list);

		/*
		 *  Remove the object from the refzero list. This cannot be done
		 *  before a possible finalizer has been executed; the finalizer
		 *  may trigger a mark-and-sweep, and mark-and-sweep must be able
		 *  to traverse a complete refzero_list.
		 */

		h2 = DUK_HEAPHDR_GET_NEXT(heap, h1);
		if (h2) {
			DUK_HEAPHDR_SET_PREV(heap, h2, NULL);  /* not strictly necessary */
			heap->refzero_list = h2;
		} else {
			heap->refzero_list = NULL;
			heap->refzero_list_tail = NULL;
		}

		/*
		 *  Rescue or free.
		 */

		if (rescued) {
			/* yes -> move back to heap allocated */
			DUK_DD(DUK_DDPRINT("object rescued during refcount finalization: %p", (void *) h1));
			DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(h1));
			DUK_ASSERT(DUK_HEAPHDR_HAS_FINALIZED(h1));
			DUK_HEAPHDR_CLEAR_FINALIZED(h1);
			h2 = heap->heap_allocated;
			DUK_HEAPHDR_SET_PREV(heap, h1, NULL);
			if (h2) {
				DUK_HEAPHDR_SET_PREV(heap, h2, h1);
			}
			DUK_HEAPHDR_SET_NEXT(heap, h1, h2);
			DUK_ASSERT_HEAPHDR_LINKS(heap, h1);
			DUK_ASSERT_HEAPHDR_LINKS(heap, h2);
			heap->heap_allocated = h1;
		} else {
			/* no -> decref members, then free */
			duk__refcount_finalize_hobject(thr, obj);
			duk_heap_free_heaphdr_raw(heap, h1);
		}

		count++;
	}
	DUK_HEAP_CLEAR_REFZERO_FREE_RUNNING(heap);

	DUK_DDD(DUK_DDDPRINT("refzero processed %ld objects", (long) count));

	/*
	 *  Once the whole refzero cascade has been freed, check for
	 *  a voluntary mark-and-sweep.
	 */

#if defined(DUK_USE_MARK_AND_SWEEP) && defined(DUK_USE_VOLUNTARY_GC)
	/* 'count' is more or less comparable to normal trigger counter update
	 * which happens in memory block (re)allocation.
	 */
	heap->mark_and_sweep_trigger_counter -= count;
	if (heap->mark_and_sweep_trigger_counter <= 0) {
		duk_bool_t rc;
		duk_small_uint_t flags = 0;  /* not emergency */
		DUK_D(DUK_DPRINT("refcount triggering mark-and-sweep"));
		rc = duk_heap_mark_and_sweep(heap, flags);
		DUK_UNREF(rc);
		DUK_D(DUK_DPRINT("refcount triggered mark-and-sweep => rc %ld", (long) rc));
	}
#endif  /* DUK_USE_MARK_AND_SWEEP && DUK_USE_VOLUNTARY_GC */
}
400
401 /*
402 * Incref and decref functions.
403 *
404 * Decref may trigger immediate refzero handling, which may free and finalize
405 * an arbitrary number of objects.
406 *
407 */
408
/*
 *  Handle a heap element whose refcount just reached zero: remove it from
 *  heap bookkeeping and free it, dispatching per heap type.  May cascade
 *  further frees via the refzero work list.
 */
DUK_INTERNAL void duk_heaphdr_refzero(duk_hthread *thr, duk_heaphdr *h) {
	duk_heap *heap;

	DUK_ASSERT(thr != NULL);
	DUK_ASSERT(h != NULL);

	heap = thr->heap;
	DUK_DDD(DUK_DDDPRINT("refzero %p: %!O", (void *) h, (duk_heaphdr *) h));

	/*
	 *  Refzero handling is skipped entirely if (1) mark-and-sweep is
	 *  running or (2) execution is paused in the debugger. The objects
	 *  are left in the heap, and will be freed by mark-and-sweep or
	 *  eventual heap destruction.
	 *
	 *  This is necessary during mark-and-sweep because refcounts are also
	 *  updated during the sweep phase (otherwise objects referenced by a
	 *  swept object would have incorrect refcounts) which then calls here.
	 *  This could be avoided by using separate decref macros in
	 *  mark-and-sweep; however, mark-and-sweep also calls finalizers which
	 *  would use the ordinary decref macros anyway and still call this
	 *  function.
	 *
	 *  This check must be enabled also when mark-and-sweep support has been
	 *  disabled: the flag is also used in heap destruction when running
	 *  finalizers for remaining objects, and the flag prevents objects from
	 *  being moved around in heap linked lists.
	 */

	/* XXX: ideally this would be just one flag (maybe a derived one) so
	 * that a single bit test is sufficient to check the condition.
	 */
#if defined(DUK_USE_DEBUGGER_SUPPORT)
	if (DUK_UNLIKELY(DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap) || DUK_HEAP_IS_PAUSED(heap))) {
#else
	if (DUK_UNLIKELY(DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap))) {
#endif
		DUK_DDD(DUK_DDDPRINT("refzero handling suppressed when mark-and-sweep running, object: %p", (void *) h));
		return;
	}

	switch ((duk_small_int_t) DUK_HEAPHDR_GET_TYPE(h)) {
	case DUK_HTYPE_STRING:
		/*
		 *  Strings have no internal references but do have "weak"
		 *  references in the string cache. Also note that strings
		 *  are not on the heap_allocated list like other heap
		 *  elements.
		 */

		duk_heap_strcache_string_remove(heap, (duk_hstring *) h);
		duk_heap_string_remove(heap, (duk_hstring *) h);
		duk_heap_free_heaphdr_raw(heap, h);
		break;

	case DUK_HTYPE_OBJECT:
		/*
		 *  Objects have internal references. Must finalize through
		 *  the "refzero" work list.
		 */

		duk_heap_remove_any_from_heap_allocated(heap, h);
		duk__queue_refzero(heap, h);
		duk__refzero_free_pending(thr);
		break;

	case DUK_HTYPE_BUFFER:
		/*
		 *  Buffers have no internal references. However, a dynamic
		 *  buffer has a separate allocation for the buffer. This is
		 *  freed by duk_heap_free_heaphdr_raw().
		 */

		duk_heap_remove_any_from_heap_allocated(heap, h);
		duk_heap_free_heaphdr_raw(heap, h);
		break;

	default:
		DUK_D(DUK_DPRINT("invalid heap type in decref: %ld", (long) DUK_HEAPHDR_GET_TYPE(h)));
		DUK_UNREACHABLE();
	}
}
491
492 #if !defined(DUK_USE_FAST_REFCOUNT_DEFAULT)
493 DUK_INTERNAL void duk_tval_incref(duk_tval *tv) {
494 DUK_ASSERT(tv != NULL);
495
496 if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(tv)) {
497 duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
498 DUK_ASSERT(h != NULL);
499 DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
500 DUK_ASSERT_DISABLE(h->h_refcount >= 0);
501 DUK_HEAPHDR_PREINC_REFCOUNT(h);
502 }
503 }
504 #endif
505
506 #if 0 /* unused */
507 DUK_INTERNAL void duk_tval_incref_allownull(duk_tval *tv) {
508 if (tv == NULL) {
509 return;
510 }
511 if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(tv)) {
512 duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
513 DUK_ASSERT(h != NULL);
514 DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
515 DUK_ASSERT_DISABLE(h->h_refcount >= 0);
516 DUK_HEAPHDR_PREINC_REFCOUNT(h);
517 }
518 }
519 #endif
520
521 DUK_INTERNAL void duk_tval_decref(duk_hthread *thr, duk_tval *tv) {
522 DUK_ASSERT(thr != NULL);
523 DUK_ASSERT(tv != NULL);
524
525 if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(tv)) {
526 duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
527 DUK_ASSERT(h != NULL);
528 DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
529 duk_heaphdr_decref(thr, h);
530 }
531 }
532
533 #if 0 /* unused */
534 DUK_INTERNAL void duk_tval_decref_allownull(duk_hthread *thr, duk_tval *tv) {
535 DUK_ASSERT(thr != NULL);
536
537 if (tv == NULL) {
538 return;
539 }
540 if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(tv)) {
541 duk_heaphdr *h = DUK_TVAL_GET_HEAPHDR(tv);
542 DUK_ASSERT(h != NULL);
543 DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
544 duk_heaphdr_decref(thr, h);
545 }
546 }
547 #endif
548
549 #if !defined(DUK_USE_FAST_REFCOUNT_DEFAULT)
/* Increment the refcount of a non-NULL heap header.  Out-of-line version
 * used when the fast inline refcount macros are disabled.
 */
DUK_INTERNAL void duk_heaphdr_incref(duk_heaphdr *h) {
	DUK_ASSERT(h != NULL);
	DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
	DUK_ASSERT_DISABLE(DUK_HEAPHDR_GET_REFCOUNT(h) >= 0);

	DUK_HEAPHDR_PREINC_REFCOUNT(h);
}
557 #endif
558
559 #if 0 /* unused */
560 DUK_INTERNAL void duk_heaphdr_incref_allownull(duk_heaphdr *h) {
561 if (h == NULL) {
562 return;
563 }
564 DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
565 DUK_ASSERT_DISABLE(DUK_HEAPHDR_GET_REFCOUNT(h) >= 0);
566
567 DUK_HEAPHDR_PREINC_REFCOUNT(h);
568 }
569 #endif
570
571 DUK_INTERNAL void duk_heaphdr_decref(duk_hthread *thr, duk_heaphdr *h) {
572 DUK_ASSERT(thr != NULL);
573 DUK_ASSERT(thr->heap != NULL);
574 DUK_ASSERT(h != NULL);
575 DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
576 DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(h) >= 1);
577
578 #if defined(DUK_USE_ROM_OBJECTS)
579 if (DUK_HEAPHDR_HAS_READONLY(h)) {
580 return;
581 }
582 #endif
583 if (DUK_HEAPHDR_PREDEC_REFCOUNT(h) != 0) {
584 return;
585 }
586 duk_heaphdr_refzero(thr, h);
587 }
588
589 DUK_INTERNAL void duk_heaphdr_decref_allownull(duk_hthread *thr, duk_heaphdr *h) {
590 DUK_ASSERT(thr != NULL);
591 DUK_ASSERT(thr->heap != NULL);
592
593 if (h == NULL) {
594 return;
595 }
596 DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h));
597
598 #if defined(DUK_USE_ROM_OBJECTS)
599 if (DUK_HEAPHDR_HAS_READONLY(h)) {
600 return;
601 }
602 #endif
603 DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(h) >= 1);
604 if (DUK_HEAPHDR_PREDEC_REFCOUNT(h) != 0) {
605 return;
606 }
607 duk_heaphdr_refzero(thr, h);
608 }
609
610 #else
611
612 /* no refcounting */
613
614 #endif /* DUK_USE_REFERENCE_COUNTING */