/*
 *  Reference counting implementation.
 */
5 #include "duk_internal.h"
7 #ifdef DUK_USE_REFERENCE_COUNTING
9 #ifndef DUK_USE_DOUBLE_LINKED_HEAP
10 #error internal error, reference counting requires a double linked heap
17 DUK_LOCAL
void duk__queue_refzero(duk_heap
*heap
, duk_heaphdr
*hdr
) {
18 /* tail insert: don't disturb head in case refzero is running */
20 if (heap
->refzero_list
!= NULL
) {
21 duk_heaphdr
*hdr_prev
;
23 hdr_prev
= heap
->refzero_list_tail
;
24 DUK_ASSERT(hdr_prev
!= NULL
);
25 DUK_ASSERT(DUK_HEAPHDR_GET_NEXT(heap
, hdr_prev
) == NULL
);
27 DUK_HEAPHDR_SET_NEXT(heap
, hdr
, NULL
);
28 DUK_HEAPHDR_SET_PREV(heap
, hdr
, hdr_prev
);
29 DUK_HEAPHDR_SET_NEXT(heap
, hdr_prev
, hdr
);
30 DUK_ASSERT_HEAPHDR_LINKS(heap
, hdr
);
31 DUK_ASSERT_HEAPHDR_LINKS(heap
, hdr_prev
);
32 heap
->refzero_list_tail
= hdr
;
34 DUK_ASSERT(heap
->refzero_list_tail
== NULL
);
35 DUK_HEAPHDR_SET_NEXT(heap
, hdr
, NULL
);
36 DUK_HEAPHDR_SET_PREV(heap
, hdr
, NULL
);
37 DUK_ASSERT_HEAPHDR_LINKS(heap
, hdr
);
38 heap
->refzero_list
= hdr
;
39 heap
->refzero_list_tail
= hdr
;
44 * Heap object refcount finalization.
46 * When an object is about to be freed, all other objects it refers to must
47 * be decref'd. Refcount finalization does NOT free the object or its inner
48 * allocations (mark-and-sweep shares these helpers), it just manipulates
51 * Note that any of the decref's may cause a refcount to drop to zero, BUT
52 * it will not be processed inline; instead, because refzero is already
53 * running, the objects will just be queued to refzero list and processed
54 * later. This eliminates C recursion.
57 DUK_LOCAL
void duk__refcount_finalize_hobject(duk_hthread
*thr
, duk_hobject
*h
) {
61 DUK_ASSERT(DUK_HEAPHDR_GET_TYPE((duk_heaphdr
*) h
) == DUK_HTYPE_OBJECT
);
63 /* XXX: better to get base and walk forwards? */
65 for (i
= 0; i
< (duk_uint_fast32_t
) DUK_HOBJECT_GET_ENEXT(h
); i
++) {
66 duk_hstring
*key
= DUK_HOBJECT_E_GET_KEY(thr
->heap
, h
, i
);
70 duk_heaphdr_decref(thr
, (duk_heaphdr
*) key
);
71 if (DUK_HOBJECT_E_SLOT_IS_ACCESSOR(thr
->heap
, h
, i
)) {
72 duk_heaphdr_decref_allownull(thr
, (duk_heaphdr
*) DUK_HOBJECT_E_GET_VALUE_GETTER(thr
->heap
, h
, i
));
73 duk_heaphdr_decref_allownull(thr
, (duk_heaphdr
*) DUK_HOBJECT_E_GET_VALUE_SETTER(thr
->heap
, h
, i
));
75 duk_tval_decref(thr
, DUK_HOBJECT_E_GET_VALUE_TVAL_PTR(thr
->heap
, h
, i
));
79 for (i
= 0; i
< (duk_uint_fast32_t
) DUK_HOBJECT_GET_ASIZE(h
); i
++) {
80 duk_tval_decref(thr
, DUK_HOBJECT_A_GET_VALUE_PTR(thr
->heap
, h
, i
));
83 /* hash part is a 'weak reference' and does not contribute */
85 duk_heaphdr_decref_allownull(thr
, (duk_heaphdr
*) DUK_HOBJECT_GET_PROTOTYPE(thr
->heap
, h
));
87 if (DUK_HOBJECT_IS_COMPILEDFUNCTION(h
)) {
88 duk_hcompiledfunction
*f
= (duk_hcompiledfunction
*) h
;
89 duk_tval
*tv
, *tv_end
;
90 duk_hobject
**funcs
, **funcs_end
;
92 if (DUK_HCOMPILEDFUNCTION_GET_DATA(thr
->heap
, f
) != NULL
) {
93 tv
= DUK_HCOMPILEDFUNCTION_GET_CONSTS_BASE(thr
->heap
, f
);
94 tv_end
= DUK_HCOMPILEDFUNCTION_GET_CONSTS_END(thr
->heap
, f
);
96 duk_tval_decref(thr
, tv
);
100 funcs
= DUK_HCOMPILEDFUNCTION_GET_FUNCS_BASE(thr
->heap
, f
);
101 funcs_end
= DUK_HCOMPILEDFUNCTION_GET_FUNCS_END(thr
->heap
, f
);
102 while (funcs
< funcs_end
) {
103 duk_heaphdr_decref(thr
, (duk_heaphdr
*) *funcs
);
107 /* May happen in some out-of-memory corner cases. */
108 DUK_D(DUK_DPRINT("duk_hcompiledfunction 'data' is NULL, skipping decref"));
111 duk_heaphdr_decref(thr
, (duk_heaphdr
*) DUK_HCOMPILEDFUNCTION_GET_DATA(thr
->heap
, f
));
112 } else if (DUK_HOBJECT_IS_NATIVEFUNCTION(h
)) {
113 duk_hnativefunction
*f
= (duk_hnativefunction
*) h
;
115 /* nothing to finalize */
116 } else if (DUK_HOBJECT_IS_BUFFEROBJECT(h
)) {
117 duk_hbufferobject
*b
= (duk_hbufferobject
*) h
;
119 duk_heaphdr_decref(thr
, (duk_heaphdr
*) b
->buf
);
121 } else if (DUK_HOBJECT_IS_THREAD(h
)) {
122 duk_hthread
*t
= (duk_hthread
*) h
;
126 while (tv
< t
->valstack_top
) {
127 duk_tval_decref(thr
, tv
);
131 for (i
= 0; i
< (duk_uint_fast32_t
) t
->callstack_top
; i
++) {
132 duk_activation
*act
= t
->callstack
+ i
;
133 duk_heaphdr_decref_allownull(thr
, (duk_heaphdr
*) DUK_ACT_GET_FUNC(act
));
134 duk_heaphdr_decref_allownull(thr
, (duk_heaphdr
*) act
->var_env
);
135 duk_heaphdr_decref_allownull(thr
, (duk_heaphdr
*) act
->lex_env
);
136 #ifdef DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
137 duk_heaphdr_decref_allownull(thr
, (duk_heaphdr
*) act
->prev_caller
);
141 #if 0 /* nothing now */
142 for (i
= 0; i
< (duk_uint_fast32_t
) t
->catchstack_top
; i
++) {
143 duk_catcher
*cat
= t
->catchstack
+ i
;
147 for (i
= 0; i
< DUK_NUM_BUILTINS
; i
++) {
148 duk_heaphdr_decref_allownull(thr
, (duk_heaphdr
*) t
->builtins
[i
]);
151 duk_heaphdr_decref_allownull(thr
, (duk_heaphdr
*) t
->resumer
);
155 DUK_INTERNAL
void duk_heaphdr_refcount_finalize(duk_hthread
*thr
, duk_heaphdr
*hdr
) {
158 switch ((int) DUK_HEAPHDR_GET_TYPE(hdr
)) {
159 case DUK_HTYPE_OBJECT
:
160 duk__refcount_finalize_hobject(thr
, (duk_hobject
*) hdr
);
162 case DUK_HTYPE_BUFFER
:
163 /* nothing to finalize */
165 case DUK_HTYPE_STRING
:
166 /* cannot happen: strings are not put into refzero list (they don't even have the next/prev pointers) */
172 #if defined(DUK_USE_REFZERO_FINALIZER_TORTURE)
173 DUK_LOCAL duk_ret_t
duk__refcount_fake_finalizer(duk_context
*ctx
) {
175 DUK_D(DUK_DPRINT("fake refcount torture finalizer executed"));
177 DUK_DD(DUK_DDPRINT("fake torture finalizer for: %!T", duk_get_tval(ctx
, 0)));
179 /* Require a lot of stack to force a value stack grow/shrink. */
180 duk_require_stack(ctx
, 100000);
182 /* XXX: do something to force a callstack grow/shrink, perhaps
183 * just a manual forced resize?
188 DUK_LOCAL
void duk__refcount_run_torture_finalizer(duk_hthread
*thr
, duk_hobject
*obj
) {
192 DUK_ASSERT(thr
!= NULL
);
193 DUK_ASSERT(obj
!= NULL
);
194 ctx
= (duk_context
*) thr
;
196 /* Avoid fake finalization for the duk__refcount_fake_finalizer function
197 * itself, otherwise we're in infinite recursion.
199 if (DUK_HOBJECT_HAS_NATIVEFUNCTION(obj
)) {
200 if (((duk_hnativefunction
*) obj
)->func
== duk__refcount_fake_finalizer
) {
201 DUK_DD(DUK_DDPRINT("avoid fake torture finalizer for duk__refcount_fake_finalizer itself"));
205 /* Avoid fake finalization when callstack limit has been reached.
206 * Otherwise a callstack limit error will be created, then refzero'ed,
207 * and we're in an infinite loop.
209 if (thr
->heap
->call_recursion_depth
>= thr
->heap
->call_recursion_limit
||
210 thr
->callstack_size
+ 2 * DUK_CALLSTACK_GROW_STEP
>= thr
->callstack_max
/*approximate*/) {
211 DUK_D(DUK_DPRINT("call recursion depth reached, avoid fake torture finalizer"));
215 /* Run fake finalizer. Avoid creating new refzero queue entries
216 * so that we are not forced into a forever loop.
218 duk_push_c_function(ctx
, duk__refcount_fake_finalizer
, 1 /*nargs*/);
219 duk_push_hobject(ctx
, obj
);
220 rc
= duk_pcall(ctx
, 1);
221 DUK_UNREF(rc
); /* ignored */
224 #endif /* DUK_USE_REFZERO_FINALIZER_TORTURE */
227 * Refcount memory freeing loop.
229 * Frees objects in the refzero_pending list until the list becomes
230 * empty. When an object is freed, its references get decref'd and
231 * may cause further objects to be queued for freeing.
233 * This could be expanded to allow incremental freeing: just bail out
234 * early and resume at a future alloc/decref/refzero.
237 DUK_LOCAL
void duk__refzero_free_pending(duk_hthread
*thr
) {
238 duk_heaphdr
*h1
, *h2
;
242 DUK_ASSERT(thr
!= NULL
);
243 DUK_ASSERT(thr
->heap
!= NULL
);
245 DUK_ASSERT(heap
!= NULL
);
248 * Detect recursive invocation
251 if (DUK_HEAP_HAS_REFZERO_FREE_RUNNING(heap
)) {
252 DUK_DDD(DUK_DDDPRINT("refzero free running, skip run"));
257 * Churn refzero_list until empty
260 DUK_HEAP_SET_REFZERO_FREE_RUNNING(heap
);
261 while (heap
->refzero_list
) {
263 duk_bool_t rescued
= 0;
266 * Pick an object from the head (don't remove yet).
269 h1
= heap
->refzero_list
;
270 obj
= (duk_hobject
*) h1
;
271 DUK_DD(DUK_DDPRINT("refzero processing %p: %!O", (void *) h1
, (duk_heaphdr
*) h1
));
272 DUK_ASSERT(DUK_HEAPHDR_GET_PREV(heap
, h1
) == NULL
);
273 DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(h1
) == DUK_HTYPE_OBJECT
); /* currently, always the case */
275 #if defined(DUK_USE_REFZERO_FINALIZER_TORTURE)
276 /* Torture option to shake out finalizer side effect issues:
277 * make a bogus function call for every finalizable object,
278 * essentially simulating the case where everything has a
281 DUK_DD(DUK_DDPRINT("refzero torture enabled, fake finalizer"));
282 DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(h1
) == 0);
283 DUK_HEAPHDR_PREINC_REFCOUNT(h1
); /* bump refcount to prevent refzero during finalizer processing */
284 duk__refcount_run_torture_finalizer(thr
, obj
); /* must never longjmp */
285 DUK_HEAPHDR_PREDEC_REFCOUNT(h1
); /* remove artificial bump */
286 DUK_ASSERT_DISABLE(h1
->h_refcount
>= 0); /* refcount is unsigned, so always true */
292 * Note: running a finalizer may have arbitrary side effects, e.g.
293 * queue more objects on refzero_list (tail), or even trigger a
296 * Note: quick reject check should match vast majority of
297 * objects and must be safe (not throw any errors, ever).
300 /* An object may have FINALIZED here if it was finalized by mark-and-sweep
301 * on a previous run and refcount then decreased to zero. We won't run the
302 * finalizer again here.
305 /* A finalizer is looked up from the object and up its prototype chain
306 * (which allows inherited finalizers).
308 if (duk_hobject_hasprop_raw(thr
, obj
, DUK_HTHREAD_STRING_INT_FINALIZER(thr
))) {
309 DUK_DDD(DUK_DDDPRINT("object has a finalizer, run it"));
311 DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(h1
) == 0);
312 DUK_HEAPHDR_PREINC_REFCOUNT(h1
); /* bump refcount to prevent refzero during finalizer processing */
314 duk_hobject_run_finalizer(thr
, obj
); /* must never longjmp */
315 DUK_ASSERT(DUK_HEAPHDR_HAS_FINALIZED(h1
)); /* duk_hobject_run_finalizer() sets */
317 DUK_HEAPHDR_PREDEC_REFCOUNT(h1
); /* remove artificial bump */
318 DUK_ASSERT_DISABLE(h1
->h_refcount
>= 0); /* refcount is unsigned, so always true */
320 if (DUK_HEAPHDR_GET_REFCOUNT(h1
) != 0) {
321 DUK_DDD(DUK_DDDPRINT("-> object refcount after finalization non-zero, object will be rescued"));
324 DUK_DDD(DUK_DDDPRINT("-> object refcount still zero after finalization, object will be freed"));
328 /* Refzero head is still the same. This is the case even if finalizer
329 * inserted more refzero objects; they are inserted to the tail.
331 DUK_ASSERT(h1
== heap
->refzero_list
);
334 * Remove the object from the refzero list. This cannot be done
335 * before a possible finalizer has been executed; the finalizer
336 * may trigger a mark-and-sweep, and mark-and-sweep must be able
337 * to traverse a complete refzero_list.
340 h2
= DUK_HEAPHDR_GET_NEXT(heap
, h1
);
342 DUK_HEAPHDR_SET_PREV(heap
, h2
, NULL
); /* not strictly necessary */
343 heap
->refzero_list
= h2
;
345 heap
->refzero_list
= NULL
;
346 heap
->refzero_list_tail
= NULL
;
354 /* yes -> move back to heap allocated */
355 DUK_DD(DUK_DDPRINT("object rescued during refcount finalization: %p", (void *) h1
));
356 DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(h1
));
357 DUK_ASSERT(DUK_HEAPHDR_HAS_FINALIZED(h1
));
358 DUK_HEAPHDR_CLEAR_FINALIZED(h1
);
359 h2
= heap
->heap_allocated
;
360 DUK_HEAPHDR_SET_PREV(heap
, h1
, NULL
);
362 DUK_HEAPHDR_SET_PREV(heap
, h2
, h1
);
364 DUK_HEAPHDR_SET_NEXT(heap
, h1
, h2
);
365 DUK_ASSERT_HEAPHDR_LINKS(heap
, h1
);
366 DUK_ASSERT_HEAPHDR_LINKS(heap
, h2
);
367 heap
->heap_allocated
= h1
;
369 /* no -> decref members, then free */
370 duk__refcount_finalize_hobject(thr
, obj
);
371 duk_heap_free_heaphdr_raw(heap
, h1
);
376 DUK_HEAP_CLEAR_REFZERO_FREE_RUNNING(heap
);
378 DUK_DDD(DUK_DDDPRINT("refzero processed %ld objects", (long) count
));
381 * Once the whole refzero cascade has been freed, check for
382 * a voluntary mark-and-sweep.
385 #if defined(DUK_USE_MARK_AND_SWEEP) && defined(DUK_USE_VOLUNTARY_GC)
386 /* 'count' is more or less comparable to normal trigger counter update
387 * which happens in memory block (re)allocation.
389 heap
->mark_and_sweep_trigger_counter
-= count
;
390 if (heap
->mark_and_sweep_trigger_counter
<= 0) {
391 if (DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap
)) {
392 DUK_D(DUK_DPRINT("mark-and-sweep in progress -> skip voluntary mark-and-sweep now"));
395 duk_small_uint_t flags
= 0; /* not emergency */
396 DUK_D(DUK_DPRINT("refcount triggering mark-and-sweep"));
397 rc
= duk_heap_mark_and_sweep(heap
, flags
);
399 DUK_D(DUK_DPRINT("refcount triggered mark-and-sweep => rc %ld", (long) rc
));
402 #endif /* DUK_USE_MARK_AND_SWEEP && DUK_USE_VOLUNTARY_GC */
406 * Incref and decref functions.
408 * Decref may trigger immediate refzero handling, which may free and finalize
409 * an arbitrary number of objects.
413 DUK_INTERNAL
void duk_heaphdr_refzero(duk_hthread
*thr
, duk_heaphdr
*h
) {
416 DUK_ASSERT(thr
!= NULL
);
417 DUK_ASSERT(h
!= NULL
);
420 DUK_DDD(DUK_DDDPRINT("refzero %p: %!O", (void *) h
, (duk_heaphdr
*) h
));
423 * Refzero handling is skipped entirely if (1) mark-and-sweep is
424 * running or (2) execution is paused in the debugger. The objects
425 * are left in the heap, and will be freed by mark-and-sweep or
426 * eventual heap destruction.
428 * This is necessary during mark-and-sweep because refcounts are also
429 * updated during the sweep phase (otherwise objects referenced by a
430 * swept object would have incorrect refcounts) which then calls here.
431 * This could be avoided by using separate decref macros in
432 * mark-and-sweep; however, mark-and-sweep also calls finalizers which
433 * would use the ordinary decref macros anyway and still call this
436 * This check must be enabled also when mark-and-sweep support has been
437 * disabled: the flag is also used in heap destruction when running
438 * finalizers for remaining objects, and the flag prevents objects from
439 * being moved around in heap linked lists.
442 /* XXX: ideally this would be just one flag (maybe a derived one) so
443 * that a single bit test is sufficient to check the condition.
445 #if defined(DUK_USE_DEBUGGER_SUPPORT)
446 if (DUK_UNLIKELY(DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap
) || DUK_HEAP_IS_PAUSED(heap
))) {
448 if (DUK_UNLIKELY(DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap
))) {
450 DUK_DDD(DUK_DDDPRINT("refzero handling suppressed when mark-and-sweep running, object: %p", (void *) h
));
454 switch ((duk_small_int_t
) DUK_HEAPHDR_GET_TYPE(h
)) {
455 case DUK_HTYPE_STRING
:
457 * Strings have no internal references but do have "weak"
458 * references in the string cache. Also note that strings
459 * are not on the heap_allocated list like other heap
463 duk_heap_strcache_string_remove(heap
, (duk_hstring
*) h
);
464 duk_heap_string_remove(heap
, (duk_hstring
*) h
);
465 duk_heap_free_heaphdr_raw(heap
, h
);
468 case DUK_HTYPE_OBJECT
:
470 * Objects have internal references. Must finalize through
471 * the "refzero" work list.
474 duk_heap_remove_any_from_heap_allocated(heap
, h
);
475 duk__queue_refzero(heap
, h
);
476 duk__refzero_free_pending(thr
);
479 case DUK_HTYPE_BUFFER
:
481 * Buffers have no internal references. However, a dynamic
482 * buffer has a separate allocation for the buffer. This is
483 * freed by duk_heap_free_heaphdr_raw().
486 duk_heap_remove_any_from_heap_allocated(heap
, h
);
487 duk_heap_free_heaphdr_raw(heap
, h
);
491 DUK_D(DUK_DPRINT("invalid heap type in decref: %ld", (long) DUK_HEAPHDR_GET_TYPE(h
)));
496 #if !defined(DUK_USE_FAST_REFCOUNT_DEFAULT)
497 DUK_INTERNAL
void duk_tval_incref(duk_tval
*tv
) {
498 DUK_ASSERT(tv
!= NULL
);
500 if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(tv
)) {
501 duk_heaphdr
*h
= DUK_TVAL_GET_HEAPHDR(tv
);
502 DUK_ASSERT(h
!= NULL
);
503 DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h
));
504 DUK_ASSERT_DISABLE(h
->h_refcount
>= 0);
505 DUK_HEAPHDR_PREINC_REFCOUNT(h
);
511 DUK_INTERNAL
void duk_tval_incref_allownull(duk_tval
*tv
) {
515 if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(tv
)) {
516 duk_heaphdr
*h
= DUK_TVAL_GET_HEAPHDR(tv
);
517 DUK_ASSERT(h
!= NULL
);
518 DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h
));
519 DUK_ASSERT_DISABLE(h
->h_refcount
>= 0);
520 DUK_HEAPHDR_PREINC_REFCOUNT(h
);
525 DUK_INTERNAL
void duk_tval_decref(duk_hthread
*thr
, duk_tval
*tv
) {
526 DUK_ASSERT(thr
!= NULL
);
527 DUK_ASSERT(tv
!= NULL
);
529 if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(tv
)) {
530 duk_heaphdr
*h
= DUK_TVAL_GET_HEAPHDR(tv
);
531 DUK_ASSERT(h
!= NULL
);
532 DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h
));
533 duk_heaphdr_decref(thr
, h
);
538 DUK_INTERNAL
void duk_tval_decref_allownull(duk_hthread
*thr
, duk_tval
*tv
) {
539 DUK_ASSERT(thr
!= NULL
);
544 if (DUK_TVAL_NEEDS_REFCOUNT_UPDATE(tv
)) {
545 duk_heaphdr
*h
= DUK_TVAL_GET_HEAPHDR(tv
);
546 DUK_ASSERT(h
!= NULL
);
547 DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h
));
548 duk_heaphdr_decref(thr
, h
);
553 #if !defined(DUK_USE_FAST_REFCOUNT_DEFAULT)
554 DUK_INTERNAL
void duk_heaphdr_incref(duk_heaphdr
*h
) {
555 DUK_ASSERT(h
!= NULL
);
556 DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h
));
557 DUK_ASSERT_DISABLE(DUK_HEAPHDR_GET_REFCOUNT(h
) >= 0);
559 #if defined(DUK_USE_ROM_OBJECTS)
560 if (DUK_HEAPHDR_HAS_READONLY(h
)) {
565 DUK_HEAPHDR_PREINC_REFCOUNT(h
);
570 DUK_INTERNAL
void duk_heaphdr_incref_allownull(duk_heaphdr
*h
) {
574 DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h
));
575 DUK_ASSERT_DISABLE(DUK_HEAPHDR_GET_REFCOUNT(h
) >= 0);
577 DUK_HEAPHDR_PREINC_REFCOUNT(h
);
581 DUK_INTERNAL
void duk_heaphdr_decref(duk_hthread
*thr
, duk_heaphdr
*h
) {
582 DUK_ASSERT(thr
!= NULL
);
583 DUK_ASSERT(thr
->heap
!= NULL
);
584 DUK_ASSERT(h
!= NULL
);
585 DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h
));
586 DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(h
) >= 1);
588 #if defined(DUK_USE_ROM_OBJECTS)
589 if (DUK_HEAPHDR_HAS_READONLY(h
)) {
593 if (DUK_HEAPHDR_PREDEC_REFCOUNT(h
) != 0) {
596 duk_heaphdr_refzero(thr
, h
);
599 DUK_INTERNAL
void duk_heaphdr_decref_allownull(duk_hthread
*thr
, duk_heaphdr
*h
) {
600 DUK_ASSERT(thr
!= NULL
);
601 DUK_ASSERT(thr
->heap
!= NULL
);
606 DUK_ASSERT(DUK_HEAPHDR_HTYPE_VALID(h
));
608 #if defined(DUK_USE_ROM_OBJECTS)
609 if (DUK_HEAPHDR_HAS_READONLY(h
)) {
613 DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(h
) >= 1);
614 if (DUK_HEAPHDR_PREDEC_REFCOUNT(h
) != 0) {
617 duk_heaphdr_refzero(thr
, h
);
624 #endif /* DUK_USE_REFERENCE_COUNTING */