2 * Mark-and-sweep garbage collection.
5 #include "duk_internal.h"
7 #ifdef DUK_USE_MARK_AND_SWEEP
9 DUK_LOCAL_DECL
void duk__mark_heaphdr(duk_heap
*heap
, duk_heaphdr
*h
);
10 DUK_LOCAL_DECL
void duk__mark_tval(duk_heap
*heap
, duk_tval
*tv
);
/*
 *  Select a thread for mark-and-sweep use.
 *
 *  XXX: This needs to change later.
 */
20 DUK_LOCAL duk_hthread
*duk__get_temp_hthread(duk_heap
*heap
) {
21 if (heap
->curr_thread
) {
22 return heap
->curr_thread
;
24 return heap
->heap_thread
; /* may be NULL, too */
/*
 *  Marking functions for heap types: mark children recursively.
 */
31 DUK_LOCAL
void duk__mark_hstring(duk_heap
*heap
, duk_hstring
*h
) {
35 DUK_DDD(DUK_DDDPRINT("duk__mark_hstring: %p", (void *) h
));
38 /* nothing to process */
41 DUK_LOCAL
void duk__mark_hobject(duk_heap
*heap
, duk_hobject
*h
) {
44 DUK_DDD(DUK_DDDPRINT("duk__mark_hobject: %p", (void *) h
));
48 /* XXX: use advancing pointers instead of index macros -> faster and smaller? */
50 for (i
= 0; i
< (duk_uint_fast32_t
) DUK_HOBJECT_GET_ENEXT(h
); i
++) {
51 duk_hstring
*key
= DUK_HOBJECT_E_GET_KEY(heap
, h
, i
);
55 duk__mark_heaphdr(heap
, (duk_heaphdr
*) key
);
56 if (DUK_HOBJECT_E_SLOT_IS_ACCESSOR(heap
, h
, i
)) {
57 duk__mark_heaphdr(heap
, (duk_heaphdr
*) DUK_HOBJECT_E_GET_VALUE_PTR(heap
, h
, i
)->a
.get
);
58 duk__mark_heaphdr(heap
, (duk_heaphdr
*) DUK_HOBJECT_E_GET_VALUE_PTR(heap
, h
, i
)->a
.set
);
60 duk__mark_tval(heap
, &DUK_HOBJECT_E_GET_VALUE_PTR(heap
, h
, i
)->v
);
64 for (i
= 0; i
< (duk_uint_fast32_t
) DUK_HOBJECT_GET_ASIZE(h
); i
++) {
65 duk__mark_tval(heap
, DUK_HOBJECT_A_GET_VALUE_PTR(heap
, h
, i
));
68 /* hash part is a 'weak reference' and does not contribute */
70 duk__mark_heaphdr(heap
, (duk_heaphdr
*) DUK_HOBJECT_GET_PROTOTYPE(heap
, h
));
72 if (DUK_HOBJECT_IS_COMPILEDFUNCTION(h
)) {
73 duk_hcompiledfunction
*f
= (duk_hcompiledfunction
*) h
;
74 duk_tval
*tv
, *tv_end
;
75 duk_hobject
**fn
, **fn_end
;
77 /* 'data' is reachable through every compiled function which
78 * contains a reference.
81 duk__mark_heaphdr(heap
, (duk_heaphdr
*) DUK_HCOMPILEDFUNCTION_GET_DATA(heap
, f
));
83 tv
= DUK_HCOMPILEDFUNCTION_GET_CONSTS_BASE(heap
, f
);
84 tv_end
= DUK_HCOMPILEDFUNCTION_GET_CONSTS_END(heap
, f
);
86 duk__mark_tval(heap
, tv
);
90 fn
= DUK_HCOMPILEDFUNCTION_GET_FUNCS_BASE(heap
, f
);
91 fn_end
= DUK_HCOMPILEDFUNCTION_GET_FUNCS_END(heap
, f
);
93 duk__mark_heaphdr(heap
, (duk_heaphdr
*) *fn
);
96 } else if (DUK_HOBJECT_IS_NATIVEFUNCTION(h
)) {
97 duk_hnativefunction
*f
= (duk_hnativefunction
*) h
;
100 } else if (DUK_HOBJECT_IS_BUFFEROBJECT(h
)) {
101 duk_hbufferobject
*b
= (duk_hbufferobject
*) h
;
102 duk__mark_heaphdr(heap
, (duk_heaphdr
*) b
->buf
);
103 } else if (DUK_HOBJECT_IS_THREAD(h
)) {
104 duk_hthread
*t
= (duk_hthread
*) h
;
108 while (tv
< t
->valstack_end
) {
109 duk__mark_tval(heap
, tv
);
113 for (i
= 0; i
< (duk_uint_fast32_t
) t
->callstack_top
; i
++) {
114 duk_activation
*act
= t
->callstack
+ i
;
115 duk__mark_heaphdr(heap
, (duk_heaphdr
*) DUK_ACT_GET_FUNC(act
));
116 duk__mark_heaphdr(heap
, (duk_heaphdr
*) act
->var_env
);
117 duk__mark_heaphdr(heap
, (duk_heaphdr
*) act
->lex_env
);
118 #ifdef DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
119 duk__mark_heaphdr(heap
, (duk_heaphdr
*) act
->prev_caller
);
123 #if 0 /* nothing now */
124 for (i
= 0; i
< (duk_uint_fast32_t
) t
->catchstack_top
; i
++) {
125 duk_catcher
*cat
= t
->catchstack
+ i
;
129 duk__mark_heaphdr(heap
, (duk_heaphdr
*) t
->resumer
);
131 /* XXX: duk_small_uint_t would be enough for this loop */
132 for (i
= 0; i
< DUK_NUM_BUILTINS
; i
++) {
133 duk__mark_heaphdr(heap
, (duk_heaphdr
*) t
->builtins
[i
]);
138 /* recursion tracking happens here only */
139 DUK_LOCAL
void duk__mark_heaphdr(duk_heap
*heap
, duk_heaphdr
*h
) {
140 DUK_DDD(DUK_DDDPRINT("duk__mark_heaphdr %p, type %ld",
142 (h
!= NULL
? (long) DUK_HEAPHDR_GET_TYPE(h
) : (long) -1)));
147 if (DUK_HEAPHDR_HAS_REACHABLE(h
)) {
148 DUK_DDD(DUK_DDDPRINT("already marked reachable, skip"));
151 DUK_HEAPHDR_SET_REACHABLE(h
);
153 if (heap
->mark_and_sweep_recursion_depth
>= DUK_USE_MARK_AND_SWEEP_RECLIMIT
) {
154 /* log this with a normal debug level because this should be relatively rare */
155 DUK_D(DUK_DPRINT("mark-and-sweep recursion limit reached, marking as temproot: %p", (void *) h
));
156 DUK_HEAP_SET_MARKANDSWEEP_RECLIMIT_REACHED(heap
);
157 DUK_HEAPHDR_SET_TEMPROOT(h
);
161 heap
->mark_and_sweep_recursion_depth
++;
163 switch ((int) DUK_HEAPHDR_GET_TYPE(h
)) {
164 case DUK_HTYPE_STRING
:
165 duk__mark_hstring(heap
, (duk_hstring
*) h
);
167 case DUK_HTYPE_OBJECT
:
168 duk__mark_hobject(heap
, (duk_hobject
*) h
);
170 case DUK_HTYPE_BUFFER
:
171 /* nothing to mark */
174 DUK_D(DUK_DPRINT("attempt to mark heaphdr %p with invalid htype %ld", (void *) h
, (long) DUK_HEAPHDR_GET_TYPE(h
)));
178 heap
->mark_and_sweep_recursion_depth
--;
181 DUK_LOCAL
void duk__mark_tval(duk_heap
*heap
, duk_tval
*tv
) {
182 DUK_DDD(DUK_DDDPRINT("duk__mark_tval %p", (void *) tv
));
186 if (DUK_TVAL_IS_HEAP_ALLOCATED(tv
)) {
187 duk__mark_heaphdr(heap
, DUK_TVAL_GET_HEAPHDR(tv
));
195 DUK_LOCAL
void duk__mark_roots_heap(duk_heap
*heap
) {
198 DUK_DD(DUK_DDPRINT("duk__mark_roots_heap: %p", (void *) heap
));
200 duk__mark_heaphdr(heap
, (duk_heaphdr
*) heap
->heap_thread
);
201 duk__mark_heaphdr(heap
, (duk_heaphdr
*) heap
->heap_object
);
203 for (i
= 0; i
< DUK_HEAP_NUM_STRINGS
; i
++) {
204 duk_hstring
*h
= DUK_HEAP_GET_STRING(heap
, i
);
205 duk__mark_heaphdr(heap
, (duk_heaphdr
*) h
);
208 duk__mark_tval(heap
, &heap
->lj
.value1
);
209 duk__mark_tval(heap
, &heap
->lj
.value2
);
211 #if defined(DUK_USE_DEBUGGER_SUPPORT)
212 for (i
= 0; i
< heap
->dbg_breakpoint_count
; i
++) {
213 duk__mark_heaphdr(heap
, (duk_heaphdr
*) heap
->dbg_breakpoints
[i
].filename
);
/*
 *  Mark refzero_list objects.
 *
 *  Objects on the refzero_list have no inbound references.  They might have
 *  outbound references to objects that we might free, which would invalidate
 *  any references held by the refzero objects.  A refzero object might also
 *  be rescued by refcount finalization.  Refzero objects are treated as
 *  reachability roots to ensure they (or anything they point to) are not
 *  freed in mark-and-sweep.
 */
229 #ifdef DUK_USE_REFERENCE_COUNTING
230 DUK_LOCAL
void duk__mark_refzero_list(duk_heap
*heap
) {
233 DUK_DD(DUK_DDPRINT("duk__mark_refzero_list: %p", (void *) heap
));
235 hdr
= heap
->refzero_list
;
237 duk__mark_heaphdr(heap
, hdr
);
238 hdr
= DUK_HEAPHDR_GET_NEXT(heap
, hdr
);
/*
 *  Mark unreachable, finalizable objects.
 *
 *  Such objects will be moved aside and their finalizers run later.  They
 *  have to be treated as reachability roots for their properties etc to
 *  remain allocated.  This marking is only done for unreachable values which
 *  would be swept later (refzero_list is thus excluded).
 *
 *  Objects are first marked FINALIZABLE and only then marked as reachability
 *  roots; otherwise circular references might be handled inconsistently.
 */
255 DUK_LOCAL
void duk__mark_finalizable(duk_heap
*heap
) {
258 duk_size_t count_finalizable
= 0;
260 DUK_DD(DUK_DDPRINT("duk__mark_finalizable: %p", (void *) heap
));
262 thr
= duk__get_temp_hthread(heap
);
263 DUK_ASSERT(thr
!= NULL
);
265 hdr
= heap
->heap_allocated
;
267 /* A finalizer is looked up from the object and up its prototype chain
268 * (which allows inherited finalizers). A prototype loop must not cause
269 * an error to be thrown here; duk_hobject_hasprop_raw() will ignore a
270 * prototype loop silently and indicate that the property doesn't exist.
273 if (!DUK_HEAPHDR_HAS_REACHABLE(hdr
) &&
274 DUK_HEAPHDR_GET_TYPE(hdr
) == DUK_HTYPE_OBJECT
&&
275 !DUK_HEAPHDR_HAS_FINALIZED(hdr
) &&
276 duk_hobject_hasprop_raw(thr
, (duk_hobject
*) hdr
, DUK_HTHREAD_STRING_INT_FINALIZER(thr
))) {
281 * - is not a finalized object
285 DUK_DD(DUK_DDPRINT("unreachable heap object will be "
286 "finalized -> mark as finalizable "
287 "and treat as a reachability root: %p",
289 DUK_HEAPHDR_SET_FINALIZABLE(hdr
);
290 count_finalizable
++;
293 hdr
= DUK_HEAPHDR_GET_NEXT(heap
, hdr
);
296 if (count_finalizable
== 0) {
300 DUK_DD(DUK_DDPRINT("marked %ld heap objects as finalizable, now mark them reachable",
301 (long) count_finalizable
));
303 hdr
= heap
->heap_allocated
;
305 if (DUK_HEAPHDR_HAS_FINALIZABLE(hdr
)) {
306 duk__mark_heaphdr(heap
, hdr
);
309 hdr
= DUK_HEAPHDR_GET_NEXT(heap
, hdr
);
312 /* Caller will finish the marking process if we hit a recursion limit. */
316 * Mark objects on finalize_list.
320 DUK_LOCAL
void duk__mark_finalize_list(duk_heap
*heap
) {
323 duk_size_t count_finalize_list
= 0;
326 DUK_DD(DUK_DDPRINT("duk__mark_finalize_list: %p", (void *) heap
));
328 hdr
= heap
->finalize_list
;
330 duk__mark_heaphdr(heap
, hdr
);
331 hdr
= DUK_HEAPHDR_GET_NEXT(heap
, hdr
);
333 count_finalize_list
++;
338 if (count_finalize_list
> 0) {
339 DUK_D(DUK_DPRINT("marked %ld objects on the finalize_list as reachable (previous finalizer run skipped)",
340 (long) count_finalize_list
));
346 * Fallback marking handler if recursion limit is reached.
348 * Iterates 'temproots' until recursion limit is no longer hit. Note
349 * that temproots may reside either in heap allocated list or the
350 * refzero work list. This is a slow scan, but guarantees that we
351 * finish with a bounded C stack.
353 * Note that nodes may have been marked as temproots before this
354 * scan begun, OR they may have been marked during the scan (as
355 * we process nodes recursively also during the scan). This is
360 DUK_LOCAL
void duk__handle_temproot(duk_heap
*heap
, duk_heaphdr
*hdr
, duk_size_t
*count
) {
362 DUK_LOCAL
void duk__handle_temproot(duk_heap
*heap
, duk_heaphdr
*hdr
) {
364 if (!DUK_HEAPHDR_HAS_TEMPROOT(hdr
)) {
365 DUK_DDD(DUK_DDDPRINT("not a temp root: %p", (void *) hdr
));
369 DUK_DDD(DUK_DDDPRINT("found a temp root: %p", (void *) hdr
));
370 DUK_HEAPHDR_CLEAR_TEMPROOT(hdr
);
371 DUK_HEAPHDR_CLEAR_REACHABLE(hdr
); /* done so that duk__mark_heaphdr() works correctly */
372 duk__mark_heaphdr(heap
, hdr
);
379 DUK_LOCAL
void duk__mark_temproots_by_heap_scan(duk_heap
*heap
) {
385 DUK_DD(DUK_DDPRINT("duk__mark_temproots_by_heap_scan: %p", (void *) heap
));
387 while (DUK_HEAP_HAS_MARKANDSWEEP_RECLIMIT_REACHED(heap
)) {
388 DUK_DD(DUK_DDPRINT("recursion limit reached, doing heap scan to continue from temproots"));
393 DUK_HEAP_CLEAR_MARKANDSWEEP_RECLIMIT_REACHED(heap
);
395 hdr
= heap
->heap_allocated
;
398 duk__handle_temproot(heap
, hdr
, &count
);
400 duk__handle_temproot(heap
, hdr
);
402 hdr
= DUK_HEAPHDR_GET_NEXT(heap
, hdr
);
405 /* must also check refzero_list */
406 #ifdef DUK_USE_REFERENCE_COUNTING
407 hdr
= heap
->refzero_list
;
410 duk__handle_temproot(heap
, hdr
, &count
);
412 duk__handle_temproot(heap
, hdr
);
414 hdr
= DUK_HEAPHDR_GET_NEXT(heap
, hdr
);
416 #endif /* DUK_USE_REFERENCE_COUNTING */
419 DUK_DD(DUK_DDPRINT("temproot mark heap scan processed %ld temp roots", (long) count
));
425 * Finalize refcounts for heap elements just about to be freed.
426 * This must be done for all objects before freeing to avoid any
427 * stale pointer dereferences.
429 * Note that this must deduce the set of objects to be freed
430 * identically to duk__sweep_heap().
433 #ifdef DUK_USE_REFERENCE_COUNTING
434 DUK_LOCAL
void duk__finalize_refcounts(duk_heap
*heap
) {
438 thr
= duk__get_temp_hthread(heap
);
439 DUK_ASSERT(thr
!= NULL
);
441 DUK_DD(DUK_DDPRINT("duk__finalize_refcounts: heap=%p, hthread=%p",
442 (void *) heap
, (void *) thr
));
444 hdr
= heap
->heap_allocated
;
446 if (!DUK_HEAPHDR_HAS_REACHABLE(hdr
)) {
448 * Unreachable object about to be swept. Finalize target refcounts
449 * (objects which the unreachable object points to) without doing
450 * refzero processing. Recursive decrefs are also prevented when
451 * refzero processing is disabled.
453 * Value cannot be a finalizable object, as they have been made
454 * temporarily reachable for this round.
457 DUK_DDD(DUK_DDDPRINT("unreachable object, refcount finalize before sweeping: %p", (void *) hdr
));
458 duk_heaphdr_refcount_finalize(thr
, hdr
);
461 hdr
= DUK_HEAPHDR_GET_NEXT(heap
, hdr
);
464 #endif /* DUK_USE_REFERENCE_COUNTING */
467 * Clear (reachable) flags of refzero work list.
470 #ifdef DUK_USE_REFERENCE_COUNTING
471 DUK_LOCAL
void duk__clear_refzero_list_flags(duk_heap
*heap
) {
474 DUK_DD(DUK_DDPRINT("duk__clear_refzero_list_flags: %p", (void *) heap
));
476 hdr
= heap
->refzero_list
;
478 DUK_HEAPHDR_CLEAR_REACHABLE(hdr
);
479 DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(hdr
));
480 DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(hdr
));
481 DUK_ASSERT(!DUK_HEAPHDR_HAS_TEMPROOT(hdr
));
482 hdr
= DUK_HEAPHDR_GET_NEXT(heap
, hdr
);
485 #endif /* DUK_USE_REFERENCE_COUNTING */
488 * Clear (reachable) flags of finalize_list
490 * We could mostly do in the sweep phase when we move objects from the
491 * heap into the finalize_list. However, if a finalizer run is skipped
492 * during a mark-and-sweep, the objects on the finalize_list will be marked
493 * reachable during the next mark-and-sweep. Since they're already on the
494 * finalize_list, no-one will be clearing their REACHABLE flag so we do it
495 * here. (This now overlaps with the sweep handling in a harmless way.)
498 DUK_LOCAL
void duk__clear_finalize_list_flags(duk_heap
*heap
) {
501 DUK_DD(DUK_DDPRINT("duk__clear_finalize_list_flags: %p", (void *) heap
));
503 hdr
= heap
->finalize_list
;
505 DUK_HEAPHDR_CLEAR_REACHABLE(hdr
);
506 DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(hdr
));
507 DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(hdr
));
508 DUK_ASSERT(!DUK_HEAPHDR_HAS_TEMPROOT(hdr
));
509 hdr
= DUK_HEAPHDR_GET_NEXT(heap
, hdr
);
517 #if defined(DUK_USE_STRTAB_CHAIN)
519 /* XXX: skip count_free w/o debug? */
520 #if defined(DUK_USE_HEAPPTR16)
521 DUK_LOCAL
void duk__sweep_string_chain16(duk_heap
*heap
, duk_uint16_t
*slot
, duk_size_t
*count_keep
, duk_size_t
*count_free
) {
522 duk_uint16_t h16
= *slot
;
524 duk_uint16_t null16
= heap
->heapptr_null16
;
530 h
= (duk_hstring
*) DUK_USE_HEAPPTR_DEC16(heap
->heap_udata
, h16
);
531 DUK_ASSERT(h
!= NULL
);
533 if (DUK_HEAPHDR_HAS_REACHABLE((duk_heaphdr
*) h
)) {
534 DUK_HEAPHDR_CLEAR_REACHABLE((duk_heaphdr
*) h
);
537 #if defined(DUK_USE_REFERENCE_COUNTING)
538 DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT((duk_heaphdr
*) h
) == 0);
540 /* deal with weak references first */
541 duk_heap_strcache_string_remove(heap
, (duk_hstring
*) h
);
544 /* free inner references (these exist e.g. when external
545 * strings are enabled)
547 duk_free_hstring_inner(heap
, h
);
552 #else /* DUK_USE_HEAPPTR16 */
553 DUK_LOCAL
void duk__sweep_string_chain(duk_heap
*heap
, duk_hstring
**slot
, duk_size_t
*count_keep
, duk_size_t
*count_free
) {
554 duk_hstring
*h
= *slot
;
561 if (DUK_HEAPHDR_HAS_REACHABLE((duk_heaphdr
*) h
)) {
562 DUK_HEAPHDR_CLEAR_REACHABLE((duk_heaphdr
*) h
);
565 #if defined(DUK_USE_REFERENCE_COUNTING)
566 DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT((duk_heaphdr
*) h
) == 0);
568 /* deal with weak references first */
569 duk_heap_strcache_string_remove(heap
, (duk_hstring
*) h
);
572 /* free inner references (these exist e.g. when external
573 * strings are enabled)
575 duk_free_hstring_inner(heap
, h
);
580 #endif /* DUK_USE_HEAPPTR16 */
582 DUK_LOCAL
void duk__sweep_stringtable_chain(duk_heap
*heap
, duk_size_t
*out_count_keep
) {
585 duk_size_t count_free
= 0;
586 duk_size_t count_keep
= 0;
588 #if defined(DUK_USE_HEAPPTR16)
594 DUK_DD(DUK_DDPRINT("duk__sweep_stringtable: %p", (void *) heap
));
596 /* Non-zero refcounts should not happen for unreachable strings,
597 * because we refcount finalize all unreachable objects which
598 * should have decreased unreachable string refcounts to zero
602 for (i
= 0; i
< DUK_STRTAB_CHAIN_SIZE
; i
++) {
603 e
= heap
->strtable
+ i
;
604 if (e
->listlen
== 0) {
605 #if defined(DUK_USE_HEAPPTR16)
606 duk__sweep_string_chain16(heap
, &e
->u
.str16
, &count_keep
, &count_free
);
608 duk__sweep_string_chain(heap
, &e
->u
.str
, &count_keep
, &count_free
);
611 #if defined(DUK_USE_HEAPPTR16)
612 lst
= (duk_uint16_t
*) DUK_USE_HEAPPTR_DEC16(heap
->heap_udata
, e
->u
.strlist16
);
616 for (j
= 0, n
= e
->listlen
; j
< n
; j
++) {
617 #if defined(DUK_USE_HEAPPTR16)
618 duk__sweep_string_chain16(heap
, lst
+ j
, &count_keep
, &count_free
);
620 duk__sweep_string_chain(heap
, lst
+ j
, &count_keep
, &count_free
);
626 DUK_D(DUK_DPRINT("mark-and-sweep sweep stringtable: %ld freed, %ld kept",
627 (long) count_free
, (long) count_keep
));
628 *out_count_keep
= count_keep
;
630 #endif /* DUK_USE_STRTAB_CHAIN */
632 #if defined(DUK_USE_STRTAB_PROBE)
633 DUK_LOCAL
void duk__sweep_stringtable_probe(duk_heap
*heap
, duk_size_t
*out_count_keep
) {
637 duk_size_t count_free
= 0;
639 duk_size_t count_keep
= 0;
641 DUK_DD(DUK_DDPRINT("duk__sweep_stringtable: %p", (void *) heap
));
643 for (i
= 0; i
< heap
->st_size
; i
++) {
644 #if defined(DUK_USE_HEAPPTR16)
645 h
= (duk_hstring
*) DUK_USE_HEAPPTR_DEC16(heap
->strtable16
[i
]);
647 h
= heap
->strtable
[i
];
649 if (h
== NULL
|| h
== DUK_STRTAB_DELETED_MARKER(heap
)) {
651 } else if (DUK_HEAPHDR_HAS_REACHABLE((duk_heaphdr
*) h
)) {
652 DUK_HEAPHDR_CLEAR_REACHABLE((duk_heaphdr
*) h
);
661 #if defined(DUK_USE_REFERENCE_COUNTING)
662 /* Non-zero refcounts should not happen for unreachable strings,
663 * because we refcount finalize all unreachable objects which
664 * should have decreased unreachable string refcounts to zero
667 DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT((duk_heaphdr
*) h
) == 0);
670 DUK_DDD(DUK_DDDPRINT("sweep string, not reachable: %p", (void *) h
));
672 /* deal with weak references first */
673 duk_heap_strcache_string_remove(heap
, (duk_hstring
*) h
);
675 /* remove the string (mark DELETED), could also call
676 * duk_heap_string_remove() but that would be slow and
677 * pointless because we already know the slot.
679 #if defined(DUK_USE_HEAPPTR16)
680 heap
->strtable16
[i
] = heap
->heapptr_deleted16
;
682 heap
->strtable
[i
] = DUK_STRTAB_DELETED_MARKER(heap
);
685 /* free inner references (these exist e.g. when external
686 * strings are enabled)
688 duk_free_hstring_inner(heap
, (duk_hstring
*) h
);
690 /* finally free the struct itself */
695 DUK_D(DUK_DPRINT("mark-and-sweep sweep stringtable: %ld freed, %ld kept",
696 (long) count_free
, (long) count_keep
));
698 *out_count_keep
= count_keep
;
700 #endif /* DUK_USE_STRTAB_PROBE */
706 DUK_LOCAL
void duk__sweep_heap(duk_heap
*heap
, duk_int_t flags
, duk_size_t
*out_count_keep
) {
707 duk_heaphdr
*prev
; /* last element that was left in the heap */
711 duk_size_t count_free
= 0;
712 duk_size_t count_finalize
= 0;
713 duk_size_t count_rescue
= 0;
715 duk_size_t count_keep
= 0;
718 DUK_DD(DUK_DDPRINT("duk__sweep_heap: %p", (void *) heap
));
721 curr
= heap
->heap_allocated
;
722 heap
->heap_allocated
= NULL
;
724 /* strings are never placed on the heap allocated list */
725 DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(curr
) != DUK_HTYPE_STRING
);
727 next
= DUK_HEAPHDR_GET_NEXT(heap
, curr
);
729 if (DUK_HEAPHDR_HAS_REACHABLE(curr
)) {
731 * Reachable object, keep
734 DUK_DDD(DUK_DDDPRINT("sweep, reachable: %p", (void *) curr
));
736 if (DUK_HEAPHDR_HAS_FINALIZABLE(curr
)) {
738 * If object has been marked finalizable, move it to the
739 * "to be finalized" work list. It will be collected on
740 * the next mark-and-sweep if it is still unreachable
741 * after running the finalizer.
744 DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(curr
));
745 DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(curr
) == DUK_HTYPE_OBJECT
);
746 DUK_DDD(DUK_DDDPRINT("object has finalizer, move to finalization work list: %p", (void *) curr
));
748 #ifdef DUK_USE_DOUBLE_LINKED_HEAP
749 if (heap
->finalize_list
) {
750 DUK_HEAPHDR_SET_PREV(heap
, heap
->finalize_list
, curr
);
752 DUK_HEAPHDR_SET_PREV(heap
, curr
, NULL
);
754 DUK_HEAPHDR_SET_NEXT(heap
, curr
, heap
->finalize_list
);
755 heap
->finalize_list
= curr
;
761 * Object will be kept; queue object back to heap_allocated (to tail)
764 if (DUK_HEAPHDR_HAS_FINALIZED(curr
)) {
766 * Object's finalizer was executed on last round, and
767 * object has been happily rescued.
770 DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(curr
));
771 DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(curr
) == DUK_HTYPE_OBJECT
);
772 DUK_DD(DUK_DDPRINT("object rescued during mark-and-sweep finalization: %p", (void *) curr
));
778 * Plain, boring reachable object.
783 if (!heap
->heap_allocated
) {
784 heap
->heap_allocated
= curr
;
787 DUK_HEAPHDR_SET_NEXT(heap
, prev
, curr
);
789 #ifdef DUK_USE_DOUBLE_LINKED_HEAP
790 DUK_HEAPHDR_SET_PREV(heap
, curr
, prev
);
795 DUK_HEAPHDR_CLEAR_REACHABLE(curr
);
796 DUK_HEAPHDR_CLEAR_FINALIZED(curr
);
797 DUK_HEAPHDR_CLEAR_FINALIZABLE(curr
);
799 DUK_ASSERT(!DUK_HEAPHDR_HAS_REACHABLE(curr
));
800 DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(curr
));
801 DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(curr
));
806 * Unreachable object, free
809 DUK_DDD(DUK_DDDPRINT("sweep, not reachable: %p", (void *) curr
));
811 #if defined(DUK_USE_REFERENCE_COUNTING)
812 /* Non-zero refcounts should not happen because we refcount
813 * finalize all unreachable objects which should cancel out
814 * refcounts (even for cycles).
816 DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(curr
) == 0);
818 DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(curr
));
820 if (DUK_HEAPHDR_HAS_FINALIZED(curr
)) {
821 DUK_DDD(DUK_DDDPRINT("finalized object not rescued: %p", (void *) curr
));
824 /* Note: object cannot be a finalizable unreachable object, as
825 * they have been marked temporarily reachable for this round,
826 * and are handled above.
833 /* weak refs should be handled here, but no weak refs for
834 * any non-string objects exist right now.
837 /* free object and all auxiliary (non-heap) allocs */
838 duk_heap_free_heaphdr_raw(heap
, curr
);
844 DUK_HEAPHDR_SET_NEXT(heap
, prev
, NULL
);
848 DUK_D(DUK_DPRINT("mark-and-sweep sweep objects (non-string): %ld freed, %ld kept, %ld rescued, %ld queued for finalization",
849 (long) count_free
, (long) count_keep
, (long) count_rescue
, (long) count_finalize
));
851 *out_count_keep
= count_keep
;
855 * Run (object) finalizers in the "to be finalized" work list.
858 DUK_LOCAL
void duk__run_object_finalizers(duk_heap
*heap
) {
862 duk_size_t count
= 0;
866 DUK_DD(DUK_DDPRINT("duk__run_object_finalizers: %p", (void *) heap
));
868 thr
= duk__get_temp_hthread(heap
);
869 DUK_ASSERT(thr
!= NULL
);
871 curr
= heap
->finalize_list
;
873 DUK_DDD(DUK_DDDPRINT("mark-and-sweep finalize: %p", (void *) curr
));
875 DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(curr
) == DUK_HTYPE_OBJECT
); /* only objects have finalizers */
876 DUK_ASSERT(!DUK_HEAPHDR_HAS_REACHABLE(curr
)); /* flags have been already cleared */
877 DUK_ASSERT(!DUK_HEAPHDR_HAS_TEMPROOT(curr
));
878 DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(curr
));
879 DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(curr
));
881 /* run the finalizer */
882 duk_hobject_run_finalizer(thr
, (duk_hobject
*) curr
); /* must never longjmp */
884 /* mark FINALIZED, for next mark-and-sweep (will collect unless has become reachable;
885 * prevent running finalizer again if reachable)
887 DUK_HEAPHDR_SET_FINALIZED(curr
);
889 /* queue back to heap_allocated */
890 next
= DUK_HEAPHDR_GET_NEXT(heap
, curr
);
891 DUK_HEAP_INSERT_INTO_HEAP_ALLOCATED(heap
, curr
);
899 /* finalize_list will always be processed completely */
900 heap
->finalize_list
= NULL
;
903 DUK_D(DUK_DPRINT("mark-and-sweep finalize objects: %ld finalizers called", (long) count
));
910 * Compaction is assumed to never throw an error.
913 DUK_LOCAL
int duk__protected_compact_object(duk_context
*ctx
) {
914 /* XXX: for threads, compact value stack, call stack, catch stack? */
916 duk_hobject
*obj
= duk_get_hobject(ctx
, -1);
917 DUK_ASSERT(obj
!= NULL
);
918 duk_hobject_compact_props((duk_hthread
*) ctx
, obj
);
923 DUK_LOCAL
void duk__compact_object_list(duk_heap
*heap
, duk_hthread
*thr
, duk_heaphdr
*start
, duk_size_t
*p_count_check
, duk_size_t
*p_count_compact
, duk_size_t
*p_count_bytes_saved
) {
925 DUK_LOCAL
void duk__compact_object_list(duk_heap
*heap
, duk_hthread
*thr
, duk_heaphdr
*start
) {
929 duk_size_t old_size
, new_size
;
937 DUK_DDD(DUK_DDDPRINT("mark-and-sweep compact: %p", (void *) curr
));
939 if (DUK_HEAPHDR_GET_TYPE(curr
) != DUK_HTYPE_OBJECT
) {
942 obj
= (duk_hobject
*) curr
;
945 old_size
= DUK_HOBJECT_P_COMPUTE_SIZE(DUK_HOBJECT_GET_ESIZE(obj
),
946 DUK_HOBJECT_GET_ASIZE(obj
),
947 DUK_HOBJECT_GET_HSIZE(obj
));
950 DUK_DD(DUK_DDPRINT("compact object: %p", (void *) obj
));
951 duk_push_hobject((duk_context
*) thr
, obj
);
952 /* XXX: disable error handlers for duration of compaction? */
953 duk_safe_call((duk_context
*) thr
, duk__protected_compact_object
, 1, 0);
956 new_size
= DUK_HOBJECT_P_COMPUTE_SIZE(DUK_HOBJECT_GET_ESIZE(obj
),
957 DUK_HOBJECT_GET_ASIZE(obj
),
958 DUK_HOBJECT_GET_HSIZE(obj
));
962 (*p_count_compact
)++;
963 (*p_count_bytes_saved
) += (duk_size_t
) (old_size
- new_size
);
967 curr
= DUK_HEAPHDR_GET_NEXT(heap
, curr
);
974 DUK_LOCAL
void duk__compact_objects(duk_heap
*heap
) {
975 /* XXX: which lists should participate? to be finalized? */
977 duk_size_t count_check
= 0;
978 duk_size_t count_compact
= 0;
979 duk_size_t count_bytes_saved
= 0;
983 DUK_DD(DUK_DDPRINT("duk__compact_objects: %p", (void *) heap
));
985 thr
= duk__get_temp_hthread(heap
);
986 DUK_ASSERT(thr
!= NULL
);
989 duk__compact_object_list(heap
, thr
, heap
->heap_allocated
, &count_check
, &count_compact
, &count_bytes_saved
);
990 duk__compact_object_list(heap
, thr
, heap
->finalize_list
, &count_check
, &count_compact
, &count_bytes_saved
);
991 #ifdef DUK_USE_REFERENCE_COUNTING
992 duk__compact_object_list(heap
, thr
, heap
->refzero_list
, &count_check
, &count_compact
, &count_bytes_saved
);
995 duk__compact_object_list(heap
, thr
, heap
->heap_allocated
);
996 duk__compact_object_list(heap
, thr
, heap
->finalize_list
);
997 #ifdef DUK_USE_REFERENCE_COUNTING
998 duk__compact_object_list(heap
, thr
, heap
->refzero_list
);
1002 #ifdef DUK_USE_DEBUG
1003 DUK_D(DUK_DPRINT("mark-and-sweep compact objects: %ld checked, %ld compaction attempts, %ld bytes saved by compaction",
1004 (long) count_check
, (long) count_compact
, (long) count_bytes_saved
));
1009 * Assertion helpers.
1012 #ifdef DUK_USE_ASSERTIONS
1013 DUK_LOCAL
void duk__assert_heaphdr_flags(duk_heap
*heap
) {
1016 hdr
= heap
->heap_allocated
;
1018 DUK_ASSERT(!DUK_HEAPHDR_HAS_REACHABLE(hdr
));
1019 DUK_ASSERT(!DUK_HEAPHDR_HAS_TEMPROOT(hdr
));
1020 DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(hdr
));
1021 /* may have FINALIZED */
1022 hdr
= DUK_HEAPHDR_GET_NEXT(heap
, hdr
);
1025 #ifdef DUK_USE_REFERENCE_COUNTING
1026 hdr
= heap
->refzero_list
;
1028 DUK_ASSERT(!DUK_HEAPHDR_HAS_REACHABLE(hdr
));
1029 DUK_ASSERT(!DUK_HEAPHDR_HAS_TEMPROOT(hdr
));
1030 DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZABLE(hdr
));
1031 DUK_ASSERT(!DUK_HEAPHDR_HAS_FINALIZED(hdr
));
1032 hdr
= DUK_HEAPHDR_GET_NEXT(heap
, hdr
);
1034 #endif /* DUK_USE_REFERENCE_COUNTING */
1037 #ifdef DUK_USE_REFERENCE_COUNTING
1038 DUK_LOCAL
void duk__assert_valid_refcounts(duk_heap
*heap
) {
1039 duk_heaphdr
*hdr
= heap
->heap_allocated
;
1041 if (DUK_HEAPHDR_GET_REFCOUNT(hdr
) == 0 &&
1042 DUK_HEAPHDR_HAS_FINALIZED(hdr
)) {
1043 /* An object may be in heap_allocated list with a zero
1044 * refcount if it has just been finalized and is waiting
1045 * to be collected by the next cycle.
1047 } else if (DUK_HEAPHDR_GET_REFCOUNT(hdr
) == 0) {
1048 /* An object may be in heap_allocated list with a zero
1049 * refcount also if it is a temporary object created by
1050 * a finalizer; because finalization now runs inside
1051 * mark-and-sweep, such objects will not be queued to
1052 * refzero_list and will thus appear here with refcount
1055 #if 0 /* this case can no longer occur because refcount is unsigned */
1056 } else if (DUK_HEAPHDR_GET_REFCOUNT(hdr
) < 0) {
1057 DUK_D(DUK_DPRINT("invalid refcount: %ld, %p -> %!O",
1058 (hdr
!= NULL
? (long) DUK_HEAPHDR_GET_REFCOUNT(hdr
) : (long) 0),
1059 (void *) hdr
, (duk_heaphdr
*) hdr
));
1060 DUK_ASSERT(DUK_HEAPHDR_GET_REFCOUNT(hdr
) > 0);
1063 hdr
= DUK_HEAPHDR_GET_NEXT(heap
, hdr
);
1066 #endif /* DUK_USE_REFERENCE_COUNTING */
1067 #endif /* DUK_USE_ASSERTIONS */
1070 * Finalizer torture. Do one fake finalizer call which causes side effects
1071 * similar to one or more finalizers on actual objects.
1074 #if defined(DUK_USE_MARKANDSWEEP_FINALIZER_TORTURE)
1075 DUK_LOCAL duk_ret_t
duk__markandsweep_fake_finalizer(duk_context
*ctx
) {
1076 DUK_D(DUK_DPRINT("fake mark-and-sweep torture finalizer executed"));
1078 /* Require a lot of stack to force a value stack grow/shrink.
1079 * Recursive mark-and-sweep is prevented by allocation macros
1080 * so this won't trigger another mark-and-sweep.
1082 duk_require_stack(ctx
, 100000);
1084 /* XXX: do something to force a callstack grow/shrink, perhaps
1085 * just a manual forced resize or a forced relocating realloc?
1091 DUK_LOCAL
void duk__markandsweep_torture_finalizer(duk_hthread
*thr
) {
1095 DUK_ASSERT(thr
!= NULL
);
1096 ctx
= (duk_context
*) thr
;
1098 /* Avoid fake finalization when callstack limit has been reached.
1099 * Otherwise a callstack limit error will be created, then refzero'ed.
1101 if (thr
->heap
->call_recursion_depth
>= thr
->heap
->call_recursion_limit
||
1102 thr
->callstack_size
+ 2 * DUK_CALLSTACK_GROW_STEP
>= thr
->callstack_max
/*approximate*/) {
1103 DUK_D(DUK_DPRINT("call recursion depth reached, avoid fake mark-and-sweep torture finalizer"));
1107 /* Run fake finalizer. Avoid creating unnecessary garbage. */
1108 duk_push_c_function(ctx
, duk__markandsweep_fake_finalizer
, 0 /*nargs*/);
1109 rc
= duk_pcall(ctx
, 0 /*nargs*/);
1110 DUK_UNREF(rc
); /* ignored */
1113 #endif /* DUK_USE_MARKANDSWEEP_FINALIZER_TORTURE */
1116 * Main mark-and-sweep function.
1118 * 'flags' represents the features requested by the caller. The current
1119 * heap->mark_and_sweep_base_flags is ORed automatically into the flags;
1120 * the base flags mask typically prevents certain mark-and-sweep operations
1124 DUK_INTERNAL duk_bool_t
duk_heap_mark_and_sweep(duk_heap
*heap
, duk_small_uint_t flags
) {
1126 duk_size_t count_keep_obj
;
1127 duk_size_t count_keep_str
;
1128 #ifdef DUK_USE_VOLUNTARY_GC
1132 /* XXX: thread selection for mark-and-sweep is currently a hack.
1133 * If we don't have a thread, the entire mark-and-sweep is now
1134 * skipped (although we could just skip finalizations).
1136 /* XXX: if thr != NULL, the thr may still be in the middle of
1137 * initialization; improve the thread viability test.
1139 thr
= duk__get_temp_hthread(heap
);
1141 DUK_D(DUK_DPRINT("temporary hack: gc skipped because we don't have a temp thread"));
1143 /* reset voluntary gc trigger count */
1144 #ifdef DUK_USE_VOLUNTARY_GC
1145 heap
->mark_and_sweep_trigger_counter
= DUK_HEAP_MARK_AND_SWEEP_TRIGGER_SKIP
;
1150 DUK_D(DUK_DPRINT("garbage collect (mark-and-sweep) starting, requested flags: 0x%08lx, effective flags: 0x%08lx",
1151 (unsigned long) flags
, (unsigned long) (flags
| heap
->mark_and_sweep_base_flags
)));
1153 flags
|= heap
->mark_and_sweep_base_flags
;
1159 #ifdef DUK_USE_ASSERTIONS
1160 DUK_ASSERT(!DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap
));
1161 DUK_ASSERT(!DUK_HEAP_HAS_MARKANDSWEEP_RECLIMIT_REACHED(heap
));
1162 DUK_ASSERT(heap
->mark_and_sweep_recursion_depth
== 0);
1163 duk__assert_heaphdr_flags(heap
);
1164 #ifdef DUK_USE_REFERENCE_COUNTING
1165 /* Note: DUK_HEAP_HAS_REFZERO_FREE_RUNNING(heap) may be true; a refcount
1166 * finalizer may trigger a mark-and-sweep.
1168 duk__assert_valid_refcounts(heap
);
1169 #endif /* DUK_USE_REFERENCE_COUNTING */
1170 #endif /* DUK_USE_ASSERTIONS */
1176 DUK_HEAP_SET_MARKANDSWEEP_RUNNING(heap
);
1179 * Mark roots, hoping that recursion limit is not normally hit.
1180 * If recursion limit is hit, run additional reachability rounds
1181 * starting from "temproots" until marking is complete.
1183 * Marking happens in two phases: first we mark actual reachability
1184 * roots (and run "temproots" to complete the process). Then we
1185 * check which objects are unreachable and are finalizable; such
1186 * objects are marked as FINALIZABLE and marked as reachability
1187 * (and "temproots" is run again to complete the process).
1189 * The heap finalize_list must also be marked as a reachability root.
1190 * There may be objects on the list from a previous round if the
1191 * previous run had finalizer skip flag.
1194 duk__mark_roots_heap(heap
); /* main reachability roots */
1195 #ifdef DUK_USE_REFERENCE_COUNTING
1196 duk__mark_refzero_list(heap
); /* refzero_list treated as reachability roots */
1198 duk__mark_temproots_by_heap_scan(heap
); /* temproots */
1200 duk__mark_finalizable(heap
); /* mark finalizable as reachability roots */
1201 duk__mark_finalize_list(heap
); /* mark finalizer work list as reachability roots */
1202 duk__mark_temproots_by_heap_scan(heap
); /* temproots */
1205 * Sweep garbage and remove marking flags, and move objects with
1206 * finalizers to the finalizer work list.
1208 * Objects to be swept need to get their refcounts finalized before
1209 * they are swept. In other words, their target object refcounts
1210 * need to be decreased. This has to be done before freeing any
1211 * objects to avoid decref'ing dangling pointers (which may happen
1212 * even without bugs, e.g. with reference loops)
1214 * Because strings don't point to other heap objects, similar
1215 * finalization is not necessary for strings.
1218 /* XXX: more emergency behavior, e.g. find smaller hash sizes etc */
1220 #ifdef DUK_USE_REFERENCE_COUNTING
1221 duk__finalize_refcounts(heap
);
1223 duk__sweep_heap(heap
, flags
, &count_keep_obj
);
1224 #if defined(DUK_USE_STRTAB_CHAIN)
1225 duk__sweep_stringtable_chain(heap
, &count_keep_str
);
1226 #elif defined(DUK_USE_STRTAB_PROBE)
1227 duk__sweep_stringtable_probe(heap
, &count_keep_str
);
1229 #error internal error, invalid strtab options
1231 #ifdef DUK_USE_REFERENCE_COUNTING
1232 duk__clear_refzero_list_flags(heap
);
1234 duk__clear_finalize_list_flags(heap
);
1237 * Object compaction (emergency only).
1239 * Object compaction is a separate step after sweeping, as there is
1240 * more free memory for it to work with. Also, currently compaction
1241 * may insert new objects into the heap allocated list and the string
1242 * table which we don't want to do during a sweep (the reachability
1243 * flags of such objects would be incorrect). The objects inserted
1246 * - a temporary duk_hbuffer for a new properties allocation
1247 * - if array part is abandoned, string keys are interned
1249 * The object insertions go to the front of the list, so they do not
1250 * cause an infinite loop (they are not compacted).
1253 if ((flags
& DUK_MS_FLAG_EMERGENCY
) &&
1254 !(flags
& DUK_MS_FLAG_NO_OBJECT_COMPACTION
)) {
1255 duk__compact_objects(heap
);
1259 * String table resize check.
1261 * Note: this may silently (and safely) fail if GC is caused by an
1262 * allocation call in stringtable resize_hash(). Resize_hash()
1263 * will prevent a recursive call to itself by setting the
1264 * DUK_MS_FLAG_NO_STRINGTABLE_RESIZE in heap->mark_and_sweep_base_flags.
1267 /* XXX: stringtable emergency compaction? */
1269 #if defined(DUK_USE_MS_STRINGTABLE_RESIZE)
1270 if (!(flags
& DUK_MS_FLAG_NO_STRINGTABLE_RESIZE
)) {
1271 DUK_DD(DUK_DDPRINT("resize stringtable: %p", (void *) heap
));
1272 duk_heap_force_strtab_resize(heap
);
1274 DUK_D(DUK_DPRINT("stringtable resize skipped because DUK_MS_FLAG_NO_STRINGTABLE_RESIZE is set"));
1279 * Finalize objects in the finalization work list. Finalized
1280 * objects are queued back to heap_allocated with FINALIZED set.
1282 * Since finalizers may cause arbitrary side effects, they are
1283 * prevented during string table and object property allocation
1284 * resizing using the DUK_MS_FLAG_NO_FINALIZERS flag in
1285 * heap->mark_and_sweep_base_flags. In this case the objects
1286 * remain in the finalization work list after mark-and-sweep
1287 * exits and they may be finalized on the next pass.
1289 * Finalization currently happens inside "MARKANDSWEEP_RUNNING"
1290 * protection (no mark-and-sweep may be triggered by the
1291 * finalizers). As a side effect:
1293 * 1) an out-of-memory error inside a finalizer will not
1294 * cause a mark-and-sweep and may cause the finalizer
1295 * to fail unnecessarily
1297 * 2) any temporary objects whose refcount decreases to zero
1298 * during finalization will not be put into refzero_list;
1299 * they can only be collected by another mark-and-sweep
1301 * This is not optimal, but since the sweep for this phase has
1302 * already happened, this is probably good enough for now.
1305 #if defined(DUK_USE_MARKANDSWEEP_FINALIZER_TORTURE)
1306 /* Cannot simulate individual finalizers because finalize_list only
1307 * contains objects with actual finalizers. But simulate side effects
1308 * from finalization by doing a bogus function call and resizing the
1311 if (flags
& DUK_MS_FLAG_NO_FINALIZERS
) {
1312 DUK_D(DUK_DPRINT("skip mark-and-sweep torture finalizer, DUK_MS_FLAG_NO_FINALIZERS is set"));
1313 } else if (!(thr
->valstack
!= NULL
&& thr
->callstack
!= NULL
&& thr
->catchstack
!= NULL
)) {
1314 DUK_D(DUK_DPRINT("skip mark-and-sweep torture finalizer, thread not yet viable"));
1316 DUK_D(DUK_DPRINT("run mark-and-sweep torture finalizer"));
1317 duk__markandsweep_torture_finalizer(thr
);
1319 #endif /* DUK_USE_MARKANDSWEEP_FINALIZER_TORTURE */
1321 if (flags
& DUK_MS_FLAG_NO_FINALIZERS
) {
1322 DUK_D(DUK_DPRINT("finalizer run skipped because DUK_MS_FLAG_NO_FINALIZERS is set"));
1324 duk__run_object_finalizers(heap
);
1331 DUK_HEAP_CLEAR_MARKANDSWEEP_RUNNING(heap
);
1337 #ifdef DUK_USE_ASSERTIONS
1338 DUK_ASSERT(!DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap
));
1339 DUK_ASSERT(!DUK_HEAP_HAS_MARKANDSWEEP_RECLIMIT_REACHED(heap
));
1340 DUK_ASSERT(heap
->mark_and_sweep_recursion_depth
== 0);
1341 duk__assert_heaphdr_flags(heap
);
1342 #ifdef DUK_USE_REFERENCE_COUNTING
1343 /* Note: DUK_HEAP_HAS_REFZERO_FREE_RUNNING(heap) may be true; a refcount
1344 * finalizer may trigger a mark-and-sweep.
1346 duk__assert_valid_refcounts(heap
);
1347 #endif /* DUK_USE_REFERENCE_COUNTING */
1348 #endif /* DUK_USE_ASSERTIONS */
1351 * Reset trigger counter
1354 #ifdef DUK_USE_VOLUNTARY_GC
1355 tmp
= (count_keep_obj
+ count_keep_str
) / 256;
1356 heap
->mark_and_sweep_trigger_counter
= (duk_int_t
) (
1357 (tmp
* DUK_HEAP_MARK_AND_SWEEP_TRIGGER_MULT
) +
1358 DUK_HEAP_MARK_AND_SWEEP_TRIGGER_ADD
);
1359 DUK_D(DUK_DPRINT("garbage collect (mark-and-sweep) finished: %ld objects kept, %ld strings kept, trigger reset to %ld",
1360 (long) count_keep_obj
, (long) count_keep_str
, (long) heap
->mark_and_sweep_trigger_counter
));
1362 DUK_D(DUK_DPRINT("garbage collect (mark-and-sweep) finished: %ld objects kept, %ld strings kept, no voluntary trigger",
1363 (long) count_keep_obj
, (long) count_keep_str
));
1369 #else /* DUK_USE_MARK_AND_SWEEP */
1371 /* no mark-and-sweep gc */
1373 #endif /* DUK_USE_MARK_AND_SWEEP */