/*
 *  Manipulation of thread stacks (valstack, callstack, catchstack).
 *
 *  Ideally unwinding of stacks should have no side effects, which would
 *  then favor separate unwinding and shrink check primitives for each
 *  stack type.  A shrink check may realloc and thus have side effects.
 *
 *  However, currently callstack unwinding itself has side effects, as it
 *  needs to DECREF multiple objects, close environment records, etc.
 *  Stacks must thus be unwound in the correct order by the caller.
 *
 *  (XXX: This should be probably reworked so that there is a shared
 *  unwind primitive which handles all stacks as requested, and knows
 *  the proper order for unwinding.)
 *
 *  Valstack entries above 'top' are always kept initialized to
 *  "undefined unused".  Callstack and catchstack entries above 'top'
 *  are not zeroed and are left as garbage.
 *
 *  Value stack handling is mostly a part of the API implementation.
 */
22 | ||
23 | #include "duk_internal.h" | |
24 | ||
25 | /* check that there is space for at least one new entry */ | |
26 | DUK_INTERNAL void duk_hthread_callstack_grow(duk_hthread *thr) { | |
27 | duk_activation *new_ptr; | |
28 | duk_size_t old_size; | |
29 | duk_size_t new_size; | |
30 | ||
31 | DUK_ASSERT(thr != NULL); | |
32 | DUK_ASSERT_DISABLE(thr->callstack_top >= 0); /* avoid warning (unsigned) */ | |
33 | DUK_ASSERT(thr->callstack_size >= thr->callstack_top); | |
34 | ||
35 | if (thr->callstack_top < thr->callstack_size) { | |
36 | return; | |
37 | } | |
38 | ||
39 | old_size = thr->callstack_size; | |
40 | new_size = old_size + DUK_CALLSTACK_GROW_STEP; | |
41 | ||
42 | /* this is a bit approximate (errors out before max is reached); this is OK */ | |
43 | if (new_size >= thr->callstack_max) { | |
11fdf7f2 | 44 | DUK_ERROR_RANGE(thr, DUK_STR_CALLSTACK_LIMIT); |
7c673cae FG |
45 | } |
46 | ||
47 | DUK_DD(DUK_DDPRINT("growing callstack %ld -> %ld", (long) old_size, (long) new_size)); | |
48 | ||
49 | /* | |
50 | * Note: must use indirect variant of DUK_REALLOC() because underlying | |
51 | * pointer may be changed by mark-and-sweep. | |
52 | */ | |
53 | ||
54 | DUK_ASSERT(new_size > 0); | |
55 | new_ptr = (duk_activation *) DUK_REALLOC_INDIRECT(thr->heap, duk_hthread_get_callstack_ptr, (void *) thr, sizeof(duk_activation) * new_size); | |
56 | if (!new_ptr) { | |
57 | /* No need for a NULL/zero-size check because new_size > 0) */ | |
11fdf7f2 | 58 | DUK_ERROR_ALLOC_DEFMSG(thr); |
7c673cae FG |
59 | } |
60 | thr->callstack = new_ptr; | |
61 | thr->callstack_size = new_size; | |
62 | ||
63 | /* note: any entries above the callstack top are garbage and not zeroed */ | |
64 | } | |
65 | ||
66 | DUK_INTERNAL void duk_hthread_callstack_shrink_check(duk_hthread *thr) { | |
67 | duk_size_t new_size; | |
68 | duk_activation *p; | |
69 | ||
70 | DUK_ASSERT(thr != NULL); | |
71 | DUK_ASSERT_DISABLE(thr->callstack_top >= 0); /* avoid warning (unsigned) */ | |
72 | DUK_ASSERT(thr->callstack_size >= thr->callstack_top); | |
73 | ||
74 | if (thr->callstack_size - thr->callstack_top < DUK_CALLSTACK_SHRINK_THRESHOLD) { | |
75 | return; | |
76 | } | |
77 | ||
78 | new_size = thr->callstack_top + DUK_CALLSTACK_SHRINK_SPARE; | |
79 | DUK_ASSERT(new_size >= thr->callstack_top); | |
80 | ||
81 | DUK_DD(DUK_DDPRINT("shrinking callstack %ld -> %ld", (long) thr->callstack_size, (long) new_size)); | |
82 | ||
83 | /* | |
84 | * Note: must use indirect variant of DUK_REALLOC() because underlying | |
85 | * pointer may be changed by mark-and-sweep. | |
86 | */ | |
87 | ||
88 | /* shrink failure is not fatal */ | |
89 | p = (duk_activation *) DUK_REALLOC_INDIRECT(thr->heap, duk_hthread_get_callstack_ptr, (void *) thr, sizeof(duk_activation) * new_size); | |
90 | if (p) { | |
91 | thr->callstack = p; | |
92 | thr->callstack_size = new_size; | |
93 | } else { | |
94 | /* Because new_size != 0, if condition doesn't need to be | |
95 | * (p != NULL || new_size == 0). | |
96 | */ | |
97 | DUK_ASSERT(new_size != 0); | |
98 | DUK_D(DUK_DPRINT("callstack shrink failed, ignoring")); | |
99 | } | |
100 | ||
101 | /* note: any entries above the callstack top are garbage and not zeroed */ | |
102 | } | |
103 | ||
/* Unwind the callstack from its current top down to 'new_top', performing
 * the per-activation side effects required by unwinding: restoring the
 * non-standard 'caller' property, pausing the debugger on step, closing
 * environment records, updating the yield-prevent count, and releasing
 * references held by the activation.
 *
 * Because DECREFs may trigger finalizers and mark-and-sweep, which can
 * resize (realloc) the callstack, 'act' is re-looked-up after every
 * operation with potential side effects.
 */
DUK_INTERNAL void duk_hthread_callstack_unwind(duk_hthread *thr, duk_size_t new_top) {
	duk_size_t idx;

	DUK_DDD(DUK_DDDPRINT("unwind callstack top of thread %p from %ld to %ld",
	                     (void *) thr,
	                     (thr != NULL ? (long) thr->callstack_top : (long) -1),
	                     (long) new_top));

	DUK_ASSERT(thr);
	DUK_ASSERT(thr->heap);
	DUK_ASSERT_DISABLE(new_top >= 0);  /* unsigned */
	DUK_ASSERT((duk_size_t) new_top <= thr->callstack_top);  /* cannot grow */

	/*
	 *  The loop below must avoid issues with potential callstack
	 *  reallocations.  A resize (and other side effects) may happen
	 *  e.g. due to finalizer/errhandler calls caused by a refzero or
	 *  mark-and-sweep.  Arbitrary finalizers may run, because when
	 *  an environment record is refzero'd, it may refer to arbitrary
	 *  values which also become refzero'd.
	 *
	 *  So, the pointer 'act' is re-looked-up below whenever a side
	 *  effect might have changed it.
	 */

	idx = thr->callstack_top;
	while (idx > new_top) {
		duk_activation *act;
		duk_hobject *func;
#ifdef DUK_USE_REFERENCE_COUNTING
		duk_hobject *tmp;
#endif
#ifdef DUK_USE_DEBUGGER_SUPPORT
		duk_heap *heap;
#endif

		idx--;
		DUK_ASSERT_DISABLE(idx >= 0);  /* unsigned */
		DUK_ASSERT((duk_size_t) idx < thr->callstack_size);  /* true, despite side effect resizes */

		act = thr->callstack + idx;
		/* With lightfuncs, act 'func' may be NULL. */

#ifdef DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
		/*
		 *  Restore 'caller' property for non-strict callee functions.
		 */

		func = DUK_ACT_GET_FUNC(act);
		if (func != NULL && !DUK_HOBJECT_HAS_STRICT(func)) {
			duk_tval *tv_caller;
			duk_tval tv_tmp;
			duk_hobject *h_tmp;

			tv_caller = duk_hobject_find_existing_entry_tval_ptr(thr->heap, func, DUK_HTHREAD_STRING_CALLER(thr));

			/* The act->prev_caller should only be set if the entry for 'caller'
			 * exists (as it is only set in that case, and the property is not
			 * configurable), but handle all the cases anyway.
			 */

			if (tv_caller) {
				/* Stash the current 'caller' value so it can be DECREF'd
				 * only after the slot has been overwritten.
				 */
				DUK_TVAL_SET_TVAL(&tv_tmp, tv_caller);
				if (act->prev_caller) {
					/* Just transfer the refcount from act->prev_caller to tv_caller,
					 * so no need for a refcount update.  This is the expected case.
					 */
					DUK_TVAL_SET_OBJECT(tv_caller, act->prev_caller);
					act->prev_caller = NULL;
				} else {
					DUK_TVAL_SET_NULL(tv_caller);  /* no incref needed */
					DUK_ASSERT(act->prev_caller == NULL);
				}
				DUK_TVAL_DECREF(thr, &tv_tmp);  /* side effects */
			} else {
				/* No 'caller' entry; just drop the saved previous caller
				 * reference if one exists.
				 */
				h_tmp = act->prev_caller;
				if (h_tmp) {
					act->prev_caller = NULL;
					DUK_HOBJECT_DECREF(thr, h_tmp);  /* side effects */
				}
			}
			act = thr->callstack + idx;  /* avoid side effects */
			DUK_ASSERT(act->prev_caller == NULL);
		}
#endif

		/*
		 *  Unwind debugger state.  If we unwind while stepping
		 *  (either step over or step into), pause execution.
		 */

#if defined(DUK_USE_DEBUGGER_SUPPORT)
		heap = thr->heap;
		if (heap->dbg_step_thread == thr &&
		    heap->dbg_step_csindex == idx) {
			/* Pause for all step types: step into, step over, step out.
			 * This is the only place explicitly handling a step out.
			 */
			DUK_HEAP_SET_PAUSED(heap);
			DUK_ASSERT(heap->dbg_step_thread == NULL);
		}
#endif

		/*
		 *  Close environment record(s) if they exist.
		 *
		 *  Only variable environments are closed.  If lex_env != var_env, it
		 *  cannot currently contain any register bound declarations.
		 *
		 *  Only environments created for a NEWENV function are closed.  If an
		 *  environment is created for e.g. an eval call, it must not be closed.
		 */

		func = DUK_ACT_GET_FUNC(act);
		if (func != NULL && !DUK_HOBJECT_HAS_NEWENV(func)) {
			DUK_DDD(DUK_DDDPRINT("skip closing environments, envs not owned by this activation"));
			goto skip_env_close;
		}
		/* func is NULL for lightfunc */

		DUK_ASSERT(act->lex_env == act->var_env);
		if (act->var_env != NULL) {
			DUK_DDD(DUK_DDDPRINT("closing var_env record %p -> %!O",
			                     (void *) act->var_env, (duk_heaphdr *) act->var_env));
			/* Closing may run arbitrary code (see function comment),
			 * hence the re-lookup of 'act' below.
			 */
			duk_js_close_environment_record(thr, act->var_env, func, act->idx_bottom);
			act = thr->callstack + idx;  /* avoid side effect issues */
		}

#if 0
		/* Disabled: lex_env closing; currently lex_env == var_env holds
		 * for NEWENV activations (asserted above), so only var_env needs
		 * closing.
		 */
		if (act->lex_env != NULL) {
			if (act->lex_env == act->var_env) {
				/* common case, already closed, so skip */
				DUK_DD(DUK_DDPRINT("lex_env and var_env are the same and lex_env "
				                   "already closed -> skip closing lex_env"));
				;
			} else {
				DUK_DD(DUK_DDPRINT("closing lex_env record %p -> %!O",
				                   (void *) act->lex_env, (duk_heaphdr *) act->lex_env));
				duk_js_close_environment_record(thr, act->lex_env, DUK_ACT_GET_FUNC(act), act->idx_bottom);
				act = thr->callstack + idx;  /* avoid side effect issues */
			}
		}
#endif

		/* After closing, the environments must no longer carry the
		 * internal control properties used by open records.
		 */
		DUK_ASSERT((act->lex_env == NULL) ||
		           ((duk_hobject_find_existing_entry_tval_ptr(thr->heap, act->lex_env, DUK_HTHREAD_STRING_INT_CALLEE(thr)) == NULL) &&
		            (duk_hobject_find_existing_entry_tval_ptr(thr->heap, act->lex_env, DUK_HTHREAD_STRING_INT_VARMAP(thr)) == NULL) &&
		            (duk_hobject_find_existing_entry_tval_ptr(thr->heap, act->lex_env, DUK_HTHREAD_STRING_INT_THREAD(thr)) == NULL) &&
		            (duk_hobject_find_existing_entry_tval_ptr(thr->heap, act->lex_env, DUK_HTHREAD_STRING_INT_REGBASE(thr)) == NULL)));

		DUK_ASSERT((act->var_env == NULL) ||
		           ((duk_hobject_find_existing_entry_tval_ptr(thr->heap, act->var_env, DUK_HTHREAD_STRING_INT_CALLEE(thr)) == NULL) &&
		            (duk_hobject_find_existing_entry_tval_ptr(thr->heap, act->var_env, DUK_HTHREAD_STRING_INT_VARMAP(thr)) == NULL) &&
		            (duk_hobject_find_existing_entry_tval_ptr(thr->heap, act->var_env, DUK_HTHREAD_STRING_INT_THREAD(thr)) == NULL) &&
		            (duk_hobject_find_existing_entry_tval_ptr(thr->heap, act->var_env, DUK_HTHREAD_STRING_INT_REGBASE(thr)) == NULL)));

	 skip_env_close:

		/*
		 *  Update preventcount
		 */

		if (act->flags & DUK_ACT_FLAG_PREVENT_YIELD) {
			DUK_ASSERT(thr->callstack_preventcount >= 1);
			thr->callstack_preventcount--;
		}

		/*
		 *  Reference count updates
		 *
		 *  Note: careful manipulation of refcounts.  The top is
		 *  not updated yet, so all the activations are reachable
		 *  for mark-and-sweep (which may be triggered by decref).
		 *  However, the pointers are NULL so this is not an issue.
		 *
		 *  Each pointer is NULLed in the activation *before* the
		 *  DECREF so mark-and-sweep never sees a dangling reference,
		 *  and 'act' is re-looked-up after each DECREF.
		 */

#ifdef DUK_USE_REFERENCE_COUNTING
		tmp = act->var_env;
#endif
		act->var_env = NULL;
#ifdef DUK_USE_REFERENCE_COUNTING
		DUK_HOBJECT_DECREF_ALLOWNULL(thr, tmp);
		act = thr->callstack + idx;  /* avoid side effect issues */
#endif

#ifdef DUK_USE_REFERENCE_COUNTING
		tmp = act->lex_env;
#endif
		act->lex_env = NULL;
#ifdef DUK_USE_REFERENCE_COUNTING
		DUK_HOBJECT_DECREF_ALLOWNULL(thr, tmp);
		act = thr->callstack + idx;  /* avoid side effect issues */
#endif

		/* Note: this may cause a corner case situation where a finalizer
		 * may see a currently reachable activation whose 'func' is NULL.
		 */
#ifdef DUK_USE_REFERENCE_COUNTING
		tmp = DUK_ACT_GET_FUNC(act);
#endif
		act->func = NULL;
#ifdef DUK_USE_REFERENCE_COUNTING
		DUK_HOBJECT_DECREF_ALLOWNULL(thr, tmp);
		act = thr->callstack + idx;  /* avoid side effect issues */
		DUK_UNREF(act);
#endif
	}

	thr->callstack_top = new_top;

	/*
	 *  We could clear the book-keeping variables for the topmost activation,
	 *  but don't do so now.
	 */
#if 0
	if (thr->callstack_top > 0) {
		duk_activation *act = thr->callstack + thr->callstack_top - 1;
		act->idx_retval = 0;
	}
#endif

	/* Note: any entries above the callstack top are garbage and not zeroed.
	 * Also topmost activation idx_retval is garbage (not zeroed), and must
	 * be ignored.
	 */
}
330 | ||
331 | DUK_INTERNAL void duk_hthread_catchstack_grow(duk_hthread *thr) { | |
332 | duk_catcher *new_ptr; | |
333 | duk_size_t old_size; | |
334 | duk_size_t new_size; | |
335 | ||
336 | DUK_ASSERT(thr != NULL); | |
337 | DUK_ASSERT_DISABLE(thr->catchstack_top); /* avoid warning (unsigned) */ | |
338 | DUK_ASSERT(thr->catchstack_size >= thr->catchstack_top); | |
339 | ||
340 | if (thr->catchstack_top < thr->catchstack_size) { | |
341 | return; | |
342 | } | |
343 | ||
344 | old_size = thr->catchstack_size; | |
345 | new_size = old_size + DUK_CATCHSTACK_GROW_STEP; | |
346 | ||
347 | /* this is a bit approximate (errors out before max is reached); this is OK */ | |
348 | if (new_size >= thr->catchstack_max) { | |
11fdf7f2 | 349 | DUK_ERROR_RANGE(thr, DUK_STR_CATCHSTACK_LIMIT); |
7c673cae FG |
350 | } |
351 | ||
352 | DUK_DD(DUK_DDPRINT("growing catchstack %ld -> %ld", (long) old_size, (long) new_size)); | |
353 | ||
354 | /* | |
355 | * Note: must use indirect variant of DUK_REALLOC() because underlying | |
356 | * pointer may be changed by mark-and-sweep. | |
357 | */ | |
358 | ||
359 | DUK_ASSERT(new_size > 0); | |
360 | new_ptr = (duk_catcher *) DUK_REALLOC_INDIRECT(thr->heap, duk_hthread_get_catchstack_ptr, (void *) thr, sizeof(duk_catcher) * new_size); | |
361 | if (!new_ptr) { | |
362 | /* No need for a NULL/zero-size check because new_size > 0) */ | |
11fdf7f2 | 363 | DUK_ERROR_ALLOC_DEFMSG(thr); |
7c673cae FG |
364 | } |
365 | thr->catchstack = new_ptr; | |
366 | thr->catchstack_size = new_size; | |
367 | ||
368 | /* note: any entries above the catchstack top are garbage and not zeroed */ | |
369 | } | |
370 | ||
371 | DUK_INTERNAL void duk_hthread_catchstack_shrink_check(duk_hthread *thr) { | |
372 | duk_size_t new_size; | |
373 | duk_catcher *p; | |
374 | ||
375 | DUK_ASSERT(thr != NULL); | |
376 | DUK_ASSERT_DISABLE(thr->catchstack_top >= 0); /* avoid warning (unsigned) */ | |
377 | DUK_ASSERT(thr->catchstack_size >= thr->catchstack_top); | |
378 | ||
379 | if (thr->catchstack_size - thr->catchstack_top < DUK_CATCHSTACK_SHRINK_THRESHOLD) { | |
380 | return; | |
381 | } | |
382 | ||
383 | new_size = thr->catchstack_top + DUK_CATCHSTACK_SHRINK_SPARE; | |
384 | DUK_ASSERT(new_size >= thr->catchstack_top); | |
385 | ||
386 | DUK_DD(DUK_DDPRINT("shrinking catchstack %ld -> %ld", (long) thr->catchstack_size, (long) new_size)); | |
387 | ||
388 | /* | |
389 | * Note: must use indirect variant of DUK_REALLOC() because underlying | |
390 | * pointer may be changed by mark-and-sweep. | |
391 | */ | |
392 | ||
393 | /* shrink failure is not fatal */ | |
394 | p = (duk_catcher *) DUK_REALLOC_INDIRECT(thr->heap, duk_hthread_get_catchstack_ptr, (void *) thr, sizeof(duk_catcher) * new_size); | |
395 | if (p) { | |
396 | thr->catchstack = p; | |
397 | thr->catchstack_size = new_size; | |
398 | } else { | |
399 | /* Because new_size != 0, if condition doesn't need to be | |
400 | * (p != NULL || new_size == 0). | |
401 | */ | |
402 | DUK_ASSERT(new_size != 0); | |
403 | DUK_D(DUK_DPRINT("catchstack shrink failed, ignoring")); | |
404 | } | |
405 | ||
406 | /* note: any entries above the catchstack top are garbage and not zeroed */ | |
407 | } | |
408 | ||
/* Unwind the catchstack from its current top down to 'new_top'.  Catcher
 * entries hold no references themselves; the only unwind side effect is
 * popping a lexical environment established for an active catch clause
 * (which requires the corresponding callstack entry to still exist).
 */
DUK_INTERNAL void duk_hthread_catchstack_unwind(duk_hthread *thr, duk_size_t new_top) {
	duk_size_t idx;

	DUK_DDD(DUK_DDDPRINT("unwind catchstack top of thread %p from %ld to %ld",
	                     (void *) thr,
	                     (thr != NULL ? (long) thr->catchstack_top : (long) -1),
	                     (long) new_top));

	DUK_ASSERT(thr);
	DUK_ASSERT(thr->heap);
	DUK_ASSERT_DISABLE(new_top >= 0);  /* unsigned */
	DUK_ASSERT((duk_size_t) new_top <= thr->catchstack_top);  /* cannot grow */

	/*
	 *  Since there are no references in the catcher structure,
	 *  unwinding is quite simple.  The only thing we need to
	 *  look out for is popping a possible lexical environment
	 *  established for an active catch clause.
	 */

	idx = thr->catchstack_top;
	while (idx > new_top) {
		duk_catcher *p;
		duk_activation *act;
		duk_hobject *env;

		idx--;
		DUK_ASSERT_DISABLE(idx >= 0);  /* unsigned */
		DUK_ASSERT((duk_size_t) idx < thr->catchstack_size);

		p = thr->catchstack + idx;

		if (DUK_CAT_HAS_LEXENV_ACTIVE(p)) {
			DUK_DDD(DUK_DDDPRINT("unwinding catchstack idx %ld, callstack idx %ld, callstack top %ld: lexical environment active",
			                     (long) idx, (long) p->callstack_index, (long) thr->callstack_top));

			/* XXX: Here we have a nasty dependency: the need to manipulate
			 * the callstack means that catchstack must always be unwound by
			 * the caller before unwinding the callstack.  This should be fixed
			 * later.
			 */

			/* Note that multiple catchstack entries may refer to the same
			 * callstack entry.
			 */
			act = thr->callstack + p->callstack_index;
			DUK_ASSERT(act >= thr->callstack);
			DUK_ASSERT(act < thr->callstack + thr->callstack_top);

			DUK_DDD(DUK_DDDPRINT("catchstack_index=%ld, callstack_index=%ld, lex_env=%!iO",
			                     (long) idx, (long) p->callstack_index,
			                     (duk_heaphdr *) act->lex_env));

			/* Pop the catch-clause environment: restore the activation's
			 * lex_env to what it was before the catcher was created.
			 */
			env = act->lex_env;             /* current lex_env of the activation (created for catcher) */
			DUK_ASSERT(env != NULL);        /* must be, since env was created when catcher was created */
			act->lex_env = DUK_HOBJECT_GET_PROTOTYPE(thr->heap, env);  /* prototype is lex_env before catcher created */
			DUK_HOBJECT_DECREF(thr, env);

			/* There is no need to decref anything else than 'env': if 'env'
			 * becomes unreachable, refzero will handle decref'ing its prototype.
			 */
		}
	}

	thr->catchstack_top = new_top;

	/* Note: entries above the catchstack top are garbage and not zeroed. */
}