1 /*
2 * Call handling.
3 *
 4 * The main workhorse functions are:
 5 * - duk_handle_call(): call a C/Ecmascript function
 6 * - duk_handle_safe_call(): make a protected C call within the current activation
7 * - duk_handle_ecma_call_setup(): Ecmascript-to-Ecmascript calls, including
8 * tail calls and coroutine resume
9 */
10
11 #include "duk_internal.h"
12
13 /*
14 * Misc
15 */
16
17 #if defined(DUK_USE_INTERRUPT_COUNTER) && defined(DUK_USE_DEBUG)
18 DUK_LOCAL void duk__interrupt_fixup(duk_hthread *thr, duk_hthread *entry_curr_thread) {
19 /* XXX: Currently the bytecode executor and executor interrupt
20 * instruction counts are off because we don't execute the
21 * interrupt handler when we're about to exit from the initial
22 * user call into Duktape.
23 *
24 * If we were to execute the interrupt handler here, the counts
25 * would match. You can enable this block manually to check
26 * that this is the case.
27 */
28
29 DUK_ASSERT(thr != NULL);
30 DUK_ASSERT(thr->heap != NULL);
31
32 #if 0
33 if (entry_curr_thread == NULL) {
34 thr->interrupt_init = thr->interrupt_init - thr->interrupt_counter;
35 thr->heap->inst_count_interrupt += thr->interrupt_init;
36 DUK_DD(DUK_DDPRINT("debug test: updated interrupt count on exit to "
37 "user code, instruction counts: executor=%ld, interrupt=%ld",
38 (long) thr->heap->inst_count_exec, (long) thr->heap->inst_count_interrupt));
39 DUK_ASSERT(thr->heap->inst_count_exec == thr->heap->inst_count_interrupt);
40 }
41 #else
42 DUK_UNREF(thr);
43 DUK_UNREF(entry_curr_thread);
44 #endif
45 }
46 #endif
47
48 /*
49 * Arguments object creation.
50 *
51 * Creating arguments objects is a bit finicky, see E5 Section 10.6 for the
52 * specific requirements. Much of the arguments object exotic behavior is
53 * implemented in duk_hobject_props.c, and is enabled by the object flag
54 * DUK_HOBJECT_FLAG_EXOTIC_ARGUMENTS.
55 */
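/* Illustrative sketch (not compiled): the exotic "mapped arguments"
 * behavior described above, observed through the public Duktape 1.x
 * API.  For a non-strict function, writes to a mapped formal are
 * reflected in arguments[i] and vice versa; a strict function gets an
 * unmapped arguments object.  The embedded scripts and names below
 * are illustrative only.
 */
#if 0
#include <stdio.h>
#include "duktape.h"

static void demo_mapped_arguments(duk_context *ctx) {
	/* Non-strict: 'x' and arguments[0] are linked through the map. */
	duk_eval_string(ctx,
	    "(function (x) { x = 123; return arguments[0]; })(1)");
	printf("non-strict: %s\n", duk_safe_to_string(ctx, -1));  /* -> 123 */
	duk_pop(ctx);

	/* Strict: no parameter map, arguments[0] keeps its original value. */
	duk_eval_string(ctx,
	    "(function (x) { 'use strict'; x = 123; return arguments[0]; })(1)");
	printf("strict: %s\n", duk_safe_to_string(ctx, -1));  /* -> 1 */
	duk_pop(ctx);
}
#endif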
56
57 DUK_LOCAL
58 void duk__create_arguments_object(duk_hthread *thr,
59 duk_hobject *func,
60 duk_hobject *varenv,
61 duk_idx_t idx_argbase, /* idx of first argument on stack */
62 duk_idx_t num_stack_args) { /* num args starting from idx_argbase */
63 duk_context *ctx = (duk_context *) thr;
64 duk_hobject *arg; /* 'arguments' */
65 duk_hobject *formals; /* formals for 'func' (may be NULL if func is a C function) */
66 duk_idx_t i_arg;
67 duk_idx_t i_map;
68 duk_idx_t i_mappednames;
69 duk_idx_t i_formals;
70 duk_idx_t i_argbase;
71 duk_idx_t n_formals;
72 duk_idx_t idx;
73 duk_bool_t need_map;
74
75 DUK_DDD(DUK_DDDPRINT("creating arguments object for func=%!iO, varenv=%!iO, "
76 "idx_argbase=%ld, num_stack_args=%ld",
77 (duk_heaphdr *) func, (duk_heaphdr *) varenv,
78 (long) idx_argbase, (long) num_stack_args));
79
80 DUK_ASSERT(thr != NULL);
81 DUK_ASSERT(func != NULL);
82 DUK_ASSERT(DUK_HOBJECT_IS_NONBOUND_FUNCTION(func));
83 DUK_ASSERT(varenv != NULL);
 84 DUK_ASSERT(idx_argbase >= 0); /* assumed to be bottom relative */
85 DUK_ASSERT(num_stack_args >= 0);
86
87 need_map = 0;
88
89 i_argbase = idx_argbase;
90 DUK_ASSERT(i_argbase >= 0);
91
92 duk_push_hobject(ctx, func);
93 duk_get_prop_stridx(ctx, -1, DUK_STRIDX_INT_FORMALS);
94 formals = duk_get_hobject(ctx, -1);
95 n_formals = 0;
96 if (formals) {
97 duk_get_prop_stridx(ctx, -1, DUK_STRIDX_LENGTH);
98 n_formals = (duk_idx_t) duk_require_int(ctx, -1);
99 duk_pop(ctx);
100 }
101 duk_remove(ctx, -2); /* leave formals on stack for later use */
102 i_formals = duk_require_top_index(ctx);
103
104 DUK_ASSERT(n_formals >= 0);
105 DUK_ASSERT(formals != NULL || n_formals == 0);
106
107 DUK_DDD(DUK_DDDPRINT("func=%!O, formals=%!O, n_formals=%ld",
108 (duk_heaphdr *) func, (duk_heaphdr *) formals,
109 (long) n_formals));
110
111 /* [ ... formals ] */
112
113 /*
114 * Create required objects:
115 * - 'arguments' object: array-like, but not an array
116 * - 'map' object: internal object, tied to 'arguments'
117 * - 'mappedNames' object: temporary value used during construction
118 */
119
120 i_arg = duk_push_object_helper(ctx,
121 DUK_HOBJECT_FLAG_EXTENSIBLE |
122 DUK_HOBJECT_FLAG_ARRAY_PART |
123 DUK_HOBJECT_CLASS_AS_FLAGS(DUK_HOBJECT_CLASS_ARGUMENTS),
124 DUK_BIDX_OBJECT_PROTOTYPE);
125 DUK_ASSERT(i_arg >= 0);
126 arg = duk_require_hobject(ctx, -1);
127 DUK_ASSERT(arg != NULL);
128
129 i_map = duk_push_object_helper(ctx,
130 DUK_HOBJECT_FLAG_EXTENSIBLE |
131 DUK_HOBJECT_CLASS_AS_FLAGS(DUK_HOBJECT_CLASS_OBJECT),
132 -1); /* no prototype */
133 DUK_ASSERT(i_map >= 0);
134
135 i_mappednames = duk_push_object_helper(ctx,
136 DUK_HOBJECT_FLAG_EXTENSIBLE |
137 DUK_HOBJECT_CLASS_AS_FLAGS(DUK_HOBJECT_CLASS_OBJECT),
138 -1); /* no prototype */
139 DUK_ASSERT(i_mappednames >= 0);
140
141 /* [... formals arguments map mappedNames] */
142
143 DUK_DDD(DUK_DDDPRINT("created arguments related objects: "
144 "arguments at index %ld -> %!O "
145 "map at index %ld -> %!O "
146 "mappednames at index %ld -> %!O",
147 (long) i_arg, (duk_heaphdr *) duk_get_hobject(ctx, i_arg),
148 (long) i_map, (duk_heaphdr *) duk_get_hobject(ctx, i_map),
149 (long) i_mappednames, (duk_heaphdr *) duk_get_hobject(ctx, i_mappednames)));
150
151 /*
152 * Init arguments properties, map, etc.
153 */
154
155 duk_push_int(ctx, num_stack_args);
156 duk_xdef_prop_stridx(ctx, i_arg, DUK_STRIDX_LENGTH, DUK_PROPDESC_FLAGS_WC);
157
158 /*
159 * Init argument related properties
160 */
161
162 /* step 11 */
163 idx = num_stack_args - 1;
164 while (idx >= 0) {
165 DUK_DDD(DUK_DDDPRINT("arg idx %ld, argbase=%ld, argidx=%ld",
166 (long) idx, (long) i_argbase, (long) (i_argbase + idx)));
167
168 DUK_DDD(DUK_DDDPRINT("define arguments[%ld]=arg", (long) idx));
169 duk_dup(ctx, i_argbase + idx);
170 duk_xdef_prop_index_wec(ctx, i_arg, (duk_uarridx_t) idx);
171 DUK_DDD(DUK_DDDPRINT("defined arguments[%ld]=arg", (long) idx));
172
173 /* step 11.c is relevant only if non-strict (checked in 11.c.ii) */
174 if (!DUK_HOBJECT_HAS_STRICT(func) && idx < n_formals) {
175 DUK_ASSERT(formals != NULL);
176
177 DUK_DDD(DUK_DDDPRINT("strict function, index within formals (%ld < %ld)",
178 (long) idx, (long) n_formals));
179
180 duk_get_prop_index(ctx, i_formals, idx);
181 DUK_ASSERT(duk_is_string(ctx, -1));
182
183 duk_dup(ctx, -1); /* [... name name] */
184
185 if (!duk_has_prop(ctx, i_mappednames)) {
186 /* steps 11.c.ii.1 - 11.c.ii.4, but our internal book-keeping
187 * differs from the reference model
188 */
189
190 /* [... name] */
191
192 need_map = 1;
193
194 DUK_DDD(DUK_DDDPRINT("set mappednames[%s]=%ld",
195 (const char *) duk_get_string(ctx, -1),
196 (long) idx));
197 duk_dup(ctx, -1); /* name */
198 duk_push_uint(ctx, (duk_uint_t) idx); /* index */
199 duk_to_string(ctx, -1);
200 duk_xdef_prop_wec(ctx, i_mappednames); /* out of spec, must be configurable */
201
202 DUK_DDD(DUK_DDDPRINT("set map[%ld]=%s",
203 (long) idx,
204 duk_get_string(ctx, -1)));
205 duk_dup(ctx, -1); /* name */
206 duk_xdef_prop_index_wec(ctx, i_map, (duk_uarridx_t) idx); /* out of spec, must be configurable */
207 } else {
208 /* duk_has_prop() popped the second 'name' */
209 }
210
211 /* [... name] */
212 duk_pop(ctx); /* pop 'name' */
213 }
214
215 idx--;
216 }
217
218 DUK_DDD(DUK_DDDPRINT("actual arguments processed"));
219
220 /* step 12 */
221 if (need_map) {
222 DUK_DDD(DUK_DDDPRINT("adding 'map' and 'varenv' to arguments object"));
223
224 /* should never happen for a strict callee */
225 DUK_ASSERT(!DUK_HOBJECT_HAS_STRICT(func));
226
227 duk_dup(ctx, i_map);
228 duk_xdef_prop_stridx(ctx, i_arg, DUK_STRIDX_INT_MAP, DUK_PROPDESC_FLAGS_NONE); /* out of spec, don't care */
229
230 /* The variable environment for magic variable bindings needs to be
231 * given by the caller and recorded in the arguments object.
232 *
233 * See E5 Section 10.6, the creation of setters/getters.
234 *
235 * The variable environment also provides access to the callee, so
236 * an explicit (internal) callee property is not needed.
237 */
238
239 duk_push_hobject(ctx, varenv);
240 duk_xdef_prop_stridx(ctx, i_arg, DUK_STRIDX_INT_VARENV, DUK_PROPDESC_FLAGS_NONE); /* out of spec, don't care */
241 }
242
243 /* steps 13-14 */
244 if (DUK_HOBJECT_HAS_STRICT(func)) {
245 /*
246 * Note: callee/caller are throwers and are not deletable etc.
247 * They could be implemented as virtual properties, but currently
248 * there is no support for virtual properties which are accessors
249 * (only plain virtual properties). This would not be difficult
250 * to change in duk_hobject_props, but we can make the throwers
251 * normal, concrete properties just as easily.
252 *
253 * Note that the specification requires that the *same* thrower
254 * built-in object is used here! See E5 Section 10.6 main
 255 * algorithm, step 14, and Section 13.2.3 which describes the
256 * thrower. See test case test-arguments-throwers.js.
257 */
258
259 DUK_DDD(DUK_DDDPRINT("strict function, setting caller/callee to throwers"));
260
261 duk_xdef_prop_stridx_thrower(ctx, i_arg, DUK_STRIDX_CALLER, DUK_PROPDESC_FLAGS_NONE);
262 duk_xdef_prop_stridx_thrower(ctx, i_arg, DUK_STRIDX_CALLEE, DUK_PROPDESC_FLAGS_NONE);
263 } else {
264 DUK_DDD(DUK_DDDPRINT("non-strict function, setting callee to actual value"));
265 duk_push_hobject(ctx, func);
266 duk_xdef_prop_stridx(ctx, i_arg, DUK_STRIDX_CALLEE, DUK_PROPDESC_FLAGS_WC);
267 }
268
269 /* set exotic behavior only after we're done */
270 if (need_map) {
271 /*
272 * Note: exotic behaviors are only enabled for arguments
273 * objects which have a parameter map (see E5 Section 10.6
274 * main algorithm, step 12).
275 *
276 * In particular, a non-strict arguments object with no
277 * mapped formals does *NOT* get exotic behavior, even
278 * for e.g. "caller" property. This seems counterintuitive
279 * but seems to be the case.
280 */
281
 282 /* cannot be strict (strict functions never have mapped variables) */
283 DUK_ASSERT(!DUK_HOBJECT_HAS_STRICT(func));
284
285 DUK_DDD(DUK_DDDPRINT("enabling exotic behavior for arguments object"));
286 DUK_HOBJECT_SET_EXOTIC_ARGUMENTS(arg);
287 } else {
288 DUK_DDD(DUK_DDDPRINT("not enabling exotic behavior for arguments object"));
289 }
290
291 /* nice log */
292 DUK_DDD(DUK_DDDPRINT("final arguments related objects: "
293 "arguments at index %ld -> %!O "
294 "map at index %ld -> %!O "
295 "mappednames at index %ld -> %!O",
296 (long) i_arg, (duk_heaphdr *) duk_get_hobject(ctx, i_arg),
297 (long) i_map, (duk_heaphdr *) duk_get_hobject(ctx, i_map),
298 (long) i_mappednames, (duk_heaphdr *) duk_get_hobject(ctx, i_mappednames)));
299
300 /* [args(n) [crud] formals arguments map mappednames] -> [args [crud] arguments] */
301 duk_pop_2(ctx);
302 duk_remove(ctx, -2);
303 }
304
305 /* Helper for creating the arguments object and adding it to the env record
306 * on top of the value stack. This helper has a very strict dependency on
307 * the shape of the input stack.
308 */
309 DUK_LOCAL
310 void duk__handle_createargs_for_call(duk_hthread *thr,
311 duk_hobject *func,
312 duk_hobject *env,
313 duk_idx_t num_stack_args) {
314 duk_context *ctx = (duk_context *) thr;
315
316 DUK_DDD(DUK_DDDPRINT("creating arguments object for function call"));
317
318 DUK_ASSERT(thr != NULL);
319 DUK_ASSERT(func != NULL);
320 DUK_ASSERT(env != NULL);
321 DUK_ASSERT(DUK_HOBJECT_HAS_CREATEARGS(func));
322 DUK_ASSERT(duk_get_top(ctx) >= num_stack_args + 1);
323
324 /* [... arg1 ... argN envobj] */
325
326 duk__create_arguments_object(thr,
327 func,
328 env,
329 duk_get_top(ctx) - num_stack_args - 1, /* idx_argbase */
330 num_stack_args);
331
332 /* [... arg1 ... argN envobj argobj] */
333
334 duk_xdef_prop_stridx(ctx,
335 -2,
336 DUK_STRIDX_LC_ARGUMENTS,
337 DUK_HOBJECT_HAS_STRICT(func) ? DUK_PROPDESC_FLAGS_E : /* strict: non-deletable, non-writable */
338 DUK_PROPDESC_FLAGS_WE); /* non-strict: non-deletable, writable */
339 /* [... arg1 ... argN envobj] */
340 }
341
342 /*
343 * Helper for handling a "bound function" chain when a call is being made.
344 *
345 * Follows the bound function chain until a non-bound function is found.
346 * Prepends the bound arguments to the value stack (at idx_func + 2),
347 * updating 'num_stack_args' in the process. The 'this' binding is also
348 * updated if necessary (at idx_func + 1). Note that for constructor calls
349 * the 'this' binding is never updated by [[BoundThis]].
350 *
351 * XXX: bound function chains could be collapsed at bound function creation
352 * time so that each bound function would point directly to a non-bound
353 * function. This would make call time handling much easier.
354 */
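/* Illustrative sketch (not compiled): the externally visible effect of
 * the bound chain handling above, using the public Duktape 1.x API.
 * Each bind() level prepends its bound arguments, and the innermost
 * [[BoundThis]] wins for normal (non-constructor) calls.  Names below
 * are illustrative only.
 */
#if 0
#include <stdio.h>
#include "duktape.h"

static void demo_bound_chain(duk_context *ctx) {
	/* f is bound twice: the call site supplies only the last argument. */
	duk_eval_string(ctx,
	    "(function () {"
	    "    function f(a, b, c) { return [this.name, a, b, c].join(','); }"
	    "    return f.bind({ name: 'T' }, 1).bind({ name: 'ignored' }, 2);"
	    "})()");
	duk_push_int(ctx, 3);
	duk_call(ctx, 1 /*nargs*/);  /* internally: bound args 1, 2 are prepended */
	printf("%s\n", duk_get_string(ctx, -1));  /* -> "T,1,2,3" */
	duk_pop(ctx);
}
#endif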
355
356 DUK_LOCAL
357 void duk__handle_bound_chain_for_call(duk_hthread *thr,
358 duk_idx_t idx_func,
359 duk_idx_t *p_num_stack_args, /* may be changed by call */
360 duk_bool_t is_constructor_call) {
361 duk_context *ctx = (duk_context *) thr;
362 duk_idx_t num_stack_args;
363 duk_tval *tv_func;
364 duk_hobject *func;
365 duk_uint_t sanity;
366
367 DUK_ASSERT(thr != NULL);
368 DUK_ASSERT(p_num_stack_args != NULL);
369
370 /* On entry, item at idx_func is a bound, non-lightweight function,
371 * but we don't rely on that below.
372 */
373
374 num_stack_args = *p_num_stack_args;
375
376 sanity = DUK_HOBJECT_BOUND_CHAIN_SANITY;
377 do {
378 duk_idx_t i, len;
379
380 tv_func = duk_require_tval(ctx, idx_func);
381 DUK_ASSERT(tv_func != NULL);
382
383 if (DUK_TVAL_IS_LIGHTFUNC(tv_func)) {
384 /* Lightweight function: never bound, so terminate. */
385 break;
386 } else if (DUK_TVAL_IS_OBJECT(tv_func)) {
387 func = DUK_TVAL_GET_OBJECT(tv_func);
388 if (!DUK_HOBJECT_HAS_BOUND(func)) {
389 /* Normal non-bound function. */
390 break;
391 }
392 } else {
 393 /* Function.prototype.bind() should never let this happen,
 394 * so a generic internal error message is enough.
395 */
396 DUK_ERROR(thr, DUK_ERR_INTERNAL_ERROR, DUK_STR_INTERNAL_ERROR);
397 }
398 DUK_ASSERT(DUK_TVAL_GET_OBJECT(tv_func) != NULL);
399
400 /* XXX: this could be more compact by accessing the internal properties
401 * directly as own properties (they cannot be inherited, and are not
402 * externally visible).
403 */
404
405 DUK_DDD(DUK_DDDPRINT("bound function encountered, ptr=%p, num_stack_args=%ld: %!T",
406 (void *) DUK_TVAL_GET_OBJECT(tv_func), (long) num_stack_args, tv_func));
407
408 /* [ ... func this arg1 ... argN ] */
409
410 if (is_constructor_call) {
411 /* See: tests/ecmascript/test-spec-bound-constructor.js */
412 DUK_DDD(DUK_DDDPRINT("constructor call: don't update this binding"));
413 } else {
414 duk_get_prop_stridx(ctx, idx_func, DUK_STRIDX_INT_THIS);
415 duk_replace(ctx, idx_func + 1); /* idx_this = idx_func + 1 */
416 }
417
418 /* [ ... func this arg1 ... argN ] */
419
420 /* XXX: duk_get_length? */
421 duk_get_prop_stridx(ctx, idx_func, DUK_STRIDX_INT_ARGS); /* -> [ ... func this arg1 ... argN _Args ] */
422 duk_get_prop_stridx(ctx, -1, DUK_STRIDX_LENGTH); /* -> [ ... func this arg1 ... argN _Args length ] */
423 len = (duk_idx_t) duk_require_int(ctx, -1);
424 duk_pop(ctx);
425 for (i = 0; i < len; i++) {
426 /* XXX: very slow - better to bulk allocate a gap, and copy
427 * from args_array directly (we know it has a compact array
428 * part, etc).
429 */
430
431 /* [ ... func this <some bound args> arg1 ... argN _Args ] */
432 duk_get_prop_index(ctx, -1, i);
433 duk_insert(ctx, idx_func + 2 + i); /* idx_args = idx_func + 2 */
434 }
435 num_stack_args += len; /* must be updated to work properly (e.g. creation of 'arguments') */
436 duk_pop(ctx);
437
438 /* [ ... func this <bound args> arg1 ... argN ] */
439
440 duk_get_prop_stridx(ctx, idx_func, DUK_STRIDX_INT_TARGET);
441 duk_replace(ctx, idx_func); /* replace in stack */
442
443 DUK_DDD(DUK_DDDPRINT("bound function handled, num_stack_args=%ld, idx_func=%ld, curr func=%!T",
444 (long) num_stack_args, (long) idx_func, duk_get_tval(ctx, idx_func)));
445 } while (--sanity > 0);
446
447 if (sanity == 0) {
448 DUK_ERROR(thr, DUK_ERR_INTERNAL_ERROR, DUK_STR_BOUND_CHAIN_LIMIT);
449 }
450
451 DUK_DDD(DUK_DDDPRINT("final non-bound function is: %!T", duk_get_tval(ctx, idx_func)));
452
453 #ifdef DUK_USE_ASSERTIONS
454 tv_func = duk_require_tval(ctx, idx_func);
455 DUK_ASSERT(DUK_TVAL_IS_LIGHTFUNC(tv_func) || DUK_TVAL_IS_OBJECT(tv_func));
456 if (DUK_TVAL_IS_OBJECT(tv_func)) {
457 func = DUK_TVAL_GET_OBJECT(tv_func);
458 DUK_ASSERT(func != NULL);
459 DUK_ASSERT(!DUK_HOBJECT_HAS_BOUND(func));
460 DUK_ASSERT(DUK_HOBJECT_HAS_COMPILEDFUNCTION(func) ||
461 DUK_HOBJECT_HAS_NATIVEFUNCTION(func));
462 }
463 #endif
464
465 /* write back */
466 *p_num_stack_args = num_stack_args;
467 }
468
469 /*
470 * Helper for setting up var_env and lex_env of an activation,
471 * assuming it does NOT have the DUK_HOBJECT_FLAG_NEWENV flag.
472 */
473
474 DUK_LOCAL
475 void duk__handle_oldenv_for_call(duk_hthread *thr,
476 duk_hobject *func,
477 duk_activation *act) {
478 duk_tval *tv;
479
480 DUK_ASSERT(thr != NULL);
481 DUK_ASSERT(func != NULL);
482 DUK_ASSERT(act != NULL);
483 DUK_ASSERT(!DUK_HOBJECT_HAS_NEWENV(func));
484 DUK_ASSERT(!DUK_HOBJECT_HAS_CREATEARGS(func));
485
486 tv = duk_hobject_find_existing_entry_tval_ptr(thr->heap, func, DUK_HTHREAD_STRING_INT_LEXENV(thr));
487 if (tv) {
488 DUK_ASSERT(DUK_TVAL_IS_OBJECT(tv));
489 DUK_ASSERT(DUK_HOBJECT_IS_ENV(DUK_TVAL_GET_OBJECT(tv)));
490 act->lex_env = DUK_TVAL_GET_OBJECT(tv);
491
492 tv = duk_hobject_find_existing_entry_tval_ptr(thr->heap, func, DUK_HTHREAD_STRING_INT_VARENV(thr));
493 if (tv) {
494 DUK_ASSERT(DUK_TVAL_IS_OBJECT(tv));
495 DUK_ASSERT(DUK_HOBJECT_IS_ENV(DUK_TVAL_GET_OBJECT(tv)));
496 act->var_env = DUK_TVAL_GET_OBJECT(tv);
497 } else {
498 act->var_env = act->lex_env;
499 }
500 } else {
501 act->lex_env = thr->builtins[DUK_BIDX_GLOBAL_ENV];
502 act->var_env = act->lex_env;
503 }
504
505 DUK_HOBJECT_INCREF_ALLOWNULL(thr, act->lex_env);
506 DUK_HOBJECT_INCREF_ALLOWNULL(thr, act->var_env);
507 }
508
509 /*
510 * Helper for updating callee 'caller' property.
511 */
512
513 #ifdef DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
514 DUK_LOCAL void duk__update_func_caller_prop(duk_hthread *thr, duk_hobject *func) {
515 duk_tval *tv_caller;
516 duk_hobject *h_tmp;
517 duk_activation *act_callee;
518 duk_activation *act_caller;
519
520 DUK_ASSERT(thr != NULL);
521 DUK_ASSERT(func != NULL);
522 DUK_ASSERT(!DUK_HOBJECT_HAS_BOUND(func)); /* bound chain resolved */
523 DUK_ASSERT(thr->callstack_top >= 1);
524
525 if (DUK_HOBJECT_HAS_STRICT(func)) {
526 /* Strict functions don't get their 'caller' updated. */
527 return;
528 }
529
530 act_callee = thr->callstack + thr->callstack_top - 1;
531 act_caller = (thr->callstack_top >= 2 ? act_callee - 1 : NULL);
532
 533 /* Back up the current 'caller' value and update it. */
534 tv_caller = duk_hobject_find_existing_entry_tval_ptr(thr->heap, func, DUK_HTHREAD_STRING_CALLER(thr));
535 if (tv_caller) {
536 /* If caller is global/eval code, 'caller' should be set to
537 * 'null'.
538 *
539 * XXX: there is no exotic flag to infer this correctly now.
540 * The NEWENV flag is used now which works as intended for
541 * everything (global code, non-strict eval code, and functions)
542 * except strict eval code. Bound functions are never an issue
543 * because 'func' has been resolved to a non-bound function.
544 */
545
546 if (act_caller) {
547 /* act_caller->func may be NULL in some finalization cases,
 548 * just treat it as if the caller is unknown.
549 */
550 if (act_caller->func && !DUK_HOBJECT_HAS_NEWENV(act_caller->func)) {
551 /* Setting to NULL causes 'caller' to be set to
552 * 'null' as desired.
553 */
554 act_caller = NULL;
555 }
556 }
557
558 if (DUK_TVAL_IS_OBJECT(tv_caller)) {
559 h_tmp = DUK_TVAL_GET_OBJECT(tv_caller);
560 DUK_ASSERT(h_tmp != NULL);
561 act_callee->prev_caller = h_tmp;
562
563 /* Previous value doesn't need refcount changes because its ownership
564 * is transferred to prev_caller.
565 */
566
567 if (act_caller) {
568 DUK_ASSERT(act_caller->func != NULL);
569 DUK_TVAL_SET_OBJECT(tv_caller, act_caller->func);
570 DUK_TVAL_INCREF(thr, tv_caller);
571 } else {
572 DUK_TVAL_SET_NULL(tv_caller); /* no incref */
573 }
574 } else {
 575 /* 'caller' must only take on 'null' or a function value */
576 DUK_ASSERT(!DUK_TVAL_IS_HEAP_ALLOCATED(tv_caller));
577 DUK_ASSERT(act_callee->prev_caller == NULL);
578 if (act_caller && act_caller->func) {
579 /* Tolerate act_caller->func == NULL which happens in
 580 * some finalization cases; treat it as an unknown caller.
581 */
582 DUK_TVAL_SET_OBJECT(tv_caller, act_caller->func);
583 DUK_TVAL_INCREF(thr, tv_caller);
584 } else {
585 DUK_TVAL_SET_NULL(tv_caller); /* no incref */
586 }
587 }
588 }
589 }
590 #endif /* DUK_USE_NONSTD_FUNC_CALLER_PROPERTY */
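/* Illustrative sketch (not compiled): what the non-standard 'caller'
 * update above provides when DUK_USE_NONSTD_FUNC_CALLER_PROPERTY is
 * enabled in the build.  With the option disabled the property is not
 * maintained, so treat this purely as an assumption about an optional
 * feature.  The embedded script and names below are illustrative only.
 */
#if 0
#include <stdio.h>
#include "duktape.h"

static void demo_caller_property(duk_context *ctx) {
	duk_eval_string(ctx,
	    "(function () {"
	    "    function callee() { return callee.caller === caller ? 'ok' : 'no'; }"
	    "    function caller() { return callee(); }"
	    "    return caller();"
	    "})()");
	printf("caller check: %s\n", duk_get_string(ctx, -1));
	duk_pop(ctx);
}
#endif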
591
592 /*
593 * Determine the effective 'this' binding and coerce the current value
594 * on the valstack to the effective one (in-place, at idx_this).
595 *
596 * The current this value in the valstack (at idx_this) represents either:
597 * - the caller's requested 'this' binding; or
598 * - a 'this' binding accumulated from the bound function chain
599 *
600 * The final 'this' binding for the target function may still be
601 * different, and is determined as described in E5 Section 10.4.3.
602 *
603 * For global and eval code (E5 Sections 10.4.1 and 10.4.2), we assume
604 * that the caller has provided the correct 'this' binding explicitly
605 * when calling, i.e.:
606 *
607 * - global code: this=global object
608 * - direct eval: this=copy from eval() caller's this binding
609 * - other eval: this=global object
610 *
611 * Note: this function may cause a recursive function call with arbitrary
612 * side effects, because ToObject() may be called.
613 */
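/* Illustrative sketch (not compiled): the effective 'this' coercion
 * described above, observed via the public Duktape 1.x API with
 * duk_call_method(), whose stack shape is [ func this arg1 ... argN ].
 * For a non-strict target, undefined/null coerce to the global object
 * and other primitives are ToObject() coerced; a strict target sees
 * the value as-is.  Names below are illustrative only.
 */
#if 0
#include <stdio.h>
#include "duktape.h"

static void demo_this_coercion(duk_context *ctx) {
	/* Non-strict function: 'this' is coerced. */
	duk_eval_string(ctx, "(function () { return typeof this; })");
	duk_push_undefined(ctx);               /* requested 'this' binding */
	duk_call_method(ctx, 0 /*nargs*/);
	printf("non-strict, undefined this -> %s\n",
	       duk_get_string(ctx, -1));       /* -> "object" (global object) */
	duk_pop(ctx);

	/* Strict function: 'this' is used directly. */
	duk_eval_string(ctx, "(function () { 'use strict'; return typeof this; })");
	duk_push_undefined(ctx);
	duk_call_method(ctx, 0 /*nargs*/);
	printf("strict, undefined this -> %s\n",
	       duk_get_string(ctx, -1));       /* -> "undefined" */
	duk_pop(ctx);
}
#endif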
614
615 DUK_LOCAL
616 void duk__coerce_effective_this_binding(duk_hthread *thr,
617 duk_hobject *func,
618 duk_idx_t idx_this) {
619 duk_context *ctx = (duk_context *) thr;
620 duk_small_int_t strict;
621
622 if (func) {
623 strict = DUK_HOBJECT_HAS_STRICT(func);
624 } else {
625 /* Lightfuncs are always considered strict. */
626 strict = 1;
627 }
628
629 if (strict) {
630 DUK_DDD(DUK_DDDPRINT("this binding: strict -> use directly"));
631 } else {
632 duk_tval *tv_this = duk_require_tval(ctx, idx_this);
633 duk_hobject *obj_global;
634
635 if (DUK_TVAL_IS_OBJECT(tv_this)) {
636 DUK_DDD(DUK_DDDPRINT("this binding: non-strict, object -> use directly"));
637 } else if (DUK_TVAL_IS_LIGHTFUNC(tv_this)) {
638 /* Lightfuncs are treated like objects and not coerced. */
639 DUK_DDD(DUK_DDDPRINT("this binding: non-strict, lightfunc -> use directly"));
640 } else if (DUK_TVAL_IS_UNDEFINED(tv_this) || DUK_TVAL_IS_NULL(tv_this)) {
641 DUK_DDD(DUK_DDDPRINT("this binding: non-strict, undefined/null -> use global object"));
642 obj_global = thr->builtins[DUK_BIDX_GLOBAL];
643 if (obj_global) {
644 duk_push_hobject(ctx, obj_global);
645 } else {
646 /*
647 * This may only happen if built-ins are being "torn down".
648 * This behavior is out of specification scope.
649 */
650 DUK_D(DUK_DPRINT("this binding: wanted to use global object, but it is NULL -> using undefined instead"));
651 duk_push_undefined(ctx);
652 }
653 duk_replace(ctx, idx_this);
654 } else {
655 DUK_DDD(DUK_DDDPRINT("this binding: non-strict, not object/undefined/null -> use ToObject(value)"));
656 duk_to_object(ctx, idx_this); /* may have side effects */
657 }
658 }
659 }
660
661 /*
662 * Shared helper for non-bound func lookup.
663 *
664 * Returns duk_hobject * to the final non-bound function (NULL for lightfunc).
665 */
666
667 DUK_LOCAL
668 duk_hobject *duk__nonbound_func_lookup(duk_context *ctx,
669 duk_idx_t idx_func,
670 duk_idx_t *out_num_stack_args,
671 duk_tval **out_tv_func,
672 duk_small_uint_t call_flags) {
673 duk_hthread *thr = (duk_hthread *) ctx;
674 duk_tval *tv_func;
675 duk_hobject *func;
676
677 for (;;) {
678 /* Use loop to minimize code size of relookup after bound function case */
679 tv_func = duk_get_tval(ctx, idx_func);
680 DUK_ASSERT(tv_func != NULL);
681
682 if (DUK_TVAL_IS_OBJECT(tv_func)) {
683 func = DUK_TVAL_GET_OBJECT(tv_func);
684 if (!DUK_HOBJECT_IS_CALLABLE(func)) {
685 goto not_callable_error;
686 }
687 if (DUK_HOBJECT_HAS_BOUND(func)) {
688 duk__handle_bound_chain_for_call(thr, idx_func, out_num_stack_args, call_flags & DUK_CALL_FLAG_CONSTRUCTOR_CALL);
689
690 /* The final object may be a normal function or a lightfunc.
691 * We need to re-lookup tv_func because it may have changed
692 * (also value stack may have been resized). Loop again to
693 * do that; we're guaranteed not to come here again.
694 */
695 DUK_ASSERT(DUK_TVAL_IS_OBJECT(duk_require_tval(ctx, idx_func)) ||
696 DUK_TVAL_IS_LIGHTFUNC(duk_require_tval(ctx, idx_func)));
697 continue;
698 }
699 } else if (DUK_TVAL_IS_LIGHTFUNC(tv_func)) {
700 func = NULL;
701 } else {
702 goto not_callable_error;
703 }
704 break;
705 }
706
707 DUK_ASSERT((DUK_TVAL_IS_OBJECT(tv_func) && DUK_HOBJECT_IS_CALLABLE(DUK_TVAL_GET_OBJECT(tv_func))) ||
708 DUK_TVAL_IS_LIGHTFUNC(tv_func));
709 DUK_ASSERT(func == NULL || !DUK_HOBJECT_HAS_BOUND(func));
710 DUK_ASSERT(func == NULL || (DUK_HOBJECT_IS_COMPILEDFUNCTION(func) ||
711 DUK_HOBJECT_IS_NATIVEFUNCTION(func)));
712
713 *out_tv_func = tv_func;
714 return func;
715
716 not_callable_error:
717 DUK_ERROR(thr, DUK_ERR_TYPE_ERROR, DUK_STR_NOT_CALLABLE);
718 DUK_UNREACHABLE();
719 return NULL; /* never executed */
720 }
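/* Illustrative sketch (not compiled): the not-callable error path of
 * the lookup above, as seen from the public Duktape 1.x API.  A
 * protected call on a non-callable target fails with DUK_EXEC_ERROR
 * and leaves the TypeError on the stack top.  Names below are
 * illustrative only.
 */
#if 0
#include <stdio.h>
#include "duktape.h"

static void demo_not_callable(duk_context *ctx) {
	duk_push_int(ctx, 123);               /* "func" slot: not callable */
	duk_push_string(ctx, "dummy arg");
	if (duk_pcall(ctx, 1 /*nargs*/) != DUK_EXEC_SUCCESS) {
		/* e.g. a "not callable" TypeError; exact message may vary */
		printf("call failed: %s\n", duk_safe_to_string(ctx, -1));
	}
	duk_pop(ctx);
}
#endif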
721
722 /*
723 * Value stack resize and stack top adjustment helper
724 *
725 * XXX: This should all be merged to duk_valstack_resize_raw().
726 */
727
728 DUK_LOCAL
729 void duk__adjust_valstack_and_top(duk_hthread *thr, duk_idx_t num_stack_args, duk_idx_t idx_args, duk_idx_t nregs, duk_idx_t nargs, duk_hobject *func) {
730 duk_context *ctx = (duk_context *) thr;
731 duk_size_t vs_min_size;
732 duk_bool_t adjusted_top = 0;
733
734 vs_min_size = (thr->valstack_bottom - thr->valstack) + /* bottom of current func */
735 idx_args; /* bottom of new func */
736
737 if (nregs >= 0) {
738 DUK_ASSERT(nargs >= 0);
739 DUK_ASSERT(nregs >= nargs);
740 vs_min_size += nregs;
741 } else {
742 /* 'func' wants stack "as is" */
743 vs_min_size += num_stack_args; /* num entries of new func at entry */
744 }
745 if (func == NULL || DUK_HOBJECT_IS_NATIVEFUNCTION(func)) {
746 vs_min_size += DUK_VALSTACK_API_ENTRY_MINIMUM; /* Duktape/C API guaranteed entries (on top of args) */
747 }
748 vs_min_size += DUK_VALSTACK_INTERNAL_EXTRA; /* + spare */
749
750 /* XXX: Awkward fix for GH-107: we can't resize the value stack to
751 * a size smaller than the current top, so the order of the resize
752 * and adjusting the stack top depends on the current vs. final size
753 * of the value stack. Ideally duk_valstack_resize_raw() would have
754 * a combined algorithm to avoid this.
755 */
756
757 if (vs_min_size < (duk_size_t) (thr->valstack_top - thr->valstack)) {
758 DUK_DDD(DUK_DDDPRINT(("final size smaller, set top before resize")));
759
760 DUK_ASSERT(nregs >= 0); /* can't happen when keeping current stack size */
761 duk_set_top(ctx, idx_args + nargs); /* clamp anything above nargs */
762 duk_set_top(ctx, idx_args + nregs); /* extend with undefined */
763 adjusted_top = 1;
764 }
765
766 (void) duk_valstack_resize_raw((duk_context *) thr,
767 vs_min_size,
768 DUK_VSRESIZE_FLAG_SHRINK | /* flags */
769 0 /* no compact */ |
770 DUK_VSRESIZE_FLAG_THROW);
771
772 if (!adjusted_top) {
773 if (nregs >= 0) {
774 DUK_ASSERT(nregs >= nargs);
775 duk_set_top(ctx, idx_args + nargs); /* clamp anything above nargs */
776 duk_set_top(ctx, idx_args + nregs); /* extend with undefined */
777 }
778 }
779 }
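/* Illustrative sketch (not compiled): the externally visible effect of
 * the clamp/extend logic above for Duktape/C functions, using the
 * public Duktape 1.x API.  A function registered with a fixed nargs
 * sees exactly that many stack entries (missing args read as undefined,
 * extras are clamped away); DUK_VARARGS keeps the stack "as is".
 * Names below are illustrative only.
 */
#if 0
#include <stdio.h>
#include "duktape.h"

static duk_ret_t demo_fixed_nargs(duk_context *ctx) {
	printf("fixed nargs=2, top=%ld\n", (long) duk_get_top(ctx));  /* always 2 */
	return 0;
}

static duk_ret_t demo_varargs(duk_context *ctx) {
	printf("varargs, top=%ld\n", (long) duk_get_top(ctx));  /* actual arg count */
	return 0;
}

static void demo_register_and_call(duk_context *ctx) {
	duk_push_c_function(ctx, demo_fixed_nargs, 2 /*nargs*/);
	duk_push_int(ctx, 1);
	duk_push_int(ctx, 2);
	duk_push_int(ctx, 3);                /* clamped away before the call */
	duk_call(ctx, 3 /*nargs*/);
	duk_pop(ctx);

	duk_push_c_function(ctx, demo_varargs, DUK_VARARGS);
	duk_push_int(ctx, 1);                /* callee sees top == 1 */
	duk_call(ctx, 1 /*nargs*/);
	duk_pop(ctx);
}
#endif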
780
781 /*
782 * Helper for making various kinds of calls.
783 *
784 * Call flags:
785 *
786 * DUK_CALL_FLAG_PROTECTED <--> protected call
787 * DUK_CALL_FLAG_IGNORE_RECLIMIT <--> ignore C recursion limit,
788 * for errhandler calls
789 * DUK_CALL_FLAG_CONSTRUCTOR_CALL <--> for 'new Foo()' calls
790 *
791 * Input stack (thr):
792 *
793 * [ func this arg1 ... argN ]
794 *
795 * Output stack (thr):
796 *
797 * [ retval ] (DUK_EXEC_SUCCESS)
798 * [ errobj ] (DUK_EXEC_ERROR (normal error), protected call)
799 *
800 * Even when executing a protected call an error may be thrown in rare cases.
801 * For instance, if we run out of memory when setting up the return stack
 802 * after a caught error, the out-of-memory error is propagated to the caller.
803 * Similarly, API errors (such as invalid input stack shape and invalid
804 * indices) cause an error to propagate out of this function. If there is
805 * no catchpoint for this error, the fatal error handler is called.
806 *
807 * See 'execution.rst'.
808 *
809 * The allowed thread states for making a call are:
810 * - thr matches heap->curr_thread, and thr is already RUNNING
811 * - thr does not match heap->curr_thread (may be NULL or other),
812 * and thr is INACTIVE (in this case, a setjmp() catchpoint is
813 * always used for thread book-keeping to work properly)
814 *
 815 * Like elsewhere, gotos are used to keep the indent level minimal and
 816 * to avoid a dozen helpers with awkward plumbing.
817 *
818 * Note: setjmp() and local variables have a nasty interaction,
819 * see execution.rst; non-volatile locals modified after setjmp()
820 * call are not guaranteed to keep their value.
821 */
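/* Illustrative sketch (not compiled): how the call contract documented
 * in the comment above is typically reached from the public Duktape
 * 1.x API.  A protected call corresponds to DUK_CALL_FLAG_PROTECTED
 * and reports DUK_EXEC_SUCCESS/DUK_EXEC_ERROR instead of throwing; an
 * unprotected call propagates errors to the nearest catchpoint.  Names
 * below are illustrative only.
 */
#if 0
#include <stdio.h>
#include "duktape.h"

static void demo_protected_call(duk_context *ctx) {
	/* Build [ ... func this arg1 ], matching the input stack shape above. */
	duk_eval_string(ctx, "(function (msg) { throw new Error(msg); })");
	duk_push_undefined(ctx);                  /* 'this' binding */
	duk_push_string(ctx, "boom");
	if (duk_pcall_method(ctx, 1 /*nargs*/) == DUK_EXEC_SUCCESS) {
		printf("result: %s\n", duk_safe_to_string(ctx, -1));
	} else {
		printf("error: %s\n", duk_safe_to_string(ctx, -1));  /* "Error: boom" */
	}
	duk_pop(ctx);  /* pop result or error */
}
#endif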
822
823 DUK_INTERNAL
824 duk_int_t duk_handle_call(duk_hthread *thr,
825 duk_idx_t num_stack_args,
826 duk_small_uint_t call_flags) {
827 duk_context *ctx = (duk_context *) thr;
828 duk_size_t entry_valstack_bottom_index;
829 duk_size_t entry_valstack_end;
830 duk_size_t entry_callstack_top;
831 duk_size_t entry_catchstack_top;
832 duk_int_t entry_call_recursion_depth;
833 duk_hthread *entry_curr_thread;
834 duk_uint_fast8_t entry_thread_state;
835 duk_instr_t **entry_ptr_curr_pc;
836 volatile duk_bool_t need_setjmp;
837 duk_jmpbuf * volatile old_jmpbuf_ptr = NULL; /* ptr is volatile (not the target) */
838 duk_idx_t idx_func; /* valstack index of 'func' and retval (relative to entry valstack_bottom) */
839 duk_idx_t idx_args; /* valstack index of start of args (arg1) (relative to entry valstack_bottom) */
840 duk_idx_t nargs; /* # argument registers target function wants (< 0 => "as is") */
841 duk_idx_t nregs; /* # total registers target function wants on entry (< 0 => "as is") */
842 duk_hobject *func; /* 'func' on stack (borrowed reference) */
843 duk_tval *tv_func; /* duk_tval ptr for 'func' on stack (borrowed reference) or tv_func_copy */
844 duk_tval tv_func_copy; /* to avoid relookups */
845 duk_activation *act;
846 duk_hobject *env;
847 duk_jmpbuf our_jmpbuf;
848 duk_tval tv_tmp;
849 duk_int_t retval = DUK_EXEC_ERROR;
850 duk_ret_t rc;
851
852 DUK_ASSERT(thr != NULL);
853 DUK_ASSERT(ctx != NULL);
854 DUK_ASSERT(num_stack_args >= 0);
855
856 /* XXX: currently NULL allocations are not supported; remove if later allowed */
857 DUK_ASSERT(thr->valstack != NULL);
858 DUK_ASSERT(thr->callstack != NULL);
859 DUK_ASSERT(thr->catchstack != NULL);
860
861 /*
862 * Preliminaries, required by setjmp() handler.
863 *
864 * Must be careful not to throw an unintended error here.
865 *
866 * Note: careful with indices like '-x'; if 'x' is zero, it
867 * refers to valstack_bottom.
868 */
869
870 entry_valstack_bottom_index = (duk_size_t) (thr->valstack_bottom - thr->valstack);
871 entry_valstack_end = (duk_size_t) (thr->valstack_end - thr->valstack);
872 entry_callstack_top = thr->callstack_top;
873 entry_catchstack_top = thr->catchstack_top;
874 entry_call_recursion_depth = thr->heap->call_recursion_depth;
875 entry_curr_thread = thr->heap->curr_thread; /* Note: may be NULL if first call */
876 entry_thread_state = thr->state;
877 entry_ptr_curr_pc = thr->ptr_curr_pc; /* may be NULL */
878
879 idx_func = duk_normalize_index(ctx, -num_stack_args - 2); /* idx_func must be valid, note: non-throwing! */
880 idx_args = idx_func + 2; /* idx_args is not necessarily valid if num_stack_args == 0 (idx_args then equals top) */
881
882 /* Need a setjmp() catchpoint if a protected call OR if we need to
883 * do mandatory cleanup.
884 */
885 need_setjmp = ((call_flags & DUK_CALL_FLAG_PROTECTED) != 0) || (thr->heap->curr_thread != thr);
886
887 DUK_DD(DUK_DDPRINT("duk_handle_call: thr=%p, num_stack_args=%ld, "
888 "call_flags=0x%08lx (protected=%ld, ignorerec=%ld, constructor=%ld), need_setjmp=%ld, "
889 "valstack_top=%ld, idx_func=%ld, idx_args=%ld, rec_depth=%ld/%ld, "
890 "entry_valstack_bottom_index=%ld, entry_callstack_top=%ld, entry_catchstack_top=%ld, "
891 "entry_call_recursion_depth=%ld, entry_curr_thread=%p, entry_thread_state=%ld",
892 (void *) thr,
893 (long) num_stack_args,
894 (unsigned long) call_flags,
895 (long) ((call_flags & DUK_CALL_FLAG_PROTECTED) != 0 ? 1 : 0),
896 (long) ((call_flags & DUK_CALL_FLAG_IGNORE_RECLIMIT) != 0 ? 1 : 0),
897 (long) ((call_flags & DUK_CALL_FLAG_CONSTRUCTOR_CALL) != 0 ? 1 : 0),
898 (long) need_setjmp,
899 (long) duk_get_top(ctx),
900 (long) idx_func,
901 (long) idx_args,
902 (long) thr->heap->call_recursion_depth,
903 (long) thr->heap->call_recursion_limit,
904 (long) entry_valstack_bottom_index,
905 (long) entry_callstack_top,
906 (long) entry_catchstack_top,
907 (long) entry_call_recursion_depth,
908 (void *) entry_curr_thread,
909 (long) entry_thread_state));
910
911 /* If thr->ptr_curr_pc is set, sync curr_pc to act->pc. Then NULL
912 * thr->ptr_curr_pc so that it's not accidentally used with an incorrect
913 * activation when side effects occur.
914 */
915 duk_hthread_sync_and_null_currpc(thr);
916
917 /* XXX: Multiple tv_func lookups are now avoided by making a local
918 * copy of tv_func. Another approach would be to compute an offset
919 * for tv_func from valstack bottom and recomputing the tv_func
920 * pointer quickly as valstack + offset instead of calling duk_get_tval().
921 */
922
923 if (idx_func < 0 || idx_args < 0) {
924 /*
925 * Since stack indices are not reliable, we can't do anything useful
926 * here. Invoke the existing setjmp catcher, or if it doesn't exist,
927 * call the fatal error handler.
928 */
929
930 DUK_ERROR(thr, DUK_ERR_API_ERROR, DUK_STR_INVALID_CALL_ARGS);
931 }
932
933 /*
934 * Setup a setjmp() catchpoint first because even the call setup
935 * may fail.
936 */
937
938 if (!need_setjmp) {
939 DUK_DDD(DUK_DDDPRINT("don't need a setjmp catchpoint"));
940 goto handle_call;
941 }
942
943 old_jmpbuf_ptr = thr->heap->lj.jmpbuf_ptr;
944 thr->heap->lj.jmpbuf_ptr = &our_jmpbuf;
945
946 if (DUK_SETJMP(thr->heap->lj.jmpbuf_ptr->jb) == 0) {
947 DUK_DDD(DUK_DDDPRINT("setjmp catchpoint setup complete"));
948 goto handle_call;
949 }
950
951 /*
952 * Error during setup, call, or postprocessing of the call.
953 * The error value is in heap->lj.value1.
954 *
955 * Note: any local variables accessed here must have their value
956 * assigned *before* the setjmp() call, OR they must be declared
957 * volatile. Otherwise their value is not guaranteed to be correct.
958 *
959 * The following are such variables:
960 * - duk_handle_call() parameters
961 * - entry_*
962 * - idx_func
963 * - idx_args
964 *
965 * The very first thing we do is restore the previous setjmp catcher.
966 * This means that any error in error handling will propagate outwards
967 * instead of causing a setjmp() re-entry above. The *only* actual
968 * errors that should happen here are allocation errors.
969 */
970
971 DUK_DDD(DUK_DDDPRINT("error caught during protected duk_handle_call(): %!T",
972 (duk_tval *) &thr->heap->lj.value1));
973
974 DUK_ASSERT(thr->heap->lj.type == DUK_LJ_TYPE_THROW);
975 DUK_ASSERT(thr->callstack_top >= entry_callstack_top);
976 DUK_ASSERT(thr->catchstack_top >= entry_catchstack_top);
977
978 /* We don't need to sync back thr->curr_pc here because the
979 * bytecode executor always has a setjmp catchpoint which
980 * does that before errors propagate to here.
981 */
982
983 /*
984 * Restore previous setjmp catchpoint
985 */
986
987 /* Note: either pointer may be NULL (at entry), so don't assert */
988 DUK_DDD(DUK_DDDPRINT("restore jmpbuf_ptr: %p -> %p",
989 (void *) (thr && thr->heap ? thr->heap->lj.jmpbuf_ptr : NULL),
990 (void *) old_jmpbuf_ptr));
991
992 thr->heap->lj.jmpbuf_ptr = old_jmpbuf_ptr;
993
994 if (!(call_flags & DUK_CALL_FLAG_PROTECTED)) {
995 /*
996 * Caller did not request a protected call but a setjmp
997 * catchpoint was set up to allow cleanup. So, clean up
998 * and rethrow.
999 *
1000 * We must restore curr_thread here to ensure that its
1001 * current value doesn't end up pointing to a thread object
1002 * which has been freed. This is now a problem because some
1003 * call sites (namely duk_safe_call()) *first* unwind stacks
1004 * and only then deal with curr_thread. If those call sites
1005 * were fixed, this wouldn't matter here.
1006 *
1007 * Note: this case happens e.g. when heap->curr_thread is
1008 * NULL on entry.
1009 */
1010
1011 DUK_DDD(DUK_DDDPRINT("call is not protected -> clean up and rethrow"));
1012
1013 /* Restore entry thread executor curr_pc stack frame pointer. */
1014 thr->ptr_curr_pc = entry_ptr_curr_pc;
1015
1016 DUK_HEAP_SWITCH_THREAD(thr->heap, entry_curr_thread); /* may be NULL */
1017 thr->state = entry_thread_state;
1018 DUK_ASSERT((thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread == NULL) || /* first call */
1019 (thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread != NULL) || /* other call */
1020 (thr->state == DUK_HTHREAD_STATE_RUNNING && thr->heap->curr_thread == thr)); /* current thread */
1021
1022 /* XXX: should setjmp catcher be responsible for this instead? */
1023 thr->heap->call_recursion_depth = entry_call_recursion_depth;
1024 duk_err_longjmp(thr);
1025 DUK_UNREACHABLE();
1026 }
1027
1028 duk_hthread_catchstack_unwind(thr, entry_catchstack_top);
1029 duk_hthread_callstack_unwind(thr, entry_callstack_top);
1030 thr->valstack_bottom = thr->valstack + entry_valstack_bottom_index;
1031
1032 /* [ ... func this (crud) errobj ] */
1033
1034 /* XXX: is there space? better implementation: write directly over
1035 * 'func' slot to avoid valstack grow issues.
1036 */
1037 duk_push_tval(ctx, &thr->heap->lj.value1);
1038
1039 /* [ ... func this (crud) errobj ] */
1040
1041 duk_replace(ctx, idx_func);
1042 duk_set_top(ctx, idx_func + 1);
1043
1044 /* [ ... errobj ] */
1045
1046 /* Ensure there is internal valstack spare before we exit; this may
1047 * throw an alloc error. The same guaranteed size must be available
1048 * as before the call. This is not optimal now: we store the valstack
1049 * allocated size during entry; this value may be higher than the
1050 * minimal guarantee for an application.
1051 */
1052
1053 (void) duk_valstack_resize_raw((duk_context *) thr,
1054 entry_valstack_end, /* same as during entry */
1055 DUK_VSRESIZE_FLAG_SHRINK | /* flags */
1056 DUK_VSRESIZE_FLAG_COMPACT |
1057 DUK_VSRESIZE_FLAG_THROW);
1058
1059 /* Note: currently a second setjmp restoration is done at the target;
1060 * this is OK, but could be refactored away.
1061 */
1062 retval = DUK_EXEC_ERROR;
1063 goto shrink_and_finished;
1064
1065 handle_call:
1066 /*
1067 * Thread state check and book-keeping.
1068 */
1069
1070 if (thr == thr->heap->curr_thread) {
1071 /* same thread */
1072 if (thr->state != DUK_HTHREAD_STATE_RUNNING) {
1073 /* should actually never happen, but check anyway */
1074 goto thread_state_error;
1075 }
1076 } else {
1077 /* different thread */
1078 DUK_ASSERT(thr->heap->curr_thread == NULL ||
1079 thr->heap->curr_thread->state == DUK_HTHREAD_STATE_RUNNING);
1080 if (thr->state != DUK_HTHREAD_STATE_INACTIVE) {
1081 goto thread_state_error;
1082 }
1083 DUK_HEAP_SWITCH_THREAD(thr->heap, thr);
1084 thr->state = DUK_HTHREAD_STATE_RUNNING;
1085
1086 /* Note: multiple threads may be simultaneously in the RUNNING
1087 * state, but not in the same "resume chain".
1088 */
1089 }
1090
1091 DUK_ASSERT(thr->heap->curr_thread == thr);
1092 DUK_ASSERT(thr->state == DUK_HTHREAD_STATE_RUNNING);
1093
1094 /*
1095 * C call recursion depth check, which provides a reasonable upper
1096 * bound on maximum C stack size (arbitrary C stack growth is only
1097 * possible by recursive handle_call / handle_safe_call calls).
1098 */
1099
1100 DUK_ASSERT(thr->heap->call_recursion_depth >= 0);
1101 DUK_ASSERT(thr->heap->call_recursion_depth <= thr->heap->call_recursion_limit);
1102
1103 if (call_flags & DUK_CALL_FLAG_IGNORE_RECLIMIT) {
1104 DUK_DD(DUK_DDPRINT("ignoring reclimit for this call (probably an errhandler call)"));
1105 } else {
1106 if (thr->heap->call_recursion_depth >= thr->heap->call_recursion_limit) {
1107 /* XXX: error message is a bit misleading: we reached a recursion
1108 * limit which is also essentially the same as a C callstack limit
1109 * (except perhaps with some relaxed threading assumptions).
1110 */
1111 DUK_ERROR(thr, DUK_ERR_RANGE_ERROR, DUK_STR_C_CALLSTACK_LIMIT);
1112 }
1113 thr->heap->call_recursion_depth++;
1114 }
1115
1116 /*
1117 * Check the function type, handle bound function chains, and prepare
1118 * parameters for the rest of the call handling. Also figure out the
1119 * effective 'this' binding, which replaces the current value at
1120 * idx_func + 1.
1121 *
1122 * If the target function is a 'bound' one, follow the chain of 'bound'
1123 * functions until a non-bound function is found. During this process,
1124 * bound arguments are 'prepended' to existing ones, and the "this"
1125 * binding is overridden. See E5 Section 15.3.4.5.1.
1126 *
1127 * Lightfunc detection happens here too. Note that lightweight functions
1128 * can be wrapped by (non-lightweight) bound functions so we must resolve
1129 * the bound function chain first.
1130 */
1131
1132 func = duk__nonbound_func_lookup(ctx, idx_func, &num_stack_args, &tv_func, call_flags);
1133 DUK_TVAL_SET_TVAL(&tv_func_copy, tv_func);
1134 tv_func = &tv_func_copy; /* local copy to avoid relookups */
1135
1136 DUK_ASSERT(func == NULL || !DUK_HOBJECT_HAS_BOUND(func));
1137 DUK_ASSERT(func == NULL || (DUK_HOBJECT_IS_COMPILEDFUNCTION(func) ||
1138 DUK_HOBJECT_IS_NATIVEFUNCTION(func)));
1139
1140 duk__coerce_effective_this_binding(thr, func, idx_func + 1);
1141 DUK_DDD(DUK_DDDPRINT("effective 'this' binding is: %!T",
1142 (duk_tval *) duk_get_tval(ctx, idx_func + 1)));
1143
1144 /* These base values are never used, but if the compiler doesn't know
1145 * that DUK_ERROR() won't return, these are needed to silence warnings.
1146 * On the other hand, scan-build will warn about the values not being
1147 * used, so add a DUK_UNREF.
1148 */
1149 nargs = 0; DUK_UNREF(nargs);
1150 nregs = 0; DUK_UNREF(nregs);
1151
1152 if (func == NULL) {
1153 duk_small_uint_t lf_flags;
1154
1155 DUK_DDD(DUK_DDDPRINT("lightfunc call handling"));
1156 DUK_ASSERT(DUK_TVAL_IS_LIGHTFUNC(tv_func));
1157 lf_flags = DUK_TVAL_GET_LIGHTFUNC_FLAGS(tv_func);
1158 nargs = DUK_LFUNC_FLAGS_GET_NARGS(lf_flags);
1159 if (nargs == DUK_LFUNC_NARGS_VARARGS) {
1160 nargs = -1; /* vararg */
1161 }
1162 nregs = nargs;
1163 } else if (DUK_HOBJECT_IS_COMPILEDFUNCTION(func)) {
1164 nargs = ((duk_hcompiledfunction *) func)->nargs;
1165 nregs = ((duk_hcompiledfunction *) func)->nregs;
1166 DUK_ASSERT(nregs >= nargs);
1167 } else if (DUK_HOBJECT_IS_NATIVEFUNCTION(func)) {
 1168 /* Note: nargs (and nregs) may be negative for a native
 1169 * function, which indicates that the function wants the
1170 * input stack "as is" (i.e. handles "vararg" arguments).
1171 */
1172 nargs = ((duk_hnativefunction *) func)->nargs;
1173 nregs = nargs;
1174 } else {
1175 /* XXX: this should be an assert */
1176 DUK_ERROR(thr, DUK_ERR_TYPE_ERROR, DUK_STR_NOT_CALLABLE);
1177 }
1178
1179 /* [ ... func this arg1 ... argN ] */
1180
1181 /*
1182 * Setup a preliminary activation.
1183 *
1184 * Don't touch valstack_bottom or valstack_top yet so that Duktape API
1185 * calls work normally.
1186 */
1187
1188 duk_hthread_callstack_grow(thr);
1189
1190 if (thr->callstack_top > 0) {
1191 /*
1192 * Update idx_retval of current activation.
1193 *
1194 * Although it might seem this is not necessary (bytecode executor
1195 * does this for Ecmascript-to-Ecmascript calls; other calls are
1196 * handled here), this turns out to be necessary for handling yield
1197 * and resume. For them, an Ecmascript-to-native call happens, and
1198 * the Ecmascript call's idx_retval must be set for things to work.
1199 */
1200
1201 (thr->callstack + thr->callstack_top - 1)->idx_retval = entry_valstack_bottom_index + idx_func;
1202 }
1203
1204 DUK_ASSERT(thr->callstack_top < thr->callstack_size);
1205 act = thr->callstack + thr->callstack_top;
1206 thr->callstack_top++;
1207 DUK_ASSERT(thr->callstack_top <= thr->callstack_size);
1208 DUK_ASSERT(thr->valstack_top > thr->valstack_bottom); /* at least effective 'this' */
1209 DUK_ASSERT(func == NULL || !DUK_HOBJECT_HAS_BOUND(func));
1210
1211 act->flags = 0;
1212 if (func == NULL || DUK_HOBJECT_HAS_STRICT(func)) {
1213 act->flags |= DUK_ACT_FLAG_STRICT;
1214 }
1215 if (call_flags & DUK_CALL_FLAG_CONSTRUCTOR_CALL) {
1216 act->flags |= DUK_ACT_FLAG_CONSTRUCT;
1217 /*act->flags |= DUK_ACT_FLAG_PREVENT_YIELD;*/
1218 }
1219 if (func == NULL || DUK_HOBJECT_IS_NATIVEFUNCTION(func)) {
1220 /*act->flags |= DUK_ACT_FLAG_PREVENT_YIELD;*/
1221 }
1222 if (call_flags & DUK_CALL_FLAG_DIRECT_EVAL) {
1223 act->flags |= DUK_ACT_FLAG_DIRECT_EVAL;
1224 }
1225
1226 /* As a first approximation, all calls except Ecmascript-to-Ecmascript
1227 * calls prevent a yield.
1228 */
1229 act->flags |= DUK_ACT_FLAG_PREVENT_YIELD;
1230
1231 act->func = func; /* NULL for lightfunc */
1232 act->var_env = NULL;
1233 act->lex_env = NULL;
1234 #ifdef DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
1235 act->prev_caller = NULL;
1236 #endif
1237 act->curr_pc = NULL;
1238 #if defined(DUK_USE_DEBUGGER_SUPPORT)
1239 act->prev_line = 0;
1240 #endif
1241 act->idx_bottom = entry_valstack_bottom_index + idx_args;
1242 #if 0 /* topmost activation idx_retval is considered garbage, no need to init */
1243 act->idx_retval = 0;
1244 #endif
1245 DUK_TVAL_SET_TVAL(&act->tv_func, tv_func); /* borrowed, no refcount */
1246
1247 if (act->flags & DUK_ACT_FLAG_PREVENT_YIELD) {
1248 /* duk_hthread_callstack_unwind() will decrease this on unwind */
1249 thr->callstack_preventcount++;
1250 }
1251
1252 /* XXX: Is this INCREF necessary? 'func' is always a borrowed
1253 * reference reachable through the value stack? If changed, stack
1254 * unwind code also needs to be fixed to match.
1255 */
1256 DUK_HOBJECT_INCREF_ALLOWNULL(thr, func); /* act->func */
1257
1258 #ifdef DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
1259 if (func) {
1260 duk__update_func_caller_prop(thr, func);
1261 }
1262 act = thr->callstack + thr->callstack_top - 1;
1263 #endif
1264
1265 /* [... func this arg1 ... argN] */
1266
1267 /*
1268 * Environment record creation and 'arguments' object creation.
1269 * Named function expression name binding is handled by the
1270 * compiler; the compiled function's parent env will contain
1271 * the (immutable) binding already.
1272 *
1273 * This handling is now identical for C and Ecmascript functions.
1274 * C functions always have the 'NEWENV' flag set, so their
1275 * environment record initialization is delayed (which is good).
1276 *
1277 * Delayed creation (on demand) is handled in duk_js_var.c.
1278 */
1279
1280 DUK_ASSERT(func == NULL || !DUK_HOBJECT_HAS_BOUND(func)); /* bound function chain has already been resolved */
1281
1282 if (func != NULL && !DUK_HOBJECT_HAS_NEWENV(func)) {
1283 /* use existing env (e.g. for non-strict eval); cannot have
1284 * an own 'arguments' object (but can refer to the existing one)
1285 */
1286
1287 DUK_ASSERT(!DUK_HOBJECT_HAS_CREATEARGS(func));
1288
1289 duk__handle_oldenv_for_call(thr, func, act);
1290
1291 DUK_ASSERT(act->lex_env != NULL);
1292 DUK_ASSERT(act->var_env != NULL);
1293 goto env_done;
1294 }
1295
1296 DUK_ASSERT(func == NULL || DUK_HOBJECT_HAS_NEWENV(func));
1297
1298 if (func == NULL || !DUK_HOBJECT_HAS_CREATEARGS(func)) {
1299 /* no need to create environment record now; leave as NULL */
1300 DUK_ASSERT(act->lex_env == NULL);
1301 DUK_ASSERT(act->var_env == NULL);
1302 goto env_done;
1303 }
1304
1305 /* third arg: absolute index (to entire valstack) of idx_bottom of new activation */
1306 env = duk_create_activation_environment_record(thr, func, act->idx_bottom);
1307 DUK_ASSERT(env != NULL);
1308
1309 /* [... func this arg1 ... argN envobj] */
1310
1311 DUK_ASSERT(DUK_HOBJECT_HAS_CREATEARGS(func));
1312 duk__handle_createargs_for_call(thr, func, env, num_stack_args);
1313
1314 /* [... func this arg1 ... argN envobj] */
1315
1316 act = thr->callstack + thr->callstack_top - 1;
1317 act->lex_env = env;
1318 act->var_env = env;
1319 DUK_HOBJECT_INCREF(thr, env);
1320 DUK_HOBJECT_INCREF(thr, env); /* XXX: incref by count (2) directly */
1321 duk_pop(ctx);
1322
1323 env_done:
1324 /* [... func this arg1 ... argN] */
1325
1326 /*
1327 * Setup value stack: clamp to 'nargs', fill up to 'nregs'
1328 *
1329 * Value stack may either grow or shrink, depending on the
1330 * number of func registers and the number of actual arguments.
1331 * If nregs >= 0, func wants args clamped to 'nargs'; else it
1332 * wants all args (= 'num_stack_args').
1333 */
1334
1335 duk__adjust_valstack_and_top(thr,
1336 num_stack_args,
1337 idx_args,
1338 nregs,
1339 nargs,
1340 func);
1341
1342 /*
1343 * Determine call type; then setup activation and call
1344 */
1345
1346 if (func != NULL && DUK_HOBJECT_IS_COMPILEDFUNCTION(func)) {
1347 goto ecmascript_call;
1348 } else {
1349 goto native_call;
1350 }
1351 DUK_UNREACHABLE();
1352
1353 /*
1354 * Native (C) call
1355 */
1356
1357 native_call:
1358 /*
1359 * Shift to new valstack_bottom.
1360 */
1361
1362 thr->valstack_bottom = thr->valstack_bottom + idx_args;
1363 /* keep current valstack_top */
1364 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
1365 DUK_ASSERT(thr->valstack_top >= thr->valstack_bottom);
1366 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
1367 DUK_ASSERT(func == NULL || ((duk_hnativefunction *) func)->func != NULL);
1368
1369 /* [... func this | arg1 ... argN] ('this' must precede new bottom) */
1370
1371 /*
1372 * Actual function call and return value check.
1373 *
1374 * Return values:
1375 * 0 success, no return value (default to 'undefined')
1376 * 1 success, one return value on top of stack
1377 * < 0 error, throw a "magic" error
1378 * other invalid
1379 */
1380
 1381 /* For native calls, thr->ptr_curr_pc must be NULL so we don't sync back. */
1382 DUK_ASSERT(thr->ptr_curr_pc == NULL);
1383
1384 if (func) {
1385 rc = ((duk_hnativefunction *) func)->func((duk_context *) thr);
1386 } else {
1387 duk_c_function funcptr = DUK_TVAL_GET_LIGHTFUNC_FUNCPTR(tv_func);
1388 rc = funcptr((duk_context *) thr);
1389 }
1390
1391 if (rc < 0) {
1392 duk_error_throw_from_negative_rc(thr, rc);
1393 DUK_UNREACHABLE();
1394 } else if (rc > 1) {
1395 DUK_ERROR(thr, DUK_ERR_API_ERROR, "c function returned invalid rc");
1396 }
1397 DUK_ASSERT(rc == 0 || rc == 1);
1398
1399 /*
1400 * Unwind stack(s) and shift back to old valstack_bottom.
1401 */
1402
1403 DUK_ASSERT(thr->catchstack_top == entry_catchstack_top);
1404 DUK_ASSERT(thr->callstack_top == entry_callstack_top + 1);
1405
1406 #if 0 /* should be no need to unwind */
1407 duk_hthread_catchstack_unwind(thr, entry_catchstack_top);
1408 #endif
1409 duk_hthread_callstack_unwind(thr, entry_callstack_top);
1410
1411 thr->valstack_bottom = thr->valstack + entry_valstack_bottom_index;
1412 /* keep current valstack_top */
1413
1414 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
1415 DUK_ASSERT(thr->valstack_top >= thr->valstack_bottom);
1416 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
1417 DUK_ASSERT(thr->valstack_top - thr->valstack_bottom >= idx_func + 1);
1418
1419 /*
1420 * Manipulate value stack so that return value is on top
1421 * (pushing an 'undefined' if necessary).
1422 */
1423
1424 /* XXX: should this happen in the callee's activation or after unwinding? */
1425 if (rc == 0) {
1426 duk_require_stack(ctx, 1);
1427 duk_push_undefined(ctx);
1428 }
1429 /* [... func this (crud) retval] */
1430
1431 DUK_DDD(DUK_DDDPRINT("native call retval -> %!T (rc=%ld)",
1432 (duk_tval *) duk_get_tval(ctx, -1), (long) rc));
1433
1434 duk_replace(ctx, idx_func);
1435 duk_set_top(ctx, idx_func + 1);
1436
1437 /* [... retval] */
1438
1439 /* Ensure there is internal valstack spare before we exit; this may
1440 * throw an alloc error. The same guaranteed size must be available
1441 * as before the call. This is not optimal now: we store the valstack
1442 * allocated size during entry; this value may be higher than the
1443 * minimal guarantee for an application.
1444 */
1445
1446 (void) duk_valstack_resize_raw((duk_context *) thr,
1447 entry_valstack_end, /* same as during entry */
1448 DUK_VSRESIZE_FLAG_SHRINK | /* flags */
1449 DUK_VSRESIZE_FLAG_COMPACT |
1450 DUK_VSRESIZE_FLAG_THROW);
1451
1452
1453 /*
1454 * Shrink checks and return with success.
1455 */
1456
1457 retval = DUK_EXEC_SUCCESS;
1458 goto shrink_and_finished;
1459
1460 /*
1461 * Ecmascript call
1462 */
1463
1464 ecmascript_call:
1465
1466 /*
1467 * Shift to new valstack_bottom.
1468 */
1469
1470 DUK_ASSERT(func != NULL);
1471 DUK_ASSERT(DUK_HOBJECT_HAS_COMPILEDFUNCTION(func));
1472 act->curr_pc = DUK_HCOMPILEDFUNCTION_GET_CODE_BASE(thr->heap, (duk_hcompiledfunction *) func);
1473
1474 thr->valstack_bottom = thr->valstack_bottom + idx_args;
1475 /* keep current valstack_top */
1476 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
1477 DUK_ASSERT(thr->valstack_top >= thr->valstack_bottom);
1478 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
1479
1480 /* [... func this | arg1 ... argN] ('this' must precede new bottom) */
1481
1482 /*
1483 * Bytecode executor call.
1484 *
1485 * Execute bytecode, handling any recursive function calls and
1486 * thread resumptions. Returns when execution would return from
1487 * the entry level activation. When the executor returns, a
1488 * single return value is left on the stack top.
1489 *
1490 * The only possible longjmp() is an error (DUK_LJ_TYPE_THROW),
1491 * other types are handled internally by the executor.
1492 *
1493 */
1494
1495 /* thr->ptr_curr_pc is set by bytecode executor early on entry */
1496 DUK_ASSERT(thr->ptr_curr_pc == NULL);
1497 DUK_DDD(DUK_DDDPRINT("entering bytecode execution"));
1498 duk_js_execute_bytecode(thr);
1499 DUK_DDD(DUK_DDDPRINT("returned from bytecode execution"));
1500
1501 /*
1502 * Unwind stack(s) and shift back to old valstack_bottom.
1503 */
1504
1505 DUK_ASSERT(thr->callstack_top == entry_callstack_top + 1);
1506
1507 duk_hthread_catchstack_unwind(thr, entry_catchstack_top);
1508 duk_hthread_callstack_unwind(thr, entry_callstack_top);
1509
1510 thr->valstack_bottom = thr->valstack + entry_valstack_bottom_index;
1511 /* keep current valstack_top */
1512
1513 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
1514 DUK_ASSERT(thr->valstack_top >= thr->valstack_bottom);
1515 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
1516 DUK_ASSERT(thr->valstack_top - thr->valstack_bottom >= idx_func + 1);
1517
1518 /*
1519 * Manipulate value stack so that return value is on top.
1520 */
1521
1522 /* [... func this (crud) retval] */
1523
1524 duk_replace(ctx, idx_func);
1525 duk_set_top(ctx, idx_func + 1);
1526
1527 /* [... retval] */
1528
1529 /* Ensure there is internal valstack spare before we exit; this may
1530 * throw an alloc error. The same guaranteed size must be available
1531 * as before the call. This is not optimal now: we store the valstack
1532 * allocated size during entry; this value may be higher than the
1533 * minimal guarantee for an application.
1534 */
1535
1536 (void) duk_valstack_resize_raw((duk_context *) thr,
1537 entry_valstack_end, /* same as during entry */
1538 DUK_VSRESIZE_FLAG_SHRINK | /* flags */
1539 DUK_VSRESIZE_FLAG_COMPACT |
1540 DUK_VSRESIZE_FLAG_THROW);
1541
1542 /*
1543 * Shrink checks and return with success.
1544 */
1545
1546 retval = DUK_EXEC_SUCCESS;
1547 goto shrink_and_finished;
1548
1549 shrink_and_finished:
1550 #if defined(DUK_USE_FASTINT)
1551 /* Explicit check for fastint downgrade. */
1552 {
1553 duk_tval *tv_fi;
1554 tv_fi = duk_get_tval(ctx, -1);
1555 DUK_ASSERT(tv_fi != NULL);
1556 DUK_TVAL_CHKFAST_INPLACE(tv_fi);
1557 }
1558 #endif
1559
1560 /* these are "soft" shrink checks, whose failures are ignored */
1561 /* XXX: would be nice if fast path was inlined */
1562 duk_hthread_catchstack_shrink_check(thr);
1563 duk_hthread_callstack_shrink_check(thr);
1564 goto finished;
1565
1566 finished:
1567 if (need_setjmp) {
1568 /* Note: either pointer may be NULL (at entry), so don't assert;
1569 * this is now done potentially twice, which is OK
1570 */
1571 DUK_DDD(DUK_DDDPRINT("restore jmpbuf_ptr: %p -> %p (possibly already done)",
1572 (void *) (thr && thr->heap ? thr->heap->lj.jmpbuf_ptr : NULL),
1573 (void *) old_jmpbuf_ptr));
1574 thr->heap->lj.jmpbuf_ptr = old_jmpbuf_ptr;
1575
1576 /* These are just convenience "wiping" of state */
1577 thr->heap->lj.type = DUK_LJ_TYPE_UNKNOWN;
1578 thr->heap->lj.iserror = 0;
1579
1580 /* Side effects should not be an issue here: tv_tmp is local and
1581 * thr->heap (and thr->heap->lj) have a stable pointer. Finalizer
1582 * runs etc capture even out-of-memory errors so nothing should
1583 * throw here.
1584 */
1585 DUK_TVAL_SET_TVAL(&tv_tmp, &thr->heap->lj.value1);
1586 DUK_TVAL_SET_UNDEFINED_UNUSED(&thr->heap->lj.value1);
1587 DUK_TVAL_DECREF(thr, &tv_tmp);
1588
1589 DUK_TVAL_SET_TVAL(&tv_tmp, &thr->heap->lj.value2);
1590 DUK_TVAL_SET_UNDEFINED_UNUSED(&thr->heap->lj.value2);
1591 DUK_TVAL_DECREF(thr, &tv_tmp);
1592
1593 DUK_DDD(DUK_DDDPRINT("setjmp catchpoint torn down"));
1594 }
1595
1596 /* Restore entry thread executor curr_pc stack frame pointer. */
1597 thr->ptr_curr_pc = entry_ptr_curr_pc;
1598
1599 DUK_HEAP_SWITCH_THREAD(thr->heap, entry_curr_thread); /* may be NULL */
1600 thr->state = (duk_uint8_t) entry_thread_state;
1601
1602 DUK_ASSERT((thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread == NULL) || /* first call */
1603 (thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread != NULL) || /* other call */
1604 (thr->state == DUK_HTHREAD_STATE_RUNNING && thr->heap->curr_thread == thr)); /* current thread */
1605
1606 thr->heap->call_recursion_depth = entry_call_recursion_depth;
1607
1608 /* If the debugger is active we need to force an interrupt so that
1609 * debugger breakpoints are rechecked. This is important for function
1610 * calls caused by side effects (e.g. when doing a DUK_OP_GETPROP), see
1611 * GH-303. Only needed for success path, error path always causes a
1612 * breakpoint recheck in the executor. It would be enough to set this
1613 * only when returning to an Ecmascript activation, but setting the flag
1614 * on every return should have no ill effect.
1615 */
1616 #if defined(DUK_USE_DEBUGGER_SUPPORT)
1617 if (DUK_HEAP_IS_DEBUGGER_ATTACHED(thr->heap)) {
1618 DUK_DD(DUK_DDPRINT("returning to ecmascript activation with debugger enabled, force interrupt"));
1619 DUK_ASSERT(thr->interrupt_counter <= thr->interrupt_init);
1620 thr->interrupt_init -= thr->interrupt_counter;
1621 thr->interrupt_counter = 0;
1622 thr->heap->dbg_force_restart = 1;
1623 }
1624 #endif
1625
1626 #if defined(DUK_USE_INTERRUPT_COUNTER) && defined(DUK_USE_DEBUG)
1627 duk__interrupt_fixup(thr, entry_curr_thread);
1628 #endif
1629
1630 return retval;
1631
1632 thread_state_error:
1633 DUK_ERROR(thr, DUK_ERR_TYPE_ERROR, "invalid thread state for call (%ld)", (long) thr->state);
1634 DUK_UNREACHABLE();
1635 return DUK_EXEC_ERROR; /* never executed */
1636 }
1637
1638 /*
1639 * Manipulate value stack so that exactly 'num_stack_rets' return
1640 * values are at 'idx_retbase' in every case, assuming there are
1641 * 'num_actual_rets' return values on top of the stack.
1642 *
1643 * This is a bit tricky, because the called C function operates in
1644 * the same activation record and may have e.g. popped the stack
1645 * empty (below idx_retbase).
1646 */
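/* Worked example (editorial sketch, not from the original source): the stack
 * top is first set to idx_rcbase + num_stack_rets, chopping surplus return
 * values or padding with 'undefined'. Then, if idx_rcbase > idx_retbase the
 * crud between idx_retbase and idx_rcbase is removed; if idx_rcbase is below
 * idx_retbase, extra 'undefined' values are inserted at idx_rcbase. Either
 * way exactly 'num_stack_rets' values end up starting at idx_retbase. E.g.
 * with num_stack_rets=2 and a single error object as the only actual value
 * (the error path below), the result is [ errobj undefined ] at idx_retbase.
 */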
1647
1648 DUK_LOCAL void duk__safe_call_adjust_valstack(duk_hthread *thr, duk_idx_t idx_retbase, duk_idx_t num_stack_rets, duk_idx_t num_actual_rets) {
1649 duk_context *ctx = (duk_context *) thr;
1650 duk_idx_t idx_rcbase;
1651
1652 DUK_ASSERT(thr != NULL);
1653 DUK_ASSERT(idx_retbase >= 0);
1654 DUK_ASSERT(num_stack_rets >= 0);
1655 DUK_ASSERT(num_actual_rets >= 0);
1656
1657 idx_rcbase = duk_get_top(ctx) - num_actual_rets; /* base of known return values */
1658
1659 DUK_DDD(DUK_DDDPRINT("adjust valstack after func call: "
1660 "num_stack_rets=%ld, num_actual_rets=%ld, stack_top=%ld, idx_retbase=%ld, idx_rcbase=%ld",
1661 (long) num_stack_rets, (long) num_actual_rets, (long) duk_get_top(ctx),
1662 (long) idx_retbase, (long) idx_rcbase));
1663
1664 DUK_ASSERT(idx_rcbase >= 0); /* caller must check */
1665
1666 /* ensure space for final configuration (idx_retbase + num_stack_rets) and
1667 * intermediate configurations
1668 */
1669 duk_require_stack_top(ctx,
1670 (idx_rcbase > idx_retbase ? idx_rcbase : idx_retbase) +
1671 num_stack_rets);
1672
1673 /* chop extra retvals away / extend with undefined */
1674 duk_set_top(ctx, idx_rcbase + num_stack_rets);
1675
1676 if (idx_rcbase >= idx_retbase) {
1677 duk_idx_t count = idx_rcbase - idx_retbase;
1678 duk_idx_t i;
1679
1680 DUK_DDD(DUK_DDDPRINT("elements at/after idx_retbase have enough to cover func retvals "
1681 "(idx_retbase=%ld, idx_rcbase=%ld)", (long) idx_retbase, (long) idx_rcbase));
1682
1683 /* nuke values at idx_retbase to get the first retval (initially
1684 * at idx_rcbase) to idx_retbase
1685 */
1686
1687 DUK_ASSERT(count >= 0);
1688
1689 for (i = 0; i < count; i++) {
1690 /* XXX: inefficient; block remove primitive */
1691 duk_remove(ctx, idx_retbase);
1692 }
1693 } else {
1694 duk_idx_t count = idx_retbase - idx_rcbase;
1695 duk_idx_t i;
1696
1697 DUK_DDD(DUK_DDDPRINT("not enough elements at/after idx_retbase to cover func retvals "
1698 "(idx_retbase=%ld, idx_rcbase=%ld)", (long) idx_retbase, (long) idx_rcbase));
1699
1700 /* insert 'undefined' values at idx_rcbase to get the
1701 * return values to idx_retbase
1702 */
1703
1704 DUK_ASSERT(count > 0);
1705
1706 for (i = 0; i < count; i++) {
1707 /* XXX: inefficient; block insert primitive */
1708 duk_push_undefined(ctx);
1709 duk_insert(ctx, idx_rcbase);
1710 }
1711 }
1712 }
1713
1714 /*
1715 * Make a "C protected call" within the current activation.
1716 *
1717 * The allowed thread states for making a call are the same as for
1718 * duk_handle_call().
1719 *
1720 * Note that like duk_handle_call(), even if this call is protected,
1721 * there are a few situations where the current (pre-entry) setjmp
1722 * catcher (or a fatal error handler if no such catcher exists) is
1723 * invoked:
1724 *
1725 * - Blatant API argument errors (e.g. num_stack_args is invalid,
1726 * so we can't form a reasonable return stack)
1727 *
1728 * - Errors during error handling, e.g. failure to reallocate
1729 * space in the value stack due to an alloc error
1730 *
1731 * Such errors propagate outwards, ultimately to the fatal error
1732 * handler if nothing else.
1733 */
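/* Editorial sketch (not part of the original source): this handler backs the
 * public duk_safe_call() API. Assuming the Duktape 1.x public API, a typical
 * protected call from application code looks roughly like the disabled
 * example below ('my__raw_op' and 'example' are hypothetical names).
 */
#if 0
static duk_ret_t my__raw_op(duk_context *ctx) {
	/* Runs inside the caller's activation and may throw freely. */
	duk_push_int(ctx, duk_require_int(ctx, -1) * 2);
	return 1;  /* one return value */
}

static void example(duk_context *ctx) {
	duk_push_int(ctx, 21);
	if (duk_safe_call(ctx, my__raw_op, 1 /*nargs*/, 1 /*nrets*/) == DUK_EXEC_SUCCESS) {
		/* [ ... result ] */
	} else {
		/* [ ... errobj ] */
	}
	duk_pop(ctx);
}
#endif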
1734
1735 /* XXX: bump preventcount by one for the duration of this call? */
1736
1737 DUK_INTERNAL
1738 duk_int_t duk_handle_safe_call(duk_hthread *thr,
1739 duk_safe_call_function func,
1740 duk_idx_t num_stack_args,
1741 duk_idx_t num_stack_rets) {
1742 duk_context *ctx = (duk_context *) thr;
1743 duk_size_t entry_valstack_bottom_index;
1744 duk_size_t entry_callstack_top;
1745 duk_size_t entry_catchstack_top;
1746 duk_int_t entry_call_recursion_depth;
1747 duk_hthread *entry_curr_thread;
1748 duk_uint_fast8_t entry_thread_state;
1749 duk_instr_t **entry_ptr_curr_pc;
1750 duk_jmpbuf *old_jmpbuf_ptr = NULL;
1751 duk_jmpbuf our_jmpbuf;
1752 duk_tval tv_tmp;
1753 duk_idx_t idx_retbase;
1754 duk_int_t retval;
1755 duk_ret_t rc;
1756
1757 DUK_ASSERT(thr != NULL);
1758 DUK_ASSERT(ctx != NULL);
1759
1760 /* Note: careful with indices like '-x'; if 'x' is zero, it refers to bottom */
1761 entry_valstack_bottom_index = (duk_size_t) (thr->valstack_bottom - thr->valstack);
1762 entry_callstack_top = thr->callstack_top;
1763 entry_catchstack_top = thr->catchstack_top;
1764 entry_call_recursion_depth = thr->heap->call_recursion_depth;
1765 entry_curr_thread = thr->heap->curr_thread; /* Note: may be NULL if first call */
1766 entry_thread_state = thr->state;
1767 entry_ptr_curr_pc = thr->ptr_curr_pc; /* may be NULL */
1768 idx_retbase = duk_get_top(ctx) - num_stack_args; /* Note: not a valid stack index if num_stack_args == 0 */
1769
1770 /* Note: cannot portably debug print a function pointer, hence 'func' not printed! */
1771 DUK_DD(DUK_DDPRINT("duk_handle_safe_call: thr=%p, num_stack_args=%ld, num_stack_rets=%ld, "
1772 "valstack_top=%ld, idx_retbase=%ld, rec_depth=%ld/%ld, "
1773 "entry_valstack_bottom_index=%ld, entry_callstack_top=%ld, entry_catchstack_top=%ld, "
1774 "entry_call_recursion_depth=%ld, entry_curr_thread=%p, entry_thread_state=%ld",
1775 (void *) thr,
1776 (long) num_stack_args,
1777 (long) num_stack_rets,
1778 (long) duk_get_top(ctx),
1779 (long) idx_retbase,
1780 (long) thr->heap->call_recursion_depth,
1781 (long) thr->heap->call_recursion_limit,
1782 (long) entry_valstack_bottom_index,
1783 (long) entry_callstack_top,
1784 (long) entry_catchstack_top,
1785 (long) entry_call_recursion_depth,
1786 (void *) entry_curr_thread,
1787 (long) entry_thread_state));
1788
1789 if (idx_retbase < 0) {
1790 /*
1791 * Since stack indices are not reliable, we can't do anything useful
1792 * here. Invoke the existing setjmp catcher, or if it doesn't exist,
1793 * call the fatal error handler.
1794 */
1795
1796 DUK_ERROR(thr, DUK_ERR_API_ERROR, DUK_STR_INVALID_CALL_ARGS);
1797 }
1798
1799 /* setjmp catchpoint setup */
1800
1801 old_jmpbuf_ptr = thr->heap->lj.jmpbuf_ptr;
1802 thr->heap->lj.jmpbuf_ptr = &our_jmpbuf;
1803
1804 if (DUK_SETJMP(thr->heap->lj.jmpbuf_ptr->jb) == 0) {
1805 goto handle_call;
1806 }
1807
1808 /*
1809 * Error during call. The error value is at heap->lj.value1.
1810 *
1811 * Careful with variable accesses here; must be assigned to before
1812 * setjmp() or be declared volatile. See duk_handle_call().
1813 *
1814 * The following are such variables:
1815 * - duk_handle_safe_call() parameters
1816 * - entry_*
1817 * - idx_retbase
1818 *
1819 * The very first thing we do is restore the previous setjmp catcher.
1820 * This means that any error in error handling will propagate outwards
1821 * instead of causing a setjmp() re-entry above. The *only* actual
1822 * errors that should happen here are allocation errors.
1823 */
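/* Editorial note (not from the original source): the C rule relied on here
 * is that a non-volatile local modified between DUK_SETJMP() and the
 * corresponding longjmp() has an indeterminate value after the jump
 * (C99 7.13.2.1); hence only locals fully assigned before DUK_SETJMP()
 * above (the parameters, the entry_* snapshots, and idx_retbase) are read
 * on this error path.
 */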
1824
1825 DUK_DDD(DUK_DDDPRINT("error caught during protected duk_handle_safe_call()"));
1826
1827 DUK_ASSERT(thr->heap->lj.type == DUK_LJ_TYPE_THROW);
1828 DUK_ASSERT(thr->callstack_top >= entry_callstack_top);
1829 DUK_ASSERT(thr->catchstack_top >= entry_catchstack_top);
1830
1831 /* Note: either pointer may be NULL (at entry), so don't assert;
1832 * these are now restored twice which is OK.
1833 */
1834 thr->heap->lj.jmpbuf_ptr = old_jmpbuf_ptr;
1835
1836 duk_hthread_catchstack_unwind(thr, entry_catchstack_top);
1837 duk_hthread_callstack_unwind(thr, entry_callstack_top);
1838 thr->valstack_bottom = thr->valstack + entry_valstack_bottom_index;
1839
1840 /* [ ... | (crud) ] */
1841
1842 /* XXX: space in valstack? see discussion in duk_handle_call. */
1843 duk_push_tval(ctx, &thr->heap->lj.value1);
1844
1845 /* [ ... | (crud) errobj ] */
1846
1847 DUK_ASSERT(duk_get_top(ctx) >= 1); /* at least errobj must be on stack */
1848
1849 /* check that the valstack has space for the final amount and any
1850 * intermediate space needed; this is suboptimal but should be safe
1851 */
1852 duk_require_stack_top(ctx, idx_retbase + num_stack_rets); /* final configuration */
1853 duk_require_stack(ctx, num_stack_rets);
1854
1855 duk__safe_call_adjust_valstack(thr, idx_retbase, num_stack_rets, 1); /* 1 = num actual 'return values' */
1856
1857 /* [ ... | ] or [ ... | errobj (M * undefined)] where M = num_stack_rets - 1 */
1858
1859 retval = DUK_EXEC_ERROR;
1860 goto shrink_and_finished;
1861
1862 /*
1863 * Handle call (inside setjmp)
1864 */
1865
1866 handle_call:
1867
1868 DUK_DDD(DUK_DDDPRINT("safe_call setjmp catchpoint setup complete"));
1869
1870 /*
1871 * Thread state check and book-keeping.
1872 */
1873
1874 if (thr == thr->heap->curr_thread) {
1875 /* same thread */
1876 if (thr->state != DUK_HTHREAD_STATE_RUNNING) {
1877 /* should actually never happen, but check anyway */
1878 goto thread_state_error;
1879 }
1880 } else {
1881 /* different thread */
1882 DUK_ASSERT(thr->heap->curr_thread == NULL ||
1883 thr->heap->curr_thread->state == DUK_HTHREAD_STATE_RUNNING);
1884 if (thr->state != DUK_HTHREAD_STATE_INACTIVE) {
1885 goto thread_state_error;
1886 }
1887 DUK_HEAP_SWITCH_THREAD(thr->heap, thr);
1888 thr->state = DUK_HTHREAD_STATE_RUNNING;
1889
1890 /* Note: multiple threads may be simultaneously in the RUNNING
1891 * state, but not in the same "resume chain".
1892 */
1893 }
1894
1895 DUK_ASSERT(thr->heap->curr_thread == thr);
1896 DUK_ASSERT(thr->state == DUK_HTHREAD_STATE_RUNNING);
1897
1898 /*
1899 * Recursion limit check.
1900 *
1901 * Note: there is no need for an "ignore recursion limit" flag
1902 * for duk_handle_safe_call now.
1903 */
1904
1905 DUK_ASSERT(thr->heap->call_recursion_depth >= 0);
1906 DUK_ASSERT(thr->heap->call_recursion_depth <= thr->heap->call_recursion_limit);
1907 if (thr->heap->call_recursion_depth >= thr->heap->call_recursion_limit) {
1908 /* XXX: error message is a bit misleading: we reached a recursion
1909 * limit which is also essentially the same as a C callstack limit
1910 * (except perhaps with some relaxed threading assumptions).
1911 */
1912 DUK_ERROR(thr, DUK_ERR_RANGE_ERROR, DUK_STR_C_CALLSTACK_LIMIT);
1913 }
1914 thr->heap->call_recursion_depth++;
1915
1916 /*
1917 * Valstack spare check
1918 */
1919
1920 duk_require_stack(ctx, 0); /* internal spare */
1921
1922 /*
1923 * Make the C call
1924 */
1925
1926 rc = func(ctx);
1927
1928 DUK_DDD(DUK_DDDPRINT("safe_call, func rc=%ld", (long) rc));
1929
1930 /*
1931 * Valstack manipulation for results
1932 */
1933
1934 /* we're running inside the caller's activation, so no change in call/catch stack or valstack bottom */
1935 DUK_ASSERT(thr->callstack_top == entry_callstack_top);
1936 DUK_ASSERT(thr->catchstack_top == entry_catchstack_top);
1937 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
1938 DUK_ASSERT((duk_size_t) (thr->valstack_bottom - thr->valstack) == entry_valstack_bottom_index);
1939 DUK_ASSERT(thr->valstack_top >= thr->valstack_bottom);
1940 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
1941
1942 if (rc < 0) {
1943 duk_error_throw_from_negative_rc(thr, rc);
1944 }
1945 DUK_ASSERT(rc >= 0);
1946
1947 if (duk_get_top(ctx) < rc) {
1948 DUK_ERROR(thr, DUK_ERR_API_ERROR, "not enough stack values for safe_call rc");
1949 }
1950
1951 duk__safe_call_adjust_valstack(thr, idx_retbase, num_stack_rets, rc);
1952
1953 /* Note: no need for callstack / catchstack shrink check */
1954 retval = DUK_EXEC_SUCCESS;
1955 goto finished;
1956
1957 shrink_and_finished:
1958 /* these are "soft" shrink checks, whose failures are ignored */
1959 /* XXX: would be nice if fast path was inlined */
1960 duk_hthread_catchstack_shrink_check(thr);
1961 duk_hthread_callstack_shrink_check(thr);
1962 goto finished;
1963
1964 finished:
1965 /* Note: either pointer may be NULL (at entry), so don't assert */
1966 thr->heap->lj.jmpbuf_ptr = old_jmpbuf_ptr;
1967
1968 /* These are just convenience "wiping" of state */
1969 thr->heap->lj.type = DUK_LJ_TYPE_UNKNOWN;
1970 thr->heap->lj.iserror = 0;
1971
1972 /* Side effects should not be an issue here: tv_tmp is local and
1973 * thr->heap (and thr->heap->lj) have a stable pointer. Finalizer
1974 * runs etc capture even out-of-memory errors so nothing should
1975 * throw here.
1976 */
1977 DUK_TVAL_SET_TVAL(&tv_tmp, &thr->heap->lj.value1);
1978 DUK_TVAL_SET_UNDEFINED_UNUSED(&thr->heap->lj.value1);
1979 DUK_TVAL_DECREF(thr, &tv_tmp);
1980
1981 DUK_TVAL_SET_TVAL(&tv_tmp, &thr->heap->lj.value2);
1982 DUK_TVAL_SET_UNDEFINED_UNUSED(&thr->heap->lj.value2);
1983 DUK_TVAL_DECREF(thr, &tv_tmp);
1984
1985 DUK_DDD(DUK_DDDPRINT("setjmp catchpoint torn down"));
1986
1987 /* Restore entry thread executor curr_pc stack frame pointer. */
1988 thr->ptr_curr_pc = entry_ptr_curr_pc;
1989
1990 /* XXX: because we unwind stacks above, thr->heap->curr_thread is at
1991 * risk of pointing to an already freed thread. This was indeed the
1992 * case in test-bug-multithread-valgrind.c, until duk_handle_call()
1993 * was fixed to restore thr->heap->curr_thread before rethrowing an
1994 * uncaught error.
1995 */
1996 DUK_HEAP_SWITCH_THREAD(thr->heap, entry_curr_thread); /* may be NULL */
1997 thr->state = (duk_uint8_t) entry_thread_state;
1998
1999 DUK_ASSERT((thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread == NULL) || /* first call */
2000 (thr->state == DUK_HTHREAD_STATE_INACTIVE && thr->heap->curr_thread != NULL) || /* other call */
2001 (thr->state == DUK_HTHREAD_STATE_RUNNING && thr->heap->curr_thread == thr)); /* current thread */
2002
2003 thr->heap->call_recursion_depth = entry_call_recursion_depth;
2004
2005 /* stack discipline consistency check */
2006 DUK_ASSERT(duk_get_top(ctx) == idx_retbase + num_stack_rets);
2007
2008 /* A debugger forced interrupt check is not needed here, as
2009 * problematic safe calls are not caused by side effects.
2010 */
2011
2012 #if defined(DUK_USE_INTERRUPT_COUNTER) && defined(DUK_USE_DEBUG)
2013 duk__interrupt_fixup(thr, entry_curr_thread);
2014 #endif
2015
2016 return retval;
2017
2018 thread_state_error:
2019 DUK_ERROR(thr, DUK_ERR_TYPE_ERROR, "invalid thread state for safe_call (%ld)", (long) thr->state);
2020 DUK_UNREACHABLE();
2021 return DUK_EXEC_ERROR; /* never executed */
2022 }
2023
2024 /*
2025 * Helper for handling an Ecmascript-to-Ecmascript call or an (initial)
2026 * Duktape.Thread.resume() of an Ecmascript function.
2027 *
2028 * Compared to normal calls handled by duk_handle_call(), there are a
2029 * bunch of differences:
2030 *
2031 * - the call is never protected
2032 * - there is no C recursion depth increase (hence an "ignore recursion
2033 * limit" flag is not applicable)
2034 * - instead of making the call, this helper just performs the thread
2035 * setup and returns; the bytecode executor then restarts execution
2036 * internally
2037 * - Ecmascript functions are never 'vararg' functions (they access
2038 * varargs through the 'arguments' object)
2039 *
2040 * The callstack of the target contains an earlier Ecmascript call in case
2041 * of an Ecmascript-to-Ecmascript call (whose idx_retval is updated), or
2042 * is empty in case of an initial Duktape.Thread.resume().
2043 *
2044 * The first thing to do here is to figure out whether an ecma-to-ecma
2045 * call is actually possible. It may not be if the target is a bound
2046 * function: the resolved final function may be native. In that case,
2047 * return 0 so the caller can fall back to a normal call path.
2048 */
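/* Editorial sketch (not part of the original source) of how a call site is
 * expected to use the return value; the actual call sites live in the
 * bytecode executor and in coroutine resume handling.
 */
#if 0
if (duk_handle_ecma_call_setup(thr, num_stack_args, call_flags)) {
	/* Setup succeeded: restart bytecode execution from the topmost
	 * (new) activation.
	 */
} else {
	/* Final target is not a compiled function; fall back to a normal
	 * duk_handle_call().
	 */
}
#endif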
2049
2050 DUK_INTERNAL
2051 duk_bool_t duk_handle_ecma_call_setup(duk_hthread *thr,
2052 duk_idx_t num_stack_args,
2053 duk_small_uint_t call_flags) {
2054 duk_context *ctx = (duk_context *) thr;
2055 duk_size_t entry_valstack_bottom_index;
2056 duk_idx_t idx_func; /* valstack index of 'func' and retval (relative to entry valstack_bottom) */
2057 duk_idx_t idx_args; /* valstack index of start of args (arg1) (relative to entry valstack_bottom) */
2058 duk_idx_t nargs; /* # argument registers target function wants (< 0 => never for ecma calls) */
2059 duk_idx_t nregs; /* # total registers target function wants on entry (< 0 => never for ecma calls) */
2060 duk_hobject *func; /* 'func' on stack (borrowed reference) */
2061 duk_tval *tv_func; /* duk_tval ptr for 'func' on stack (borrowed reference) */
2062 duk_activation *act;
2063 duk_hobject *env;
2064 duk_bool_t use_tailcall;
2065 duk_instr_t **entry_ptr_curr_pc;
2066
2067 DUK_ASSERT(thr != NULL);
2068 DUK_ASSERT(ctx != NULL);
2069 DUK_ASSERT(!((call_flags & DUK_CALL_FLAG_IS_RESUME) != 0 && (call_flags & DUK_CALL_FLAG_IS_TAILCALL) != 0));
2070
2071 /* XXX: assume these? */
2072 DUK_ASSERT(thr->valstack != NULL);
2073 DUK_ASSERT(thr->callstack != NULL);
2074 DUK_ASSERT(thr->catchstack != NULL);
2075
2076 /* no need to handle thread state book-keeping here */
2077 DUK_ASSERT((call_flags & DUK_CALL_FLAG_IS_RESUME) != 0 ||
2078 (thr->state == DUK_HTHREAD_STATE_RUNNING &&
2079 thr->heap->curr_thread == thr));
2080
2081 /* If thr->ptr_curr_pc is set, sync curr_pc to act->pc. Then NULL
2082 * thr->ptr_curr_pc so that it's not accidentally used with an incorrect
2083 * activation when side effects occur. If we end up not making the
2084 * call we must restore the value.
2085 */
2086 entry_ptr_curr_pc = thr->ptr_curr_pc;
2087 duk_hthread_sync_and_null_currpc(thr);
2088
2089 /* if a tail call:
2090 * - an Ecmascript activation must be on top of the callstack
2091 * - there cannot be any active catchstack entries
2092 */
2093 #ifdef DUK_USE_ASSERTIONS
2094 if (call_flags & DUK_CALL_FLAG_IS_TAILCALL) {
2095 duk_size_t our_callstack_index;
2096 duk_size_t i;
2097
2098 DUK_ASSERT(thr->callstack_top >= 1);
2099 our_callstack_index = thr->callstack_top - 1;
2100 DUK_ASSERT_DISABLE(our_callstack_index >= 0);
2101 DUK_ASSERT(our_callstack_index < thr->callstack_size);
2102 DUK_ASSERT(DUK_ACT_GET_FUNC(thr->callstack + our_callstack_index) != NULL);
2103 DUK_ASSERT(DUK_HOBJECT_IS_COMPILEDFUNCTION(DUK_ACT_GET_FUNC(thr->callstack + our_callstack_index)));
2104
2105 /* No entry in the catchstack which would actually catch a
2106 * throw can refer to the callstack entry being reused.
2107 * There *can* be catchstack entries referring to the current
2108 * callstack entry as long as they don't catch (e.g. label sites).
2109 */
2110
2111 for (i = 0; i < thr->catchstack_top; i++) {
2112 DUK_ASSERT(thr->catchstack[i].callstack_index < our_callstack_index || /* refer to callstack entries below current */
2113 DUK_CAT_GET_TYPE(thr->catchstack + i) == DUK_CAT_TYPE_LABEL); /* or a non-catching entry */
2114 }
2115 }
2116 #endif /* DUK_USE_ASSERTIONS */
2117
2118 entry_valstack_bottom_index = (duk_size_t) (thr->valstack_bottom - thr->valstack);
2119 idx_func = duk_normalize_index(thr, -num_stack_args - 2);
2120 idx_args = idx_func + 2;
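/* Value stack layout at this point: [ ... func this arg1 ... argN ], so the
 * caller-provided 'this' sits at idx_func + 1 and the first argument at
 * idx_args (the effective 'this' binding is coerced later).
 */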
2121
2122 DUK_DD(DUK_DDPRINT("handle_ecma_call_setup: thr=%p, "
2123 "num_stack_args=%ld, call_flags=0x%08lx (resume=%ld, tailcall=%ld), "
2124 "idx_func=%ld, idx_args=%ld, entry_valstack_bottom_index=%ld",
2125 (void *) thr,
2126 (long) num_stack_args,
2127 (unsigned long) call_flags,
2128 (long) ((call_flags & DUK_CALL_FLAG_IS_RESUME) != 0 ? 1 : 0),
2129 (long) ((call_flags & DUK_CALL_FLAG_IS_TAILCALL) != 0 ? 1 : 0),
2130 (long) idx_func,
2131 (long) idx_args,
2132 (long) entry_valstack_bottom_index));
2133
2134 if (idx_func < 0 || idx_args < 0) {
2135 /* XXX: assert? compiler is responsible for this never happening */
2136 DUK_ERROR(thr, DUK_ERR_API_ERROR, DUK_STR_INVALID_CALL_ARGS);
2137 }
2138
2139 /*
2140 * Check the function type, handle bound function chains, and prepare
2141 * parameters for the rest of the call handling. Also figure out the
2142 * effective 'this' binding, which replaces the current value at
2143 * idx_func + 1.
2144 *
2145 * If the target function is a 'bound' one, follow the chain of 'bound'
2146 * functions until a non-bound function is found. During this process,
2147 * bound arguments are 'prepended' to existing ones, and the "this"
2148 * binding is overridden. See E5 Section 15.3.4.5.1.
2149 *
2150 * If the final target function cannot be handled by an ecma-to-ecma
2151 * call, return to the caller with a return value indicating this case.
2152 * The bound chain is resolved and the caller can resume with a plain
2153 * function call.
2154 */
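/* Illustrative example (editorial, not from the original source):
 *
 *     var g = f.bind(obj, 1);
 *     g(2, 3);
 *
 * Resolving the bound chain replaces 'g' with the target 'f', overrides the
 * effective 'this' binding with 'obj', and prepends the bound argument so
 * the effective argument list becomes (1, 2, 3).
 */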
2155
2156 func = duk__nonbound_func_lookup(ctx, idx_func, &num_stack_args, &tv_func, call_flags);
2157 if (func == NULL || !DUK_HOBJECT_IS_COMPILEDFUNCTION(func)) {
2158 DUK_DDD(DUK_DDDPRINT("final target is a lightfunc/nativefunc, cannot do ecma-to-ecma call"));
2159 thr->ptr_curr_pc = entry_ptr_curr_pc;
2160 return 0;
2161 }
2162 /* XXX: tv_func is not actually needed */
2163
2164 DUK_ASSERT(func != NULL);
2165 DUK_ASSERT(!DUK_HOBJECT_HAS_BOUND(func));
2166 DUK_ASSERT(DUK_HOBJECT_IS_COMPILEDFUNCTION(func));
2167
2168 duk__coerce_effective_this_binding(thr, func, idx_func + 1);
2169 DUK_DDD(DUK_DDDPRINT("effective 'this' binding is: %!T",
2170 duk_get_tval(ctx, idx_func + 1)));
2171
2172 nargs = ((duk_hcompiledfunction *) func)->nargs;
2173 nregs = ((duk_hcompiledfunction *) func)->nregs;
2174 DUK_ASSERT(nregs >= nargs);
2175
2176 /* [ ... func this arg1 ... argN ] */
2177
2178 /*
2179 * Preliminary activation record and valstack manipulation.
2180 * The concrete actions depend on whether we're dealing
2181 * with a tail call (reuse an existing activation), a resume,
2182 * or a normal call.
2183 *
2184 * The basic actions, in varying order, are:
2185 *
2186 * - Check stack size for call handling
2187 * - Grow call stack if necessary (non-tail-calls)
2188 * - Update current activation (idx_retval) if necessary
2189 * (non-tail, non-resume calls)
2190 * - Move start of args (idx_args) to valstack bottom
2191 * (tail calls)
2192 *
2193 * Don't touch valstack_bottom or valstack_top yet so that Duktape API
2194 * calls work normally.
2195 */
2196
2197 /* XXX: some overlapping code; cleanup */
2198 use_tailcall = call_flags & DUK_CALL_FLAG_IS_TAILCALL;
2199 #if !defined(DUK_USE_TAILCALL)
2200 DUK_ASSERT(use_tailcall == 0); /* compiler ensures this */
2201 #endif
2202 if (use_tailcall) {
2203 /* tailcall cannot be flagged to resume calls, and a
2204 * previous frame must exist
2205 */
2206 DUK_ASSERT(thr->callstack_top >= 1);
2207 DUK_ASSERT((call_flags & DUK_CALL_FLAG_IS_RESUME) == 0);
2208
2209 act = thr->callstack + thr->callstack_top - 1;
2210 if (act->flags & DUK_ACT_FLAG_PREVENT_YIELD) {
2211 /* See: test-bug-tailcall-preventyield-assert.c. */
2212 DUK_DDD(DUK_DDDPRINT("tail call prevented by current activation having DUK_ACT_FLAG_PREVENTYIELD"));
2213 use_tailcall = 0;
2214 } else if (DUK_HOBJECT_HAS_NOTAIL(func)) {
2215 DUK_D(DUK_DPRINT("tail call prevented by function having a notail flag"));
2216 use_tailcall = 0;
2217 }
2218 }
2219
2220 if (use_tailcall) {
2221 duk_tval *tv1, *tv2;
2222 duk_tval tv_tmp;
2223 duk_size_t cs_index;
2224 duk_int_t i_stk; /* must be signed for loop structure */
2225 duk_idx_t i_arg;
2226
2227 /*
2228 * Tailcall handling
2229 *
2230 * Although the callstack entry is reused, we need to explicitly unwind
2231 * the current activation (or simulate an unwind). In particular, the
2232 * current activation must be closed, otherwise something like
2233 * test-bug-reduce-judofyr.js results. The catchstack also needs to be unwound
2234 * because there may be non-error-catching label entries in valid tail calls.
2235 */
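/* Editorial example (assuming DUK_USE_TAILCALL is enabled and the compiler
 * has flagged the call as a tail call):
 *
 *     function loop(n) { if (n <= 0) { return 0; } return loop(n - 1); }
 *
 * Each 'return loop(n - 1)' reuses the current activation, so the callstack
 * does not grow with 'n'.
 */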
2236
2237 DUK_DDD(DUK_DDDPRINT("is tail call, reusing activation at callstack top, at index %ld",
2238 (long) (thr->callstack_top - 1)));
2239
2240 /* 'act' already set above */
2241
2242 DUK_ASSERT(!DUK_HOBJECT_HAS_BOUND(func));
2243 DUK_ASSERT(!DUK_HOBJECT_HAS_NATIVEFUNCTION(func));
2244 DUK_ASSERT(DUK_HOBJECT_HAS_COMPILEDFUNCTION(func));
2245 DUK_ASSERT((act->flags & DUK_ACT_FLAG_PREVENT_YIELD) == 0);
2246
2247 /* Unwind catchstack entries referring to the callstack entry we're reusing */
2248 cs_index = thr->callstack_top - 1;
2249 DUK_ASSERT(thr->catchstack_top <= DUK_INT_MAX); /* catchstack limits */
2250 for (i_stk = (duk_int_t) (thr->catchstack_top - 1); i_stk >= 0; i_stk--) {
2251 duk_catcher *cat = thr->catchstack + i_stk;
2252 if (cat->callstack_index != cs_index) {
2253 /* 'i_stk' is the first entry (from the top) we'll keep */
2254 break;
2255 }
2256 }
2257 duk_hthread_catchstack_unwind(thr, i_stk + 1);
2258
2259 /* Unwind the topmost callstack entry before reusing it */
2260 DUK_ASSERT(thr->callstack_top > 0);
2261 duk_hthread_callstack_unwind(thr, thr->callstack_top - 1);
2262
2263 /* Then reuse the unwound activation; callstack was not shrunk so there is always space */
2264 thr->callstack_top++;
2265 DUK_ASSERT(thr->callstack_top <= thr->callstack_size);
2266 act = thr->callstack + thr->callstack_top - 1;
2267
2268 /* Start filling in the activation */
2269 act->func = func; /* don't want an intermediate exposed state with func == NULL */
2270 #ifdef DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
2271 act->prev_caller = NULL;
2272 #endif
2273 DUK_ASSERT(func != NULL);
2274 DUK_ASSERT(DUK_HOBJECT_HAS_COMPILEDFUNCTION(func));
2275 /* don't want an intermediate exposed state with invalid pc */
2276 act->curr_pc = DUK_HCOMPILEDFUNCTION_GET_CODE_BASE(thr->heap, (duk_hcompiledfunction *) func);
2277 #if defined(DUK_USE_DEBUGGER_SUPPORT)
2278 act->prev_line = 0;
2279 #endif
2280 DUK_TVAL_SET_OBJECT(&act->tv_func, func); /* borrowed, no refcount */
2281 #ifdef DUK_USE_REFERENCE_COUNTING
2282 DUK_HOBJECT_INCREF(thr, func);
2283 act = thr->callstack + thr->callstack_top - 1; /* side effects (currently none though) */
2284 #endif
2285
2286 #ifdef DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
2287 #ifdef DUK_USE_TAILCALL
2288 #error incorrect options: tail calls enabled with function caller property
2289 #endif
2290 /* XXX: this doesn't actually work properly for tail calls, so
2291 * tail calls are disabled when DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
2292 * is in use.
2293 */
2294 duk__update_func_caller_prop(thr, func);
2295 act = thr->callstack + thr->callstack_top - 1;
2296 #endif
2297
2298 act->flags = (DUK_HOBJECT_HAS_STRICT(func) ?
2299 DUK_ACT_FLAG_STRICT | DUK_ACT_FLAG_TAILCALLED :
2300 DUK_ACT_FLAG_TAILCALLED);
2301
2302 DUK_ASSERT(DUK_ACT_GET_FUNC(act) == func); /* already updated */
2303 DUK_ASSERT(act->var_env == NULL); /* already NULLed (by unwind) */
2304 DUK_ASSERT(act->lex_env == NULL); /* already NULLed (by unwind) */
2305 act->idx_bottom = entry_valstack_bottom_index; /* tail call -> reuse current "frame" */
2306 DUK_ASSERT(nregs >= 0);
2307 #if 0 /* topmost activation idx_retval is considered garbage, no need to init */
2308 act->idx_retval = 0;
2309 #endif
2310
2311 /*
2312 * Manipulate valstack so that args are on the current bottom and the
2313 * previous caller's 'this' binding (which is the value preceding the
2314 * current bottom) is replaced with the new 'this' binding:
2315 *
2316 * [ ... this_old | (crud) func this_new arg1 ... argN ]
2317 * --> [ ... this_new | arg1 ... argN ]
2318 *
2319 * For tail calling to work properly, the valstack bottom must not grow
2320 * here; otherwise crud would accumulate on the valstack.
2321 */
2322
2323 tv1 = thr->valstack_bottom - 1;
2324 tv2 = thr->valstack_bottom + idx_func + 1;
2325 DUK_ASSERT(tv1 >= thr->valstack && tv1 < thr->valstack_top); /* tv1 is -below- valstack_bottom */
2326 DUK_ASSERT(tv2 >= thr->valstack_bottom && tv2 < thr->valstack_top);
2327 DUK_TVAL_SET_TVAL(&tv_tmp, tv1);
2328 DUK_TVAL_SET_TVAL(tv1, tv2);
2329 DUK_TVAL_INCREF(thr, tv1);
2330 DUK_TVAL_DECREF(thr, &tv_tmp); /* side effects */
2331
2332 for (i_arg = 0; i_arg < idx_args; i_arg++) {
2333 /* XXX: block removal API primitive */
2334 /* Note: 'func' is popped from valstack here, but it is
2335 * already reachable from the activation.
2336 */
2337 duk_remove(ctx, 0);
2338 }
2339 idx_func = 0; DUK_UNREF(idx_func); /* really 'not applicable' anymore, should not be referenced after this */
2340 idx_args = 0;
2341
2342 /* [ ... this_new | arg1 ... argN ] */
2343 } else {
2344 DUK_DDD(DUK_DDDPRINT("not a tail call, pushing a new activation to callstack, to index %ld",
2345 (long) (thr->callstack_top)));
2346
2347 duk_hthread_callstack_grow(thr);
2348
2349 if (call_flags & DUK_CALL_FLAG_IS_RESUME) {
2350 DUK_DDD(DUK_DDDPRINT("is resume -> no update to current activation (may not even exist)"));
2351 } else {
2352 DUK_DDD(DUK_DDDPRINT("update to current activation idx_retval"));
2353 DUK_ASSERT(thr->callstack_top < thr->callstack_size);
2354 DUK_ASSERT(thr->callstack_top >= 1);
2355 act = thr->callstack + thr->callstack_top - 1;
2356 DUK_ASSERT(DUK_ACT_GET_FUNC(act) != NULL);
2357 DUK_ASSERT(DUK_HOBJECT_IS_COMPILEDFUNCTION(DUK_ACT_GET_FUNC(act)));
2358 act->idx_retval = entry_valstack_bottom_index + idx_func;
2359 }
2360
2361 DUK_ASSERT(thr->callstack_top < thr->callstack_size);
2362 act = thr->callstack + thr->callstack_top;
2363 thr->callstack_top++;
2364 DUK_ASSERT(thr->callstack_top <= thr->callstack_size);
2365
2366 DUK_ASSERT(!DUK_HOBJECT_HAS_BOUND(func));
2367 DUK_ASSERT(!DUK_HOBJECT_HAS_NATIVEFUNCTION(func));
2368 DUK_ASSERT(DUK_HOBJECT_HAS_COMPILEDFUNCTION(func));
2369
2370 act->flags = (DUK_HOBJECT_HAS_STRICT(func) ?
2371 DUK_ACT_FLAG_STRICT :
2372 0);
2373 act->func = func;
2374 act->var_env = NULL;
2375 act->lex_env = NULL;
2376 #ifdef DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
2377 act->prev_caller = NULL;
2378 #endif
2379 DUK_ASSERT(func != NULL);
2380 DUK_ASSERT(DUK_HOBJECT_HAS_COMPILEDFUNCTION(func));
2381 act->curr_pc = DUK_HCOMPILEDFUNCTION_GET_CODE_BASE(thr->heap, (duk_hcompiledfunction *) func);
2382 #if defined(DUK_USE_DEBUGGER_SUPPORT)
2383 act->prev_line = 0;
2384 #endif
2385 act->idx_bottom = entry_valstack_bottom_index + idx_args;
2386 DUK_ASSERT(nregs >= 0);
2387 #if 0 /* topmost activation idx_retval is considered garbage, no need to init */
2388 act->idx_retval = 0;
2389 #endif
2390 DUK_TVAL_SET_OBJECT(&act->tv_func, func); /* borrowed, no refcount */
2391
2392 DUK_HOBJECT_INCREF(thr, func); /* act->func */
2393
2394 #ifdef DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
2395 duk__update_func_caller_prop(thr, func);
2396 act = thr->callstack + thr->callstack_top - 1;
2397 #endif
2398 }
2399
2400 /* [... func this arg1 ... argN] (not tail call)
2401 * [this | arg1 ... argN] (tail call)
2402 *
2403 * idx_args updated to match
2404 */
2405
2406 /*
2407 * Environment record creation and 'arguments' object creation.
2408 * Named function expression name binding is handled by the
2409 * compiler; the compiled function's parent env will contain
2410 * the (immutable) binding already.
2411 *
2412 * Delayed creation (on demand) is handled in duk_js_var.c.
2413 */
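/* Editorial examples (rough, based on how the compiler is expected to set
 * the NEWENV/CREATEARGS flags; not from the original source):
 *
 *   - plain function not referencing 'arguments': NEWENV, no CREATEARGS
 *     -> environment record creation is delayed (duk_js_var.c)
 *   - function referencing 'arguments':           NEWENV + CREATEARGS
 *     -> environment record and 'arguments' object created below
 *   - non-strict eval code:                       no NEWENV
 *     -> the existing (caller) environment is reused
 */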
2414
2415 DUK_ASSERT(!DUK_HOBJECT_HAS_BOUND(func)); /* bound function chain has already been resolved */
2416
2417 if (!DUK_HOBJECT_HAS_NEWENV(func)) {
2418 /* use existing env (e.g. for non-strict eval); cannot have
2419 * an own 'arguments' object (but can refer to the existing one)
2420 */
2421
2422 duk__handle_oldenv_for_call(thr, func, act);
2423
2424 DUK_ASSERT(act->lex_env != NULL);
2425 DUK_ASSERT(act->var_env != NULL);
2426 goto env_done;
2427 }
2428
2429 DUK_ASSERT(DUK_HOBJECT_HAS_NEWENV(func));
2430
2431 if (!DUK_HOBJECT_HAS_CREATEARGS(func)) {
2432 /* no need to create environment record now; leave as NULL */
2433 DUK_ASSERT(act->lex_env == NULL);
2434 DUK_ASSERT(act->var_env == NULL);
2435 goto env_done;
2436 }
2437
2438 /* third arg: absolute index (to entire valstack) of idx_bottom of new activation */
2439 env = duk_create_activation_environment_record(thr, func, act->idx_bottom);
2440 DUK_ASSERT(env != NULL);
2441
2442 /* [... arg1 ... argN envobj] */
2443
2444 /* original input stack before nargs/nregs handling must be
2445 * intact for 'arguments' object
2446 */
2447 DUK_ASSERT(DUK_HOBJECT_HAS_CREATEARGS(func));
2448 duk__handle_createargs_for_call(thr, func, env, num_stack_args);
2449
2450 /* [... arg1 ... argN envobj] */
2451
2452 act = thr->callstack + thr->callstack_top - 1;
2453 act->lex_env = env;
2454 act->var_env = env;
2455 DUK_HOBJECT_INCREF(thr, act->lex_env);
2456 DUK_HOBJECT_INCREF(thr, act->var_env);
2457 duk_pop(ctx);
2458
2459 env_done:
2460 /* [... arg1 ... argN] */
2461
2462 /*
2463 * Setup value stack: clamp to 'nargs', fill up to 'nregs'
2464 */
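/* Editorial example (not from the original source): for a compiled function
 * with nargs=2 and nregs=5 called with three arguments, the third argument
 * is dropped from the register frame (it remains reachable through the
 * 'arguments' object if one was created above) and registers 2..4 are
 * filled with 'undefined'.
 */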
2465
2466 duk__adjust_valstack_and_top(thr,
2467 num_stack_args,
2468 idx_args,
2469 nregs,
2470 nargs,
2471 func);
2472
2473 /*
2474 * Shift to new valstack_bottom.
2475 */
2476
2477 thr->valstack_bottom = thr->valstack_bottom + idx_args;
2478 /* keep current valstack_top */
2479 DUK_ASSERT(thr->valstack_bottom >= thr->valstack);
2480 DUK_ASSERT(thr->valstack_top >= thr->valstack_bottom);
2481 DUK_ASSERT(thr->valstack_end >= thr->valstack_top);
2482
2483 /*
2484 * Return to bytecode executor, which will resume execution from
2485 * the topmost activation.
2486 */
2487
2488 return 1;
2489 }