1/*
2 * Copyright (C) 2011-2019 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#include "config.h"
27#include "LLIntSlowPaths.h"
28
29#include "ArrayConstructor.h"
30#include "CallFrame.h"
31#include "CommonSlowPaths.h"
32#include "Error.h"
33#include "ErrorHandlingScope.h"
34#include "EvalCodeBlock.h"
35#include "Exception.h"
36#include "ExceptionFuzz.h"
37#include "ExecutableBaseInlines.h"
38#include "FrameTracers.h"
39#include "FunctionCodeBlock.h"
40#include "FunctionWhitelist.h"
41#include "GetterSetter.h"
42#include "HostCallReturnValue.h"
43#include "InterpreterInlines.h"
44#include "IteratorOperations.h"
45#include "JIT.h"
46#include "JITExceptions.h"
47#include "JITWorklist.h"
48#include "JSAsyncFunction.h"
49#include "JSAsyncGeneratorFunction.h"
50#include "JSCInlines.h"
51#include "JSCJSValue.h"
52#include "JSGeneratorFunction.h"
53#include "JSGlobalObjectFunctions.h"
54#include "JSLexicalEnvironment.h"
55#include "JSString.h"
56#include "JSWithScope.h"
57#include "LLIntCommon.h"
58#include "LLIntData.h"
59#include "LLIntExceptions.h"
60#include "LLIntPrototypeLoadAdaptiveStructureWatchpoint.h"
61#include "LowLevelInterpreter.h"
62#include "ModuleProgramCodeBlock.h"
63#include "ObjectConstructor.h"
64#include "ObjectPropertyConditionSet.h"
65#include "OpcodeInlines.h"
66#include "ProgramCodeBlock.h"
67#include "ProtoCallFrame.h"
68#include "RegExpObject.h"
69#include "ShadowChicken.h"
70#include "StructureRareDataInlines.h"
71#include "SuperSampler.h"
72#include "VMInlines.h"
73#include <wtf/NeverDestroyed.h>
74#include <wtf/StringPrintStream.h>
75
76namespace JSC { namespace LLInt {
77
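// LLInt slow paths return a pair of machine words (see encodeResult()). For most
// opcodes the first word is the bytecode PC to resume at and the second is unused;
// for the call slow paths the first word is the machine code address to call and
// the second is the frame to run it with. The macros below wrap those conventions
// together with VM entry, exception checks, and value profiling.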
78#define LLINT_BEGIN_NO_SET_PC() \
79 VM& vm = exec->vm(); \
80 NativeCallFrameTracer tracer(&vm, exec); \
81 auto throwScope = DECLARE_THROW_SCOPE(vm)
82
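// In debug builds the bytecodeOffset() lookup below doubles as a sanity check
// that pc really points into this CodeBlock's instruction stream.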
83#ifndef NDEBUG
84#define LLINT_SET_PC_FOR_STUBS() do { \
85 exec->codeBlock()->bytecodeOffset(pc); \
86 exec->setCurrentVPC(pc); \
87 } while (false)
88#else
89#define LLINT_SET_PC_FOR_STUBS() do { \
90 exec->setCurrentVPC(pc); \
91 } while (false)
92#endif
93
94#define LLINT_BEGIN() \
95 LLINT_BEGIN_NO_SET_PC(); \
96 LLINT_SET_PC_FOR_STUBS()
97
98inline JSValue getNonConstantOperand(ExecState* exec, const VirtualRegister& operand) { return exec->uncheckedR(operand.offset()).jsValue(); }
99inline JSValue getOperand(ExecState* exec, const VirtualRegister& operand) { return exec->r(operand.offset()).jsValue(); }
100
101#define LLINT_RETURN_TWO(first, second) do { \
102 return encodeResult(first, second); \
103 } while (false)
104
105#define LLINT_END_IMPL() LLINT_RETURN_TWO(pc, 0)
106
107#define LLINT_THROW(exceptionToThrow) do { \
108 throwException(exec, throwScope, exceptionToThrow); \
109 pc = returnToThrow(exec); \
110 LLINT_END_IMPL(); \
111 } while (false)
112
113#define LLINT_CHECK_EXCEPTION() do { \
114 doExceptionFuzzingIfEnabled(exec, throwScope, "LLIntSlowPaths", pc); \
115 if (UNLIKELY(throwScope.exception())) { \
116 pc = returnToThrow(exec); \
117 LLINT_END_IMPL(); \
118 } \
119 } while (false)
120
121#define LLINT_END() do { \
122 LLINT_CHECK_EXCEPTION(); \
123 LLINT_END_IMPL(); \
124 } while (false)
125
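// A branch target of 0 means the real offset did not fit into the narrow
// instruction encoding and must be fetched from the CodeBlock's out-of-line
// jump table instead.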
126#define JUMP_OFFSET(targetOffset) \
127 ((targetOffset) ? (targetOffset) : exec->codeBlock()->outOfLineJumpOffset(pc))
128
129#define JUMP_TO(target) do { \
130 pc = reinterpret_cast<const Instruction*>(reinterpret_cast<const uint8_t*>(pc) + (target)); \
131 } while (false)
132
133#define LLINT_BRANCH(condition) do { \
134 bool __b_condition = (condition); \
135 LLINT_CHECK_EXCEPTION(); \
136 if (__b_condition) \
137 JUMP_TO(JUMP_OFFSET(bytecode.m_targetLabel)); \
138 else \
139 JUMP_TO(pc->size()); \
140 LLINT_END_IMPL(); \
141 } while (false)
142
143#define LLINT_RETURN(value) do { \
144 JSValue __r_returnValue = (value); \
145 LLINT_CHECK_EXCEPTION(); \
146 exec->uncheckedR(bytecode.m_dst) = __r_returnValue; \
147 LLINT_END_IMPL(); \
148 } while (false)
149
150#define LLINT_RETURN_PROFILED(value) do { \
151 JSValue __rp_returnValue = (value); \
152 LLINT_CHECK_EXCEPTION(); \
153 exec->uncheckedR(bytecode.m_dst) = __rp_returnValue; \
154 LLINT_PROFILE_VALUE(__rp_returnValue); \
155 LLINT_END_IMPL(); \
156 } while (false)
157
158#define LLINT_PROFILE_VALUE(value) do { \
159 bytecode.metadata(exec).m_profile.m_buckets[0] = JSValue::encode(value); \
160 } while (false)
161
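// The call slow paths return the (retagged) machine code address to call and
// the callee frame to use; on exception they return the "call to throw" thunk
// instead so the LLInt unwinds through the normal exception path.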
162#define LLINT_CALL_END_IMPL(exec, callTarget, callTargetTag) \
163 LLINT_RETURN_TWO(retagCodePtr((callTarget), callTargetTag, SlowPathPtrTag), (exec))
164
165#define LLINT_CALL_THROW(exec, exceptionToThrow) do { \
166 ExecState* __ct_exec = (exec); \
167 throwException(__ct_exec, throwScope, exceptionToThrow); \
168 LLINT_CALL_END_IMPL(0, callToThrow(__ct_exec), ExceptionHandlerPtrTag); \
169 } while (false)
170
171#define LLINT_CALL_CHECK_EXCEPTION(exec, execCallee) do { \
172 ExecState* __cce_exec = (exec); \
173 ExecState* __cce_execCallee = (execCallee); \
174 doExceptionFuzzingIfEnabled(__cce_exec, throwScope, "LLIntSlowPaths/call", nullptr); \
175 if (UNLIKELY(throwScope.exception())) \
176 LLINT_CALL_END_IMPL(0, callToThrow(__cce_execCallee), ExceptionHandlerPtrTag); \
177 } while (false)
178
179#define LLINT_CALL_RETURN(exec, execCallee, callTarget, callTargetTag) do { \
180 ExecState* __cr_exec = (exec); \
181 ExecState* __cr_execCallee = (execCallee); \
182 void* __cr_callTarget = (callTarget); \
183 LLINT_CALL_CHECK_EXCEPTION(__cr_exec, __cr_execCallee); \
184 LLINT_CALL_END_IMPL(__cr_execCallee, __cr_callTarget, callTargetTag); \
185 } while (false)
186
187#define LLINT_RETURN_CALLEE_FRAME(execCallee) do { \
188 ExecState* __rcf_exec = (execCallee); \
189 LLINT_RETURN_TWO(pc, __rcf_exec); \
190 } while (false)
191
192#if LLINT_TRACING
193
194template<typename... Types>
195void slowPathLog(const Types&... values)
196{
197 dataLogIf(Options::traceLLIntSlowPath(), values...);
198}
199
200template<typename... Types>
201void slowPathLn(const Types&... values)
202{
203 dataLogLnIf(Options::traceLLIntSlowPath(), values...);
204}
205
206template<typename... Types>
207void slowPathLogF(const char* format, const Types&... values)
208{
209 ALLOW_NONLITERAL_FORMAT_BEGIN
210 IGNORE_WARNINGS_BEGIN("format-security")
211 if (Options::traceLLIntSlowPath())
212 dataLogF(format, values...);
213 IGNORE_WARNINGS_END
214 ALLOW_NONLITERAL_FORMAT_END
215}
216
217#else // not LLINT_TRACING
218
219template<typename... Types> void slowPathLog(const Types&...) { }
template<typename... Types> void slowPathLn(const Types&...) { }
221template<typename... Types> void slowPathLogF(const char*, const Types&...) { }
222
223#endif // LLINT_TRACING
224
225extern "C" SlowPathReturnType llint_trace_operand(ExecState* exec, const Instruction* pc, int fromWhere, int operand)
226{
227 if (!Options::traceLLIntExecution())
228 LLINT_END_IMPL();
229
230 LLINT_BEGIN();
231 dataLogF(
232 "<%p> %p / %p: executing bc#%zu, op#%u: Trace(%d): %d\n",
233 &Thread::current(),
234 exec->codeBlock(),
235 exec,
        static_cast<size_t>(exec->codeBlock()->bytecodeOffset(pc)),
237 pc->opcodeID(),
238 fromWhere,
239 operand);
240 LLINT_END();
241}
242
243extern "C" SlowPathReturnType llint_trace_value(ExecState* exec, const Instruction* pc, int fromWhere, VirtualRegister operand)
244{
245 if (!Options::traceLLIntExecution())
246 LLINT_END_IMPL();
247
248 JSValue value = getOperand(exec, operand);
249 union {
250 struct {
251 uint32_t tag;
252 uint32_t payload;
253 } bits;
254 EncodedJSValue asValue;
255 } u;
256 u.asValue = JSValue::encode(value);
257 dataLogF(
258 "<%p> %p / %p: executing bc#%zu, op#%u: Trace(%d): %d: %08x:%08x: %s\n",
259 &Thread::current(),
260 exec->codeBlock(),
261 exec,
        static_cast<size_t>(exec->codeBlock()->bytecodeOffset(pc)),
263 pc->opcodeID(),
264 fromWhere,
265 operand.offset(),
266 u.bits.tag,
267 u.bits.payload,
268 toCString(value).data());
269 LLINT_END_IMPL();
270}
271
272LLINT_SLOW_PATH_DECL(trace_prologue)
273{
274 if (!Options::traceLLIntExecution())
275 LLINT_END_IMPL();
276
277 dataLogF("<%p> %p / %p: in prologue of ", &Thread::current(), exec->codeBlock(), exec);
278 dataLog(exec->codeBlock(), "\n");
279 LLINT_END_IMPL();
280}
281
282static void traceFunctionPrologue(ExecState* exec, const char* comment, CodeSpecializationKind kind)
283{
284 if (!Options::traceLLIntExecution())
285 return;
286
287 JSFunction* callee = jsCast<JSFunction*>(exec->jsCallee());
288 FunctionExecutable* executable = callee->jsExecutable();
289 CodeBlock* codeBlock = executable->codeBlockFor(kind);
290 dataLogF("<%p> %p / %p: in %s of ", &Thread::current(), codeBlock, exec, comment);
291 dataLog(codeBlock);
292 dataLogF(" function %p, executable %p; numVars = %u, numParameters = %u, numCalleeLocals = %u, caller = %p.\n",
293 callee, executable, codeBlock->numVars(), codeBlock->numParameters(), codeBlock->numCalleeLocals(), exec->callerFrame());
294}
295
296LLINT_SLOW_PATH_DECL(trace_prologue_function_for_call)
297{
298 traceFunctionPrologue(exec, "call prologue", CodeForCall);
299 LLINT_END_IMPL();
300}
301
302LLINT_SLOW_PATH_DECL(trace_prologue_function_for_construct)
303{
304 traceFunctionPrologue(exec, "construct prologue", CodeForConstruct);
305 LLINT_END_IMPL();
306}
307
308LLINT_SLOW_PATH_DECL(trace_arityCheck_for_call)
309{
310 traceFunctionPrologue(exec, "call arity check", CodeForCall);
311 LLINT_END_IMPL();
312}
313
314LLINT_SLOW_PATH_DECL(trace_arityCheck_for_construct)
315{
316 traceFunctionPrologue(exec, "construct arity check", CodeForConstruct);
317 LLINT_END_IMPL();
318}
319
320LLINT_SLOW_PATH_DECL(trace)
321{
322 if (!Options::traceLLIntExecution())
323 LLINT_END_IMPL();
324
325 OpcodeID opcodeID = pc->opcodeID();
326 dataLogF("<%p> %p / %p: executing bc#%zu, %s, pc = %p\n",
327 &Thread::current(),
328 exec->codeBlock(),
329 exec,
        static_cast<size_t>(exec->codeBlock()->bytecodeOffset(pc)),
331 pc->name(),
332 pc);
333 if (opcodeID == op_enter) {
334 dataLogF("Frame will eventually return to %p\n", exec->returnPC().value());
335 *removeCodePtrTag<volatile char*>(exec->returnPC().value());
336 }
337 if (opcodeID == op_ret) {
338 dataLogF("Will be returning to %p\n", exec->returnPC().value());
339 dataLogF("The new cfr will be %p\n", exec->callerFrame());
340 }
341 LLINT_END_IMPL();
342}
343
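// Prologue entries jump to the JIT code's normal entrypoint; ArityCheck entries
// use the arity-checking entrypoint because the caller may have passed fewer
// arguments than the callee expects.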
344enum EntryKind { Prologue, ArityCheck };
345
346#if ENABLE(JIT)
347static FunctionWhitelist& ensureGlobalJITWhitelist()
348{
349 static LazyNeverDestroyed<FunctionWhitelist> baselineWhitelist;
350 static std::once_flag initializeWhitelistFlag;
351 std::call_once(initializeWhitelistFlag, [] {
352 const char* functionWhitelistFile = Options::jitWhitelist();
353 baselineWhitelist.construct(functionWhitelistFile);
354 });
355 return baselineWhitelist;
356}
357
358inline bool shouldJIT(CodeBlock* codeBlock)
359{
360 if (!Options::bytecodeRangeToJITCompile().isInRange(codeBlock->instructionsSize())
361 || !ensureGlobalJITWhitelist().contains(codeBlock))
362 return false;
363
364 return VM::canUseJIT() && Options::useBaselineJIT();
365}
366
367// Returns true if we should try to OSR.
368inline bool jitCompileAndSetHeuristics(CodeBlock* codeBlock, ExecState* exec, unsigned loopOSREntryBytecodeOffset = 0)
369{
370 VM& vm = exec->vm();
    DeferGCForAWhile deferGC(vm.heap); // Our callers don't set the top call frame, so we don't want to GC here at all.
372 ASSERT(VM::canUseJIT());
373
374 codeBlock->updateAllValueProfilePredictions();
375
376 if (!codeBlock->checkIfJITThresholdReached()) {
377 CODEBLOCK_LOG_EVENT(codeBlock, "delayJITCompile", ("threshold not reached, counter = ", codeBlock->llintExecuteCounter()));
378 if (Options::verboseOSR())
379 dataLogF(" JIT threshold should be lifted.\n");
380 return false;
381 }
382
383 JITWorklist::ensureGlobalWorklist().poll(vm);
384
385 switch (codeBlock->jitType()) {
386 case JITType::BaselineJIT: {
387 if (Options::verboseOSR())
388 dataLogF(" Code was already compiled.\n");
389 codeBlock->jitSoon();
390 return true;
391 }
392 case JITType::InterpreterThunk: {
393 JITWorklist::ensureGlobalWorklist().compileLater(codeBlock, loopOSREntryBytecodeOffset);
394 return codeBlock->jitType() == JITType::BaselineJIT;
395 }
396 default:
397 dataLog("Unexpected code block in LLInt: ", *codeBlock, "\n");
398 RELEASE_ASSERT_NOT_REACHED();
399 return false;
400 }
401}
402
static SlowPathReturnType entryOSR(ExecState* exec, const Instruction*, CodeBlock* codeBlock, const char* name, EntryKind kind)
404{
405 if (Options::verboseOSR()) {
406 dataLog(
407 *codeBlock, ": Entered ", name, " with executeCounter = ",
408 codeBlock->llintExecuteCounter(), "\n");
409 }
410
411 if (!shouldJIT(codeBlock)) {
412 codeBlock->dontJITAnytimeSoon();
413 LLINT_RETURN_TWO(0, 0);
414 }
415 if (!jitCompileAndSetHeuristics(codeBlock, exec))
416 LLINT_RETURN_TWO(0, 0);
417
418 CODEBLOCK_LOG_EVENT(codeBlock, "OSR entry", ("in prologue"));
419
420 if (kind == Prologue)
421 LLINT_RETURN_TWO(codeBlock->jitCode()->executableAddress(), 0);
422 ASSERT(kind == ArityCheck);
423 LLINT_RETURN_TWO(codeBlock->jitCode()->addressForCall(MustCheckArity).executableAddress(), 0);
424}
425#else // ENABLE(JIT)
426static SlowPathReturnType entryOSR(ExecState* exec, const Instruction*, CodeBlock* codeBlock, const char*, EntryKind)
427{
428 codeBlock->dontJITAnytimeSoon();
429 LLINT_RETURN_TWO(0, exec);
430}
431#endif // ENABLE(JIT)
432
433LLINT_SLOW_PATH_DECL(entry_osr)
434{
435 return entryOSR(exec, pc, exec->codeBlock(), "entry_osr", Prologue);
436}
437
438LLINT_SLOW_PATH_DECL(entry_osr_function_for_call)
439{
440 return entryOSR(exec, pc, jsCast<JSFunction*>(exec->jsCallee())->jsExecutable()->codeBlockForCall(), "entry_osr_function_for_call", Prologue);
441}
442
443LLINT_SLOW_PATH_DECL(entry_osr_function_for_construct)
444{
445 return entryOSR(exec, pc, jsCast<JSFunction*>(exec->jsCallee())->jsExecutable()->codeBlockForConstruct(), "entry_osr_function_for_construct", Prologue);
446}
447
448LLINT_SLOW_PATH_DECL(entry_osr_function_for_call_arityCheck)
449{
450 return entryOSR(exec, pc, jsCast<JSFunction*>(exec->jsCallee())->jsExecutable()->codeBlockForCall(), "entry_osr_function_for_call_arityCheck", ArityCheck);
451}
452
453LLINT_SLOW_PATH_DECL(entry_osr_function_for_construct_arityCheck)
454{
455 return entryOSR(exec, pc, jsCast<JSFunction*>(exec->jsCallee())->jsExecutable()->codeBlockForConstruct(), "entry_osr_function_for_construct_arityCheck", ArityCheck);
456}
457
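// OSR entry from a loop header: once baseline code is available, look up the
// machine code address for this bytecode offset in the JIT code map and hand it
// back so the LLInt can jump into the middle of the compiled code.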
458LLINT_SLOW_PATH_DECL(loop_osr)
459{
460 LLINT_BEGIN_NO_SET_PC();
461 UNUSED_PARAM(throwScope);
462 CodeBlock* codeBlock = exec->codeBlock();
463
464#if ENABLE(JIT)
465 if (Options::verboseOSR()) {
466 dataLog(
467 *codeBlock, ": Entered loop_osr with executeCounter = ",
468 codeBlock->llintExecuteCounter(), "\n");
469 }
470
471 unsigned loopOSREntryBytecodeOffset = codeBlock->bytecodeOffset(pc);
472
473 if (!shouldJIT(codeBlock)) {
474 codeBlock->dontJITAnytimeSoon();
475 LLINT_RETURN_TWO(0, 0);
476 }
477
478 if (!jitCompileAndSetHeuristics(codeBlock, exec, loopOSREntryBytecodeOffset))
479 LLINT_RETURN_TWO(0, 0);
480
481 CODEBLOCK_LOG_EVENT(codeBlock, "osrEntry", ("at bc#", loopOSREntryBytecodeOffset));
482
483 ASSERT(codeBlock->jitType() == JITType::BaselineJIT);
484
485 const JITCodeMap& codeMap = codeBlock->jitCodeMap();
486 CodeLocationLabel<JSEntryPtrTag> codeLocation = codeMap.find(loopOSREntryBytecodeOffset);
487 ASSERT(codeLocation);
488
489 void* jumpTarget = codeLocation.executableAddress();
490 ASSERT(jumpTarget);
491
492 LLINT_RETURN_TWO(jumpTarget, exec->topOfFrame());
493#else // ENABLE(JIT)
494 UNUSED_PARAM(pc);
495 codeBlock->dontJITAnytimeSoon();
496 LLINT_RETURN_TWO(0, 0);
497#endif // ENABLE(JIT)
498}
499
500LLINT_SLOW_PATH_DECL(replace)
501{
502 LLINT_BEGIN_NO_SET_PC();
503 UNUSED_PARAM(throwScope);
504 CodeBlock* codeBlock = exec->codeBlock();
505
506#if ENABLE(JIT)
507 if (Options::verboseOSR()) {
508 dataLog(
509 *codeBlock, ": Entered replace with executeCounter = ",
510 codeBlock->llintExecuteCounter(), "\n");
511 }
512
513 if (shouldJIT(codeBlock))
514 jitCompileAndSetHeuristics(codeBlock, exec);
515 else
516 codeBlock->dontJITAnytimeSoon();
517 LLINT_END_IMPL();
518#else // ENABLE(JIT)
519 codeBlock->dontJITAnytimeSoon();
520 LLINT_END_IMPL();
521#endif // ENABLE(JIT)
522}
523
524LLINT_SLOW_PATH_DECL(stack_check)
525{
526 VM& vm = exec->vm();
527 auto throwScope = DECLARE_THROW_SCOPE(vm);
528
    // It's OK to create the NativeCallFrameTracer here before we
    // convertToStackOverflowFrame() because this function is always called
    // after the frame has been populated with a proper CodeBlock and callee.
532 NativeCallFrameTracer tracer(&vm, exec);
533
534 LLINT_SET_PC_FOR_STUBS();
535
536 CodeBlock* codeBlock = exec->codeBlock();
537 slowPathLogF("Checking stack height with exec = %p.\n", exec);
538 slowPathLog("CodeBlock = ", codeBlock, "\n");
539 if (codeBlock) {
540 slowPathLogF("Num callee registers = %u.\n", codeBlock->numCalleeLocals());
541 slowPathLogF("Num vars = %u.\n", codeBlock->numVars());
542 }
543 slowPathLogF("Current OS stack end is at %p.\n", vm.softStackLimit());
544#if ENABLE(C_LOOP)
545 slowPathLogF("Current C Loop stack end is at %p.\n", vm.cloopStackLimit());
546#endif
547
548 // If the stack check succeeds and we don't need to throw the error, then
549 // we'll return 0 instead. The prologue will check for a non-zero value
550 // when determining whether to set the callFrame or not.
551
    // For JIT-enabled builds, which use the C stack, the stack is not growable.
    // Hence, if we get here, we know a stack overflow is imminent. So, just
    // throw the StackOverflowError unconditionally.
555#if ENABLE(C_LOOP)
556 Register* topOfFrame = exec->topOfFrame();
557 if (LIKELY(topOfFrame < reinterpret_cast<Register*>(exec))) {
558 ASSERT(!vm.interpreter->cloopStack().containsAddress(topOfFrame));
559 if (LIKELY(vm.ensureStackCapacityFor(topOfFrame)))
560 LLINT_RETURN_TWO(pc, 0);
561 }
562#endif
563
564 exec->convertToStackOverflowFrame(vm, codeBlock);
565 ErrorHandlingScope errorScope(vm);
566 throwStackOverflowError(exec, throwScope);
567 pc = returnToThrow(exec);
568 LLINT_RETURN_TWO(pc, exec);
569}
570
571LLINT_SLOW_PATH_DECL(slow_path_new_object)
572{
573 LLINT_BEGIN();
574 auto bytecode = pc->as<OpNewObject>();
575 auto& metadata = bytecode.metadata(exec);
576 LLINT_RETURN(constructEmptyObject(exec, metadata.m_objectAllocationProfile.structure()));
577}
578
579LLINT_SLOW_PATH_DECL(slow_path_new_array)
580{
581 LLINT_BEGIN();
582 auto bytecode = pc->as<OpNewArray>();
583 auto& metadata = bytecode.metadata(exec);
584 LLINT_RETURN(constructArrayNegativeIndexed(exec, &metadata.m_arrayAllocationProfile, bitwise_cast<JSValue*>(&exec->uncheckedR(bytecode.m_argv)), bytecode.m_argc));
585}
586
587LLINT_SLOW_PATH_DECL(slow_path_new_array_with_size)
588{
589 LLINT_BEGIN();
590 auto bytecode = pc->as<OpNewArrayWithSize>();
591 auto& metadata = bytecode.metadata(exec);
592 LLINT_RETURN(constructArrayWithSizeQuirk(exec, &metadata.m_arrayAllocationProfile, exec->lexicalGlobalObject(), getOperand(exec, bytecode.m_length)));
593}
594
595LLINT_SLOW_PATH_DECL(slow_path_new_regexp)
596{
597 LLINT_BEGIN();
598 auto bytecode = pc->as<OpNewRegexp>();
599 RegExp* regExp = jsCast<RegExp*>(getOperand(exec, bytecode.m_regexp));
600 ASSERT(regExp->isValid());
601 LLINT_RETURN(RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regExp));
602}
603
604LLINT_SLOW_PATH_DECL(slow_path_instanceof)
605{
606 LLINT_BEGIN();
607 auto bytecode = pc->as<OpInstanceof>();
608 JSValue value = getOperand(exec, bytecode.m_value);
609 JSValue proto = getOperand(exec, bytecode.m_prototype);
610 LLINT_RETURN(jsBoolean(JSObject::defaultHasInstance(exec, value, proto)));
611}
612
613LLINT_SLOW_PATH_DECL(slow_path_instanceof_custom)
614{
615 LLINT_BEGIN();
616
617 auto bytecode = pc->as<OpInstanceofCustom>();
618 JSValue value = getOperand(exec, bytecode.m_value);
619 JSValue constructor = getOperand(exec, bytecode.m_constructor);
620 JSValue hasInstanceValue = getOperand(exec, bytecode.m_hasInstanceValue);
621
622 ASSERT(constructor.isObject());
623 ASSERT(hasInstanceValue != exec->lexicalGlobalObject()->functionProtoHasInstanceSymbolFunction() || !constructor.getObject()->structure(vm)->typeInfo().implementsDefaultHasInstance());
624
625 JSValue result = jsBoolean(constructor.getObject()->hasInstance(exec, value, hasInstanceValue));
626 LLINT_RETURN(result);
627}
628
629LLINT_SLOW_PATH_DECL(slow_path_try_get_by_id)
630{
631 LLINT_BEGIN();
632 auto bytecode = pc->as<OpTryGetById>();
633 CodeBlock* codeBlock = exec->codeBlock();
634 const Identifier& ident = codeBlock->identifier(bytecode.m_property);
635 JSValue baseValue = getOperand(exec, bytecode.m_base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
637
638 baseValue.getPropertySlot(exec, ident, slot);
639 JSValue result = slot.getPureResult();
640
641 LLINT_RETURN_PROFILED(result);
642}
643
644LLINT_SLOW_PATH_DECL(slow_path_get_by_id_direct)
645{
646 LLINT_BEGIN();
647 auto bytecode = pc->as<OpGetByIdDirect>();
648 CodeBlock* codeBlock = exec->codeBlock();
649 const Identifier& ident = codeBlock->identifier(bytecode.m_property);
650 JSValue baseValue = getOperand(exec, bytecode.m_base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);
652
653 bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
654 LLINT_CHECK_EXCEPTION();
655 JSValue result = found ? slot.getValue(exec, ident) : jsUndefined();
656 LLINT_CHECK_EXCEPTION();
657
658 if (!LLINT_ALWAYS_ACCESS_SLOW && slot.isCacheable()) {
659 auto& metadata = bytecode.metadata(exec);
660 {
661 StructureID oldStructureID = metadata.m_structureID;
662 if (oldStructureID) {
663 Structure* a = vm.heap.structureIDTable().get(oldStructureID);
664 Structure* b = baseValue.asCell()->structure(vm);
665
666 if (Structure::shouldConvertToPolyProto(a, b)) {
667 ASSERT(a->rareData()->sharedPolyProtoWatchpoint().get() == b->rareData()->sharedPolyProtoWatchpoint().get());
668 a->rareData()->sharedPolyProtoWatchpoint()->invalidate(vm, StringFireDetail("Detected poly proto opportunity."));
669 }
670 }
671 }
672
673 JSCell* baseCell = baseValue.asCell();
674 Structure* structure = baseCell->structure(vm);
675 if (slot.isValue()) {
676 // Start out by clearing out the old cache.
677 metadata.m_structureID = 0;
678 metadata.m_offset = 0;
679
680 if (structure->propertyAccessesAreCacheable()
681 && !structure->needImpurePropertyWatchpoint()) {
682 vm.heap.writeBarrier(codeBlock);
683
684 ConcurrentJSLocker locker(codeBlock->m_lock);
685
686 metadata.m_structureID = structure->id();
687 metadata.m_offset = slot.cachedOffset();
688 }
689 }
690 }
691
692 LLINT_RETURN_PROFILED(result);
693}
694
695
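// Sets up the Unset/ProtoLoad forms of the get_by_id cache: installs adaptive
// structure watchpoints over every condition needed for the miss or
// prototype-chain hit to stay valid. A watchpoint fire (or a GC clearing the
// LLInt caches) resets the metadata, which is what makes caching the slot base
// pointer below safe.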
696static void setupGetByIdPrototypeCache(ExecState* exec, VM& vm, const Instruction* pc, OpGetById::Metadata& metadata, JSCell* baseCell, PropertySlot& slot, const Identifier& ident)
697{
698 CodeBlock* codeBlock = exec->codeBlock();
699 Structure* structure = baseCell->structure(vm);
700
701 if (structure->typeInfo().prohibitsPropertyCaching())
702 return;
703
704 if (structure->needImpurePropertyWatchpoint())
705 return;
706
707 if (structure->isDictionary()) {
708 if (structure->hasBeenFlattenedBefore())
709 return;
710 structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseCell));
711 }
712
713 ObjectPropertyConditionSet conditions;
714 if (slot.isUnset())
715 conditions = generateConditionsForPropertyMiss(vm, codeBlock, exec, structure, ident.impl());
716 else
717 conditions = generateConditionsForPrototypePropertyHit(vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
718
719 if (!conditions.isValid())
720 return;
721
722 unsigned bytecodeOffset = codeBlock->bytecodeOffset(pc);
723 PropertyOffset offset = invalidOffset;
724 CodeBlock::StructureWatchpointMap& watchpointMap = codeBlock->llintGetByIdWatchpointMap();
725 Vector<LLIntPrototypeLoadAdaptiveStructureWatchpoint> watchpoints;
726 watchpoints.reserveInitialCapacity(conditions.size());
727 for (ObjectPropertyCondition condition : conditions) {
728 if (!condition.isWatchable())
729 return;
730 if (condition.condition().kind() == PropertyCondition::Presence)
731 offset = condition.condition().offset();
732 watchpoints.uncheckedConstructAndAppend(codeBlock, condition, bytecodeOffset);
733 watchpoints.last().install(vm);
734 }
735
736 ASSERT((offset == invalidOffset) == slot.isUnset());
737 auto result = watchpointMap.add(std::make_tuple(structure->id(), bytecodeOffset), WTFMove(watchpoints));
738 ASSERT_UNUSED(result, result.isNewEntry);
739
740 ConcurrentJSLocker locker(codeBlock->m_lock);
741
742 if (slot.isUnset()) {
743 metadata.m_mode = GetByIdMode::Unset;
744 metadata.m_modeMetadata.unsetMode.structureID = structure->id();
745 return;
746 }
747 ASSERT(slot.isValue());
748
749 metadata.m_mode = GetByIdMode::ProtoLoad;
750 metadata.m_modeMetadata.protoLoadMode.structureID = structure->id();
751 metadata.m_modeMetadata.protoLoadMode.cachedOffset = offset;
    // We know that this pointer will remain valid because it will be cleared by either a watchpoint fire or
    // during GC when we clear the LLInt caches.
    metadata.m_modeMetadata.protoLoadMode.cachedSlot = slot.slotBase();
756}
757
758
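// The LLInt get_by_id cache has four modes: Default caches a self property load
// (structure ID + offset); Unset and ProtoLoad cache misses and prototype-chain
// hits via setupGetByIdPrototypeCache() above; ArrayLength specializes "length"
// accesses on arrays using an ArrayProfile.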
759LLINT_SLOW_PATH_DECL(slow_path_get_by_id)
760{
761 LLINT_BEGIN();
762 auto bytecode = pc->as<OpGetById>();
763 auto& metadata = bytecode.metadata(exec);
764 CodeBlock* codeBlock = exec->codeBlock();
765 const Identifier& ident = codeBlock->identifier(bytecode.m_property);
766 JSValue baseValue = getOperand(exec, bytecode.m_base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
768
769 JSValue result = baseValue.get(exec, ident, slot);
770 LLINT_CHECK_EXCEPTION();
771 exec->uncheckedR(bytecode.m_dst) = result;
772
773 if (!LLINT_ALWAYS_ACCESS_SLOW
774 && baseValue.isCell()
775 && slot.isCacheable()) {
776 {
777 StructureID oldStructureID;
778 auto mode = metadata.m_mode;
779 switch (mode) {
780 case GetByIdMode::Default:
781 oldStructureID = metadata.m_modeMetadata.defaultMode.structureID;
782 break;
783 case GetByIdMode::Unset:
784 oldStructureID = metadata.m_modeMetadata.unsetMode.structureID;
785 break;
786 case GetByIdMode::ProtoLoad:
787 oldStructureID = metadata.m_modeMetadata.protoLoadMode.structureID;
788 break;
789 default:
790 oldStructureID = 0;
791 }
792 if (oldStructureID) {
793 Structure* a = vm.heap.structureIDTable().get(oldStructureID);
794 Structure* b = baseValue.asCell()->structure(vm);
795
796 if (Structure::shouldConvertToPolyProto(a, b)) {
797 ASSERT(a->rareData()->sharedPolyProtoWatchpoint().get() == b->rareData()->sharedPolyProtoWatchpoint().get());
798 a->rareData()->sharedPolyProtoWatchpoint()->invalidate(vm, StringFireDetail("Detected poly proto opportunity."));
799 }
800 }
801 }
802
803 JSCell* baseCell = baseValue.asCell();
804 Structure* structure = baseCell->structure(vm);
805 if (slot.isValue() && slot.slotBase() == baseValue) {
806 // Start out by clearing out the old cache.
807 metadata.m_mode = GetByIdMode::Default;
808 metadata.m_modeMetadata.defaultMode.structureID = 0;
809 metadata.m_modeMetadata.defaultMode.cachedOffset = 0;
810
811 // Prevent the prototype cache from ever happening.
812 metadata.m_hitCountForLLIntCaching = 0;
813
814 if (structure->propertyAccessesAreCacheable()
815 && !structure->needImpurePropertyWatchpoint()) {
816 vm.heap.writeBarrier(codeBlock);
817
818 ConcurrentJSLocker locker(codeBlock->m_lock);
819
820 metadata.m_modeMetadata.defaultMode.structureID = structure->id();
821 metadata.m_modeMetadata.defaultMode.cachedOffset = slot.cachedOffset();
822 }
823 } else if (UNLIKELY(metadata.m_hitCountForLLIntCaching && (slot.isValue() || slot.isUnset()))) {
824 ASSERT(slot.slotBase() != baseValue);
825
826 if (!(--metadata.m_hitCountForLLIntCaching))
827 setupGetByIdPrototypeCache(exec, vm, pc, metadata, baseCell, slot, ident);
828 }
829 } else if (!LLINT_ALWAYS_ACCESS_SLOW
830 && isJSArray(baseValue)
831 && ident == vm.propertyNames->length) {
832 metadata.m_mode = GetByIdMode::ArrayLength;
833 new (&metadata.m_modeMetadata.arrayLengthMode.arrayProfile) ArrayProfile(codeBlock->bytecodeOffset(pc));
834 metadata.m_modeMetadata.arrayLengthMode.arrayProfile.observeStructure(baseValue.asCell()->structure(vm));
835
836 // Prevent the prototype cache from ever happening.
837 metadata.m_hitCountForLLIntCaching = 0;
838 }
839
840 LLINT_PROFILE_VALUE(result);
841 LLINT_END();
842}
843
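// For cacheable puts this records either a property replacement (old structure
// + offset) or, when a new property is added without growing the out-of-line
// storage, the old and new structure IDs plus, for non-direct puts, the
// prototype chain that must remain unchanged for the cache to stay valid.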
844LLINT_SLOW_PATH_DECL(slow_path_put_by_id)
845{
846 LLINT_BEGIN();
847 auto bytecode = pc->as<OpPutById>();
848 auto& metadata = bytecode.metadata(exec);
849 CodeBlock* codeBlock = exec->codeBlock();
850 const Identifier& ident = codeBlock->identifier(bytecode.m_property);
851
852 JSValue baseValue = getOperand(exec, bytecode.m_base);
853 PutPropertySlot slot(baseValue, codeBlock->isStrictMode(), codeBlock->putByIdContext());
854 if (bytecode.m_flags & PutByIdIsDirect)
855 CommonSlowPaths::putDirectWithReify(vm, exec, asObject(baseValue), ident, getOperand(exec, bytecode.m_value), slot);
856 else
857 baseValue.putInline(exec, ident, getOperand(exec, bytecode.m_value), slot);
858 LLINT_CHECK_EXCEPTION();
859
860 if (!LLINT_ALWAYS_ACCESS_SLOW
861 && baseValue.isCell()
862 && slot.isCacheablePut()) {
863
864 {
865 StructureID oldStructureID = metadata.m_oldStructureID;
866 if (oldStructureID) {
867 Structure* a = vm.heap.structureIDTable().get(oldStructureID);
868 Structure* b = baseValue.asCell()->structure(vm);
869 if (slot.type() == PutPropertySlot::NewProperty)
870 b = b->previousID();
871
872 if (Structure::shouldConvertToPolyProto(a, b)) {
873 a->rareData()->sharedPolyProtoWatchpoint()->invalidate(vm, StringFireDetail("Detected poly proto opportunity."));
874 b->rareData()->sharedPolyProtoWatchpoint()->invalidate(vm, StringFireDetail("Detected poly proto opportunity."));
875 }
876 }
877 }
878
879 // Start out by clearing out the old cache.
880 metadata.m_oldStructureID = 0;
881 metadata.m_offset = 0;
882 metadata.m_newStructureID = 0;
883 metadata.m_structureChain.clear();
884
885 JSCell* baseCell = baseValue.asCell();
886 Structure* structure = baseCell->structure(vm);
887
888 if (!structure->isUncacheableDictionary()
889 && !structure->typeInfo().prohibitsPropertyCaching()
890 && baseCell == slot.base()) {
891
892 vm.heap.writeBarrier(codeBlock);
893
894 if (slot.type() == PutPropertySlot::NewProperty) {
895 GCSafeConcurrentJSLocker locker(codeBlock->m_lock, vm.heap);
896
897 if (!structure->isDictionary() && structure->previousID()->outOfLineCapacity() == structure->outOfLineCapacity()) {
898 ASSERT(structure->previousID()->transitionWatchpointSetHasBeenInvalidated());
899
900 bool sawPolyProto = false;
901 auto result = normalizePrototypeChain(exec, baseCell, sawPolyProto);
902 if (result != InvalidPrototypeChain && !sawPolyProto) {
903 ASSERT(structure->previousID()->isObject());
904 metadata.m_oldStructureID = structure->previousID()->id();
905 metadata.m_offset = slot.cachedOffset();
906 metadata.m_newStructureID = structure->id();
907 if (!(bytecode.m_flags & PutByIdIsDirect)) {
908 StructureChain* chain = structure->prototypeChain(exec, asObject(baseCell));
909 ASSERT(chain);
910 metadata.m_structureChain.set(vm, codeBlock, chain);
911 }
912 }
913 }
914 } else {
915 structure->didCachePropertyReplacement(vm, slot.cachedOffset());
916 metadata.m_oldStructureID = structure->id();
917 metadata.m_offset = slot.cachedOffset();
918 }
919 }
920 }
921
922 LLINT_END();
923}
924
925LLINT_SLOW_PATH_DECL(slow_path_del_by_id)
926{
927 LLINT_BEGIN();
928 auto bytecode = pc->as<OpDelById>();
929 CodeBlock* codeBlock = exec->codeBlock();
930 JSObject* baseObject = getOperand(exec, bytecode.m_base).toObject(exec);
931 LLINT_CHECK_EXCEPTION();
932 bool couldDelete = baseObject->methodTable(vm)->deleteProperty(baseObject, exec, codeBlock->identifier(bytecode.m_property));
933 LLINT_CHECK_EXCEPTION();
934 if (!couldDelete && codeBlock->isStrictMode())
935 LLINT_THROW(createTypeError(exec, UnableToDeletePropertyError));
936 LLINT_RETURN(jsBoolean(couldDelete));
937}
938
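// Generic get_by_val: try a fast own-property lookup for string subscripts,
// then an indexed fast path for uint32 subscripts (recording out-of-bounds
// accesses in the ArrayProfile), and finally fall back to a generic get after
// coercing the subscript to a property key.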
939static ALWAYS_INLINE JSValue getByVal(VM& vm, ExecState* exec, OpGetByVal bytecode)
940{
941 JSValue baseValue = getOperand(exec, bytecode.m_base);
942 JSValue subscript = getOperand(exec, bytecode.m_property);
943 auto scope = DECLARE_THROW_SCOPE(vm);
944
945 if (LIKELY(baseValue.isCell() && subscript.isString())) {
946 Structure& structure = *baseValue.asCell()->structure(vm);
947 if (JSCell::canUseFastGetOwnProperty(structure)) {
948 RefPtr<AtomicStringImpl> existingAtomicString = asString(subscript)->toExistingAtomicString(exec);
949 RETURN_IF_EXCEPTION(scope, JSValue());
950 if (existingAtomicString) {
951 if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomicString.get()))
952 return result;
953 }
954 }
955 }
956
957 if (subscript.isUInt32()) {
958 uint32_t i = subscript.asUInt32();
959 auto& metadata = bytecode.metadata(exec);
960 ArrayProfile* arrayProfile = &metadata.m_arrayProfile;
961
962 if (isJSString(baseValue)) {
963 if (asString(baseValue)->canGetIndex(i)) {
964 scope.release();
965 return asString(baseValue)->getIndex(exec, i);
966 }
967 arrayProfile->setOutOfBounds();
968 } else if (baseValue.isObject()) {
969 JSObject* object = asObject(baseValue);
970 if (object->canGetIndexQuickly(i))
971 return object->getIndexQuickly(i);
972
973 bool skipMarkingOutOfBounds = false;
974
975 if (object->indexingType() == ArrayWithContiguous && i < object->butterfly()->publicLength()) {
976 // FIXME: expand this to ArrayStorage, Int32, and maybe Double:
977 // https://bugs.webkit.org/show_bug.cgi?id=182940
978 auto* globalObject = object->globalObject(vm);
979 skipMarkingOutOfBounds = globalObject->isOriginalArrayStructure(object->structure(vm)) && globalObject->arrayPrototypeChainIsSane();
980 }
981
982 if (!skipMarkingOutOfBounds && !CommonSlowPaths::canAccessArgumentIndexQuickly(*object, i))
983 arrayProfile->setOutOfBounds();
984 }
985
986 scope.release();
987 return baseValue.get(exec, i);
988 }
989
990 baseValue.requireObjectCoercible(exec);
991 RETURN_IF_EXCEPTION(scope, JSValue());
992 auto property = subscript.toPropertyKey(exec);
993 RETURN_IF_EXCEPTION(scope, JSValue());
994 scope.release();
995 return baseValue.get(exec, property);
996}
997
998LLINT_SLOW_PATH_DECL(slow_path_get_by_val)
999{
1000 LLINT_BEGIN();
1001 auto bytecode = pc->as<OpGetByVal>();
1002 LLINT_RETURN_PROFILED(getByVal(vm, exec, bytecode));
1003}
1004
1005LLINT_SLOW_PATH_DECL(slow_path_put_by_val)
1006{
1007 LLINT_BEGIN();
1008
1009 auto bytecode = pc->as<OpPutByVal>();
1010 JSValue baseValue = getOperand(exec, bytecode.m_base);
1011 JSValue subscript = getOperand(exec, bytecode.m_property);
1012 JSValue value = getOperand(exec, bytecode.m_value);
1013 bool isStrictMode = exec->codeBlock()->isStrictMode();
1014
1015 if (LIKELY(subscript.isUInt32())) {
1016 uint32_t i = subscript.asUInt32();
1017 if (baseValue.isObject()) {
1018 JSObject* object = asObject(baseValue);
1019 if (object->canSetIndexQuickly(i))
1020 object->setIndexQuickly(vm, i, value);
1021 else
1022 object->methodTable(vm)->putByIndex(object, exec, i, value, isStrictMode);
1023 LLINT_END();
1024 }
1025 baseValue.putByIndex(exec, i, value, isStrictMode);
1026 LLINT_END();
1027 }
1028
1029 auto property = subscript.toPropertyKey(exec);
1030 LLINT_CHECK_EXCEPTION();
1031 PutPropertySlot slot(baseValue, isStrictMode);
1032 baseValue.put(exec, property, value, slot);
1033 LLINT_END();
1034}
1035
1036LLINT_SLOW_PATH_DECL(slow_path_put_by_val_direct)
1037{
1038 LLINT_BEGIN();
1039
1040 auto bytecode = pc->as<OpPutByValDirect>();
1041 JSValue baseValue = getOperand(exec, bytecode.m_base);
1042 JSValue subscript = getOperand(exec, bytecode.m_property);
1043 JSValue value = getOperand(exec, bytecode.m_value);
1044 RELEASE_ASSERT(baseValue.isObject());
1045 JSObject* baseObject = asObject(baseValue);
1046 bool isStrictMode = exec->codeBlock()->isStrictMode();
1047 if (LIKELY(subscript.isUInt32())) {
1048 // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
1049 ASSERT(isIndex(subscript.asUInt32()));
1050 baseObject->putDirectIndex(exec, subscript.asUInt32(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
1051 LLINT_END();
1052 }
1053
1054 if (subscript.isDouble()) {
1055 double subscriptAsDouble = subscript.asDouble();
1056 uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
1057 if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
1058 baseObject->putDirectIndex(exec, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
1059 LLINT_END();
1060 }
1061 }
1062
1063 // Don't put to an object if toString threw an exception.
1064 auto property = subscript.toPropertyKey(exec);
1065 if (UNLIKELY(throwScope.exception()))
1066 LLINT_END();
1067
1068 if (Optional<uint32_t> index = parseIndex(property))
1069 baseObject->putDirectIndex(exec, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
1070 else {
1071 PutPropertySlot slot(baseObject, isStrictMode);
1072 CommonSlowPaths::putDirectWithReify(vm, exec, baseObject, property, value, slot);
1073 }
1074 LLINT_END();
1075}
1076
1077LLINT_SLOW_PATH_DECL(slow_path_del_by_val)
1078{
1079 LLINT_BEGIN();
1080 auto bytecode = pc->as<OpDelByVal>();
1081 JSValue baseValue = getOperand(exec, bytecode.m_base);
1082 JSObject* baseObject = baseValue.toObject(exec);
1083 LLINT_CHECK_EXCEPTION();
1084
1085 JSValue subscript = getOperand(exec, bytecode.m_property);
1086
1087 bool couldDelete;
1088
1089 uint32_t i;
1090 if (subscript.getUInt32(i))
1091 couldDelete = baseObject->methodTable(vm)->deletePropertyByIndex(baseObject, exec, i);
1092 else {
1093 LLINT_CHECK_EXCEPTION();
1094 auto property = subscript.toPropertyKey(exec);
1095 LLINT_CHECK_EXCEPTION();
1096 couldDelete = baseObject->methodTable(vm)->deleteProperty(baseObject, exec, property);
1097 }
1098 LLINT_CHECK_EXCEPTION();
1099
1100 if (!couldDelete && exec->codeBlock()->isStrictMode())
1101 LLINT_THROW(createTypeError(exec, UnableToDeletePropertyError));
1102
1103 LLINT_RETURN(jsBoolean(couldDelete));
1104}
1105
1106LLINT_SLOW_PATH_DECL(slow_path_put_getter_by_id)
1107{
1108 LLINT_BEGIN();
1109 auto bytecode = pc->as<OpPutGetterById>();
1110 ASSERT(getNonConstantOperand(exec, bytecode.m_base).isObject());
1111 JSObject* baseObj = asObject(getNonConstantOperand(exec, bytecode.m_base));
1112
1113 unsigned options = bytecode.m_attributes;
1114
1115 JSValue getter = getNonConstantOperand(exec, bytecode.m_accessor);
1116 ASSERT(getter.isObject());
1117
1118 baseObj->putGetter(exec, exec->codeBlock()->identifier(bytecode.m_property), asObject(getter), options);
1119 LLINT_END();
1120}
1121
1122LLINT_SLOW_PATH_DECL(slow_path_put_setter_by_id)
1123{
1124 LLINT_BEGIN();
1125 auto bytecode = pc->as<OpPutSetterById>();
1126 ASSERT(getNonConstantOperand(exec, bytecode.m_base).isObject());
1127 JSObject* baseObj = asObject(getNonConstantOperand(exec, bytecode.m_base));
1128
1129 unsigned options = bytecode.m_attributes;
1130
1131 JSValue setter = getNonConstantOperand(exec, bytecode.m_accessor);
1132 ASSERT(setter.isObject());
1133
1134 baseObj->putSetter(exec, exec->codeBlock()->identifier(bytecode.m_property), asObject(setter), options);
1135 LLINT_END();
1136}
1137
1138LLINT_SLOW_PATH_DECL(slow_path_put_getter_setter_by_id)
1139{
1140 LLINT_BEGIN();
1141 auto bytecode = pc->as<OpPutGetterSetterById>();
1142 ASSERT(getNonConstantOperand(exec, bytecode.m_base).isObject());
1143 JSObject* baseObject = asObject(getNonConstantOperand(exec, bytecode.m_base));
1144
1145 JSValue getter = getNonConstantOperand(exec, bytecode.m_getter);
1146 JSValue setter = getNonConstantOperand(exec, bytecode.m_setter);
1147 ASSERT(getter.isObject() || setter.isObject());
1148 GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject(), getter, setter);
1149
1150 CommonSlowPaths::putDirectAccessorWithReify(vm, exec, baseObject, exec->codeBlock()->identifier(bytecode.m_property), accessor, bytecode.m_attributes);
1151 LLINT_END();
1152}
1153
1154LLINT_SLOW_PATH_DECL(slow_path_put_getter_by_val)
1155{
1156 LLINT_BEGIN();
1157 auto bytecode = pc->as<OpPutGetterByVal>();
1158 ASSERT(getNonConstantOperand(exec, bytecode.m_base).isObject());
1159 JSObject* baseObj = asObject(getNonConstantOperand(exec, bytecode.m_base));
1160 JSValue subscript = getOperand(exec, bytecode.m_property);
1161
1162 unsigned options = bytecode.m_attributes;
1163
1164 JSValue getter = getNonConstantOperand(exec, bytecode.m_accessor);
1165 ASSERT(getter.isObject());
1166
1167 auto property = subscript.toPropertyKey(exec);
1168 LLINT_CHECK_EXCEPTION();
1169
1170 baseObj->putGetter(exec, property, asObject(getter), options);
1171 LLINT_END();
1172}
1173
1174LLINT_SLOW_PATH_DECL(slow_path_put_setter_by_val)
1175{
1176 LLINT_BEGIN();
1177 auto bytecode = pc->as<OpPutSetterByVal>();
1178 ASSERT(getNonConstantOperand(exec, bytecode.m_base).isObject());
1179 JSObject* baseObj = asObject(getNonConstantOperand(exec, bytecode.m_base));
1180 JSValue subscript = getOperand(exec, bytecode.m_property);
1181
1182 unsigned options = bytecode.m_attributes;
1183
1184 JSValue setter = getNonConstantOperand(exec, bytecode.m_accessor);
1185 ASSERT(setter.isObject());
1186
1187 auto property = subscript.toPropertyKey(exec);
1188 LLINT_CHECK_EXCEPTION();
1189
1190 baseObj->putSetter(exec, property, asObject(setter), options);
1191 LLINT_END();
1192}
1193
1194LLINT_SLOW_PATH_DECL(slow_path_jtrue)
1195{
1196 LLINT_BEGIN();
1197 auto bytecode = pc->as<OpJtrue>();
1198 LLINT_BRANCH(getOperand(exec, bytecode.m_condition).toBoolean(exec));
1199}
1200
1201LLINT_SLOW_PATH_DECL(slow_path_jfalse)
1202{
1203 LLINT_BEGIN();
1204 auto bytecode = pc->as<OpJfalse>();
1205 LLINT_BRANCH(!getOperand(exec, bytecode.m_condition).toBoolean(exec));
1206}
1207
1208LLINT_SLOW_PATH_DECL(slow_path_jless)
1209{
1210 LLINT_BEGIN();
1211 auto bytecode = pc->as<OpJless>();
1212 LLINT_BRANCH(jsLess<true>(exec, getOperand(exec, bytecode.m_lhs), getOperand(exec, bytecode.m_rhs)));
1213}
1214
1215LLINT_SLOW_PATH_DECL(slow_path_jnless)
1216{
1217 LLINT_BEGIN();
1218 auto bytecode = pc->as<OpJnless>();
1219 LLINT_BRANCH(!jsLess<true>(exec, getOperand(exec, bytecode.m_lhs), getOperand(exec, bytecode.m_rhs)));
1220}
1221
1222LLINT_SLOW_PATH_DECL(slow_path_jgreater)
1223{
1224 LLINT_BEGIN();
1225 auto bytecode = pc->as<OpJgreater>();
1226 LLINT_BRANCH(jsLess<false>(exec, getOperand(exec, bytecode.m_rhs), getOperand(exec, bytecode.m_lhs)));
1227}
1228
1229LLINT_SLOW_PATH_DECL(slow_path_jngreater)
1230{
1231 LLINT_BEGIN();
1232 auto bytecode = pc->as<OpJngreater>();
1233 LLINT_BRANCH(!jsLess<false>(exec, getOperand(exec, bytecode.m_rhs), getOperand(exec, bytecode.m_lhs)));
1234}
1235
1236LLINT_SLOW_PATH_DECL(slow_path_jlesseq)
1237{
1238 LLINT_BEGIN();
1239 auto bytecode = pc->as<OpJlesseq>();
1240 LLINT_BRANCH(jsLessEq<true>(exec, getOperand(exec, bytecode.m_lhs), getOperand(exec, bytecode.m_rhs)));
1241}
1242
1243LLINT_SLOW_PATH_DECL(slow_path_jnlesseq)
1244{
1245 LLINT_BEGIN();
1246 auto bytecode = pc->as<OpJnlesseq>();
1247 LLINT_BRANCH(!jsLessEq<true>(exec, getOperand(exec, bytecode.m_lhs), getOperand(exec, bytecode.m_rhs)));
1248}
1249
1250LLINT_SLOW_PATH_DECL(slow_path_jgreatereq)
1251{
1252 LLINT_BEGIN();
1253 auto bytecode = pc->as<OpJgreatereq>();
1254 LLINT_BRANCH(jsLessEq<false>(exec, getOperand(exec, bytecode.m_rhs), getOperand(exec, bytecode.m_lhs)));
1255}
1256
1257LLINT_SLOW_PATH_DECL(slow_path_jngreatereq)
1258{
1259 LLINT_BEGIN();
1260 auto bytecode = pc->as<OpJngreatereq>();
1261 LLINT_BRANCH(!jsLessEq<false>(exec, getOperand(exec, bytecode.m_rhs), getOperand(exec, bytecode.m_lhs)));
1262}
1263
1264LLINT_SLOW_PATH_DECL(slow_path_jeq)
1265{
1266 LLINT_BEGIN();
1267 auto bytecode = pc->as<OpJeq>();
1268 LLINT_BRANCH(JSValue::equal(exec, getOperand(exec, bytecode.m_lhs), getOperand(exec, bytecode.m_rhs)));
1269}
1270
1271LLINT_SLOW_PATH_DECL(slow_path_jneq)
1272{
1273 LLINT_BEGIN();
1274 auto bytecode = pc->as<OpJneq>();
1275 LLINT_BRANCH(!JSValue::equal(exec, getOperand(exec, bytecode.m_lhs), getOperand(exec, bytecode.m_rhs)));
1276}
1277
1278LLINT_SLOW_PATH_DECL(slow_path_jstricteq)
1279{
1280 LLINT_BEGIN();
1281 auto bytecode = pc->as<OpJstricteq>();
1282 LLINT_BRANCH(JSValue::strictEqual(exec, getOperand(exec, bytecode.m_lhs), getOperand(exec, bytecode.m_rhs)));
1283}
1284
1285LLINT_SLOW_PATH_DECL(slow_path_jnstricteq)
1286{
1287 LLINT_BEGIN();
1288 auto bytecode = pc->as<OpJnstricteq>();
1289 LLINT_BRANCH(!JSValue::strictEqual(exec, getOperand(exec, bytecode.m_lhs), getOperand(exec, bytecode.m_rhs)));
1290}
1291
1292LLINT_SLOW_PATH_DECL(slow_path_switch_imm)
1293{
1294 LLINT_BEGIN();
1295 auto bytecode = pc->as<OpSwitchImm>();
1296 JSValue scrutinee = getOperand(exec, bytecode.m_scrutinee);
1297 ASSERT(scrutinee.isDouble());
1298 double value = scrutinee.asDouble();
1299 int32_t intValue = static_cast<int32_t>(value);
1300 int defaultOffset = JUMP_OFFSET(bytecode.m_defaultOffset);
1301 if (value == intValue) {
1302 CodeBlock* codeBlock = exec->codeBlock();
1303 JUMP_TO(codeBlock->switchJumpTable(bytecode.m_tableIndex).offsetForValue(intValue, defaultOffset));
1304 } else
1305 JUMP_TO(defaultOffset);
1306 LLINT_END();
1307}
1308
1309LLINT_SLOW_PATH_DECL(slow_path_switch_char)
1310{
1311 LLINT_BEGIN();
1312 auto bytecode = pc->as<OpSwitchChar>();
1313 JSValue scrutinee = getOperand(exec, bytecode.m_scrutinee);
1314 ASSERT(scrutinee.isString());
1315 JSString* string = asString(scrutinee);
1316 ASSERT(string->length() == 1);
1317 int defaultOffset = JUMP_OFFSET(bytecode.m_defaultOffset);
1318 StringImpl* impl = string->value(exec).impl();
1319 CodeBlock* codeBlock = exec->codeBlock();
1320 JUMP_TO(codeBlock->switchJumpTable(bytecode.m_tableIndex).offsetForValue((*impl)[0], defaultOffset));
1321 LLINT_END();
1322}
1323
1324LLINT_SLOW_PATH_DECL(slow_path_switch_string)
1325{
1326 LLINT_BEGIN();
1327 auto bytecode = pc->as<OpSwitchString>();
1328 JSValue scrutinee = getOperand(exec, bytecode.m_scrutinee);
1329 int defaultOffset = JUMP_OFFSET(bytecode.m_defaultOffset);
1330 if (!scrutinee.isString())
1331 JUMP_TO(defaultOffset);
1332 else {
1333 CodeBlock* codeBlock = exec->codeBlock();
1334 JUMP_TO(codeBlock->stringSwitchJumpTable(bytecode.m_tableIndex).offsetForValue(asString(scrutinee)->value(exec).impl(), defaultOffset));
1335 }
1336 LLINT_END();
1337}
1338
1339LLINT_SLOW_PATH_DECL(slow_path_new_func)
1340{
1341 LLINT_BEGIN();
1342 auto bytecode = pc->as<OpNewFunc>();
1343 CodeBlock* codeBlock = exec->codeBlock();
1344 JSScope* scope = exec->uncheckedR(bytecode.m_scope).Register::scope();
1345 slowPathLogF("Creating function!\n");
1346 LLINT_RETURN(JSFunction::create(vm, codeBlock->functionDecl(bytecode.m_functionDecl), scope));
1347}
1348
1349LLINT_SLOW_PATH_DECL(slow_path_new_generator_func)
1350{
1351 LLINT_BEGIN();
1352 auto bytecode = pc->as<OpNewGeneratorFunc>();
1353 CodeBlock* codeBlock = exec->codeBlock();
1354 JSScope* scope = exec->uncheckedR(bytecode.m_scope).Register::scope();
1355 slowPathLogF("Creating function!\n");
1356 LLINT_RETURN(JSGeneratorFunction::create(vm, codeBlock->functionDecl(bytecode.m_functionDecl), scope));
1357}
1358
1359LLINT_SLOW_PATH_DECL(slow_path_new_async_func)
1360{
1361 LLINT_BEGIN();
1362 auto bytecode = pc->as<OpNewAsyncFunc>();
1363 CodeBlock* codeBlock = exec->codeBlock();
1364 JSScope* scope = exec->uncheckedR(bytecode.m_scope).Register::scope();
1365 slowPathLogF("Creating async function!\n");
1366 LLINT_RETURN(JSAsyncFunction::create(vm, codeBlock->functionDecl(bytecode.m_functionDecl), scope));
1367}
1368
1369LLINT_SLOW_PATH_DECL(slow_path_new_async_generator_func)
1370{
1371 LLINT_BEGIN();
1372 auto bytecode = pc->as<OpNewAsyncGeneratorFunc>();
1373 CodeBlock* codeBlock = exec->codeBlock();
1374 JSScope* scope = exec->uncheckedR(bytecode.m_scope).Register::scope();
1375 slowPathLogF("Creating async generator function!\n");
1376 LLINT_RETURN(JSAsyncGeneratorFunction::create(vm, codeBlock->functionDecl(bytecode.m_functionDecl), scope));
1377}
1378
1379LLINT_SLOW_PATH_DECL(slow_path_new_func_exp)
1380{
1381 LLINT_BEGIN();
1382
1383 auto bytecode = pc->as<OpNewFuncExp>();
1384 CodeBlock* codeBlock = exec->codeBlock();
1385 JSScope* scope = exec->uncheckedR(bytecode.m_scope).Register::scope();
1386 FunctionExecutable* executable = codeBlock->functionExpr(bytecode.m_functionDecl);
1387
1388 LLINT_RETURN(JSFunction::create(vm, executable, scope));
1389}
1390
1391LLINT_SLOW_PATH_DECL(slow_path_new_generator_func_exp)
1392{
1393 LLINT_BEGIN();
1394
1395 auto bytecode = pc->as<OpNewGeneratorFuncExp>();
1396 CodeBlock* codeBlock = exec->codeBlock();
1397 JSScope* scope = exec->uncheckedR(bytecode.m_scope).Register::scope();
1398 FunctionExecutable* executable = codeBlock->functionExpr(bytecode.m_functionDecl);
1399
1400 LLINT_RETURN(JSGeneratorFunction::create(vm, executable, scope));
1401}
1402
1403LLINT_SLOW_PATH_DECL(slow_path_new_async_func_exp)
1404{
1405 LLINT_BEGIN();
1406
1407 auto bytecode = pc->as<OpNewAsyncFuncExp>();
1408 CodeBlock* codeBlock = exec->codeBlock();
1409 JSScope* scope = exec->uncheckedR(bytecode.m_scope).Register::scope();
1410 FunctionExecutable* executable = codeBlock->functionExpr(bytecode.m_functionDecl);
1411
1412 LLINT_RETURN(JSAsyncFunction::create(vm, executable, scope));
1413}
1414
1415LLINT_SLOW_PATH_DECL(slow_path_new_async_generator_func_exp)
1416{
1417 LLINT_BEGIN();
1418
1419 auto bytecode = pc->as<OpNewAsyncGeneratorFuncExp>();
1420 CodeBlock* codeBlock = exec->codeBlock();
1421 JSScope* scope = exec->uncheckedR(bytecode.m_scope).Register::scope();
1422 FunctionExecutable* executable = codeBlock->functionExpr(bytecode.m_functionDecl);
1423
1424 LLINT_RETURN(JSAsyncGeneratorFunction::create(vm, executable, scope));
1425}
1426
1427LLINT_SLOW_PATH_DECL(slow_path_set_function_name)
1428{
1429 LLINT_BEGIN();
1430 auto bytecode = pc->as<OpSetFunctionName>();
1431 JSFunction* func = jsCast<JSFunction*>(getNonConstantOperand(exec, bytecode.m_function));
1432 JSValue name = getOperand(exec, bytecode.m_name);
1433 func->setFunctionName(exec, name);
1434 LLINT_END();
1435}
1436
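// Reached when the callee is not a JS function. Runs the host call/construct
// handler if there is one and returns through the getHostCallReturnValue
// trampoline; otherwise throws a "not a function" / "not a constructor" error.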
1437static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CodeSpecializationKind kind)
1438{
1439 slowPathLog("Performing host call.\n");
1440
1441 ExecState* exec = execCallee->callerFrame();
1442 VM& vm = exec->vm();
1443 auto throwScope = DECLARE_THROW_SCOPE(vm);
1444
1445 execCallee->setCodeBlock(0);
1446 execCallee->clearReturnPC();
1447
1448 if (kind == CodeForCall) {
1449 CallData callData;
1450 CallType callType = getCallData(vm, callee, callData);
1451
1452 ASSERT(callType != CallType::JS);
1453
1454 if (callType == CallType::Host) {
1455 NativeCallFrameTracer tracer(&vm, execCallee);
1456 execCallee->setCallee(asObject(callee));
1457 vm.hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
1458 LLINT_CALL_RETURN(execCallee, execCallee, LLInt::getCodePtr(getHostCallReturnValue), CFunctionPtrTag);
1459 }
1460
1461 slowPathLog("Call callee is not a function: ", callee, "\n");
1462
1463 ASSERT(callType == CallType::None);
1464 LLINT_CALL_THROW(exec, createNotAFunctionError(exec, callee));
1465 }
1466
1467 ASSERT(kind == CodeForConstruct);
1468
1469 ConstructData constructData;
1470 ConstructType constructType = getConstructData(vm, callee, constructData);
1471
1472 ASSERT(constructType != ConstructType::JS);
1473
1474 if (constructType == ConstructType::Host) {
1475 NativeCallFrameTracer tracer(&vm, execCallee);
1476 execCallee->setCallee(asObject(callee));
1477 vm.hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
1478 LLINT_CALL_RETURN(execCallee, execCallee, LLInt::getCodePtr(getHostCallReturnValue), CFunctionPtrTag);
1479 }
1480
1481 slowPathLog("Constructor callee is not a function: ", callee, "\n");
1482
1483 ASSERT(constructType == ConstructType::None);
1484 LLINT_CALL_THROW(exec, createNotAConstructorError(exec, callee));
1485}
1486
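// Resolves the callee to a machine code entrypoint: internal functions get the
// CTI trampoline, host functions their native entrypoint, and JS functions are
// compiled via prepareForExecution() if necessary (checking arity along the
// way). If a LLIntCallLinkInfo is supplied, the result is also cached in the
// LLInt's call inline cache.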
1487inline SlowPathReturnType setUpCall(ExecState* execCallee, CodeSpecializationKind kind, JSValue calleeAsValue, LLIntCallLinkInfo* callLinkInfo = nullptr)
1488{
1489 ExecState* exec = execCallee->callerFrame();
1490 VM& vm = exec->vm();
1491 auto throwScope = DECLARE_THROW_SCOPE(vm);
1492
1493 slowPathLogF("Performing call with recorded PC = %p\n", exec->currentVPC());
1494
1495 JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
1496 if (!calleeAsFunctionCell) {
1497 if (auto* internalFunction = jsDynamicCast<InternalFunction*>(vm, calleeAsValue)) {
1498 MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm.getCTIInternalFunctionTrampolineFor(kind);
1499 ASSERT(!!codePtr);
1500
1501 if (!LLINT_ALWAYS_ACCESS_SLOW && callLinkInfo) {
1502 CodeBlock* callerCodeBlock = exec->codeBlock();
1503
1504 ConcurrentJSLocker locker(callerCodeBlock->m_lock);
1505
1506 if (callLinkInfo->isOnList())
1507 callLinkInfo->remove();
1508 callLinkInfo->callee.set(vm, callerCodeBlock, internalFunction);
1509 callLinkInfo->lastSeenCallee.set(vm, callerCodeBlock, internalFunction);
1510 callLinkInfo->machineCodeTarget = codePtr;
1511 }
1512
1513 assertIsTaggedWith(codePtr.executableAddress(), JSEntryPtrTag);
1514 LLINT_CALL_RETURN(exec, execCallee, codePtr.executableAddress(), JSEntryPtrTag);
1515 }
1516 RELEASE_AND_RETURN(throwScope, handleHostCall(execCallee, calleeAsValue, kind));
1517 }
1518 JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
1519 JSScope* scope = callee->scopeUnchecked();
1520 ExecutableBase* executable = callee->executable();
1521
1522 MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
1523 CodeBlock* codeBlock = 0;
1524 if (executable->isHostFunction())
1525 codePtr = executable->entrypointFor(kind, MustCheckArity);
1526 else {
1527 FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
1528
1529 if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct)
1530 LLINT_CALL_THROW(exec, createNotAConstructorError(exec, callee));
1531
1532 CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
1533 Exception* error = functionExecutable->prepareForExecution<FunctionExecutable>(vm, callee, scope, kind, *codeBlockSlot);
1534 EXCEPTION_ASSERT(throwScope.exception() == error);
1535 if (UNLIKELY(error))
1536 LLINT_CALL_THROW(exec, error);
1537 codeBlock = *codeBlockSlot;
1538 ASSERT(codeBlock);
1539 ArityCheckMode arity;
1540 if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
1541 arity = MustCheckArity;
1542 else
1543 arity = ArityCheckNotRequired;
1544 codePtr = functionExecutable->entrypointFor(kind, arity);
1545 }
1546
1547 ASSERT(!!codePtr);
1548
    if (!LLINT_ALWAYS_ACCESS_SLOW && callLinkInfo) {
        CodeBlock* callerCodeBlock = exec->codeBlock();

        ConcurrentJSLocker locker(callerCodeBlock->m_lock);

        if (callLinkInfo->isOnList())
            callLinkInfo->remove();
        callLinkInfo->callee.set(vm, callerCodeBlock, callee);
        callLinkInfo->lastSeenCallee.set(vm, callerCodeBlock, callee);
        callLinkInfo->machineCodeTarget = codePtr;
        if (codeBlock)
            codeBlock->linkIncomingCall(exec, callLinkInfo);
    }

    assertIsTaggedWith(codePtr.executableAddress(), JSEntryPtrTag);
    LLINT_CALL_RETURN(exec, execCallee, codePtr.executableAddress(), JSEntryPtrTag);
}

template<typename Op>
inline SlowPathReturnType genericCall(ExecState* exec, Op&& bytecode, CodeSpecializationKind kind)
{
    // This needs to:
    // - Set up a call frame.
    // - Figure out what to call and compile it if necessary.
    // - If possible, link the call's inline cache.
    // - Return a tuple of machine code address to call and the new call frame.

    JSValue calleeAsValue = getOperand(exec, bytecode.m_callee);

    ExecState* execCallee = exec - bytecode.m_argv;
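    // The callee frame starts m_argv registers below the caller's frame; the bytecode for this call
    // has already stored the arguments there, so only the frame header fields remain to be set.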

    execCallee->setArgumentCountIncludingThis(bytecode.m_argc);
    execCallee->uncheckedR(CallFrameSlot::callee) = calleeAsValue;
    execCallee->setCallerFrame(exec);

    auto& metadata = bytecode.metadata(exec);
    return setUpCall(execCallee, kind, calleeAsValue, &metadata.m_callLinkInfo);
}

LLINT_SLOW_PATH_DECL(slow_path_call)
{
    LLINT_BEGIN_NO_SET_PC();
    RELEASE_AND_RETURN(throwScope, genericCall(exec, pc->as<OpCall>(), CodeForCall));
}

LLINT_SLOW_PATH_DECL(slow_path_tail_call)
{
    LLINT_BEGIN_NO_SET_PC();
    RELEASE_AND_RETURN(throwScope, genericCall(exec, pc->as<OpTailCall>(), CodeForCall));
}

LLINT_SLOW_PATH_DECL(slow_path_construct)
{
    LLINT_BEGIN_NO_SET_PC();
    RELEASE_AND_RETURN(throwScope, genericCall(exec, pc->as<OpConstruct>(), CodeForConstruct));
}

LLINT_SLOW_PATH_DECL(slow_path_size_frame_for_varargs)
{
    LLINT_BEGIN();
    // This needs to:
    // - Size and allocate a callee frame big enough for the variable arguments; the arguments
    //   themselves are copied in later by the matching varargs setup slow path.

    unsigned numUsedStackSlots;
    JSValue arguments;
    int firstVarArg;
    switch (pc->opcodeID()) {
    case op_call_varargs: {
        auto bytecode = pc->as<OpCallVarargs>();
        numUsedStackSlots = -bytecode.m_firstFree.offset();
        arguments = getOperand(exec, bytecode.m_arguments);
        firstVarArg = bytecode.m_firstVarArg;
        break;
    }
    case op_tail_call_varargs: {
        auto bytecode = pc->as<OpTailCallVarargs>();
        numUsedStackSlots = -bytecode.m_firstFree.offset();
        arguments = getOperand(exec, bytecode.m_arguments);
        firstVarArg = bytecode.m_firstVarArg;
        break;
    }
    case op_construct_varargs: {
        auto bytecode = pc->as<OpConstructVarargs>();
        numUsedStackSlots = -bytecode.m_firstFree.offset();
        arguments = getOperand(exec, bytecode.m_arguments);
        firstVarArg = bytecode.m_firstVarArg;
        break;
    }
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }
    unsigned length = sizeFrameForVarargs(exec, vm, arguments, numUsedStackSlots, firstVarArg);
    LLINT_CALL_CHECK_EXCEPTION(exec, exec);

    ExecState* execCallee = calleeFrameForVarargs(exec, numUsedStackSlots, length + 1);
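    // Stash the varargs length and the newly sized callee frame in the VM; the matching varargs
    // setup slow path picks them up from there.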
    vm.varargsLength = length;
    vm.newCallFrameReturnValue = execCallee;

    LLINT_RETURN_CALLEE_FRAME(execCallee);
}

LLINT_SLOW_PATH_DECL(slow_path_size_frame_for_forward_arguments)
{
    LLINT_BEGIN();
    // This needs to:
    // - Size and allocate a callee frame big enough to hold this frame's own arguments; they are
    //   forwarded into it later by the tail call's varargs setup slow path.

    auto bytecode = pc->as<OpTailCallForwardArguments>();
    unsigned numUsedStackSlots = -bytecode.m_firstFree.offset();

    unsigned arguments = sizeFrameForForwardArguments(exec, vm, numUsedStackSlots);
    LLINT_CALL_CHECK_EXCEPTION(exec, exec);

    ExecState* execCallee = calleeFrameForVarargs(exec, numUsedStackSlots, arguments + 1);

    vm.varargsLength = arguments;
    vm.newCallFrameReturnValue = execCallee;

    LLINT_RETURN_CALLEE_FRAME(execCallee);
}

enum class SetArgumentsWith {
    Object,
    CurrentArguments
};

template<typename Op>
inline SlowPathReturnType varargsSetup(ExecState* exec, const Instruction* pc, CodeSpecializationKind kind, SetArgumentsWith set)
{
    LLINT_BEGIN_NO_SET_PC();
    // This needs to:
    // - Fill in the call frame sized by the preceding size_frame slow path, either from an
    //   arguments-like object or by forwarding the current frame's arguments.
    // - Figure out what to call and compile it if necessary.
    // - Return a tuple of machine code address to call and the new call frame.

    auto bytecode = pc->as<Op>();
    JSValue calleeAsValue = getOperand(exec, bytecode.m_callee);

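    // Pick up the callee frame (and, below, the argument count) stashed in the VM by the
    // corresponding size_frame_for_* slow path.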
    ExecState* execCallee = vm.newCallFrameReturnValue;

    if (set == SetArgumentsWith::Object) {
        setupVarargsFrameAndSetThis(exec, execCallee, getOperand(exec, bytecode.m_thisValue), getOperand(exec, bytecode.m_arguments), bytecode.m_firstVarArg, vm.varargsLength);
        LLINT_CALL_CHECK_EXCEPTION(exec, exec);
    } else
        setupForwardArgumentsFrameAndSetThis(exec, execCallee, getOperand(exec, bytecode.m_thisValue), vm.varargsLength);

    execCallee->setCallerFrame(exec);
    execCallee->uncheckedR(CallFrameSlot::callee) = calleeAsValue;
    exec->setCurrentVPC(pc);

    RELEASE_AND_RETURN(throwScope, setUpCall(execCallee, kind, calleeAsValue));
}

LLINT_SLOW_PATH_DECL(slow_path_call_varargs)
{
    return varargsSetup<OpCallVarargs>(exec, pc, CodeForCall, SetArgumentsWith::Object);
}

LLINT_SLOW_PATH_DECL(slow_path_tail_call_varargs)
{
    return varargsSetup<OpTailCallVarargs>(exec, pc, CodeForCall, SetArgumentsWith::Object);
}

LLINT_SLOW_PATH_DECL(slow_path_tail_call_forward_arguments)
{
    return varargsSetup<OpTailCallForwardArguments>(exec, pc, CodeForCall, SetArgumentsWith::CurrentArguments);
}

LLINT_SLOW_PATH_DECL(slow_path_construct_varargs)
{
    return varargsSetup<OpConstructVarargs>(exec, pc, CodeForConstruct, SetArgumentsWith::Object);
}

inline SlowPathReturnType commonCallEval(ExecState* exec, const Instruction* pc, MacroAssemblerCodePtr<JSEntryPtrTag> returnPoint)
{
    LLINT_BEGIN_NO_SET_PC();
    auto bytecode = pc->as<OpCallEval>();
    JSValue calleeAsValue = getNonConstantOperand(exec, bytecode.m_callee);

    ExecState* execCallee = exec - bytecode.m_argv;

    execCallee->setArgumentCountIncludingThis(bytecode.m_argc);
    execCallee->setCallerFrame(exec);
    execCallee->uncheckedR(CallFrameSlot::callee) = calleeAsValue;
    execCallee->setReturnPC(returnPoint.executableAddress());
    execCallee->setCodeBlock(nullptr);
    exec->setCurrentVPC(pc);

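    // Only a direct call to the built-in eval function gets eval semantics here; any other callee
    // is dispatched as an ordinary call.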
    if (!isHostFunction(calleeAsValue, globalFuncEval))
        RELEASE_AND_RETURN(throwScope, setUpCall(execCallee, CodeForCall, calleeAsValue));

    vm.hostCallReturnValue = eval(execCallee);
    LLINT_CALL_RETURN(exec, execCallee, LLInt::getCodePtr(getHostCallReturnValue), CFunctionPtrTag);
}

LLINT_SLOW_PATH_DECL(slow_path_call_eval)
{
    return commonCallEval(exec, pc, LLInt::getCodePtr<JSEntryPtrTag>(llint_generic_return_point));
}

LLINT_SLOW_PATH_DECL(slow_path_call_eval_wide)
{
    return commonCallEval(exec, pc, LLInt::getWideCodePtr<JSEntryPtrTag>(llint_generic_return_point));
}

LLINT_SLOW_PATH_DECL(slow_path_strcat)
{
    LLINT_BEGIN();
    auto bytecode = pc->as<OpStrcat>();
    LLINT_RETURN(jsStringFromRegisterArray(exec, &exec->uncheckedR(bytecode.m_src), bytecode.m_count));
}

LLINT_SLOW_PATH_DECL(slow_path_to_primitive)
{
    LLINT_BEGIN();
    auto bytecode = pc->as<OpToPrimitive>();
    LLINT_RETURN(getOperand(exec, bytecode.m_src).toPrimitive(exec));
}

LLINT_SLOW_PATH_DECL(slow_path_throw)
{
    LLINT_BEGIN();
    auto bytecode = pc->as<OpThrow>();
    LLINT_THROW(getOperand(exec, bytecode.m_value));
}

LLINT_SLOW_PATH_DECL(slow_path_handle_traps)
{
    LLINT_BEGIN_NO_SET_PC();
    ASSERT(vm.needTrapHandling());
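    // Service whatever VM traps are pending (for example, a termination request). Handling a trap
    // may throw, and any resulting exception is reported back to the LLInt along with the frame.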
    vm.handleTraps(exec);
    UNUSED_PARAM(pc);
    LLINT_RETURN_TWO(throwScope.exception(), exec);
}

LLINT_SLOW_PATH_DECL(slow_path_debug)
{
    LLINT_BEGIN();
    auto bytecode = pc->as<OpDebug>();
    vm.interpreter->debug(exec, bytecode.m_debugHookType);

    LLINT_END();
}

LLINT_SLOW_PATH_DECL(slow_path_handle_exception)
{
    LLINT_BEGIN_NO_SET_PC();
    UNUSED_PARAM(throwScope);
    genericUnwind(&vm, exec);
    LLINT_END_IMPL();
}

LLINT_SLOW_PATH_DECL(slow_path_get_from_scope)
{
    LLINT_BEGIN();
    auto bytecode = pc->as<OpGetFromScope>();
    auto& metadata = bytecode.metadata(exec);
    const Identifier& ident = exec->codeBlock()->identifier(bytecode.m_var);
    JSObject* scope = jsCast<JSObject*>(getNonConstantOperand(exec, bytecode.m_scope));

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(metadata.m_getPutInfo.resolveType() != ModuleVar);

    LLINT_RETURN(scope->getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (!found) {
            if (metadata.m_getPutInfo.resolveMode() == ThrowIfNotFound)
                return throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
            return jsUndefined();
        }

        JSValue result = JSValue();
        if (scope->isGlobalLexicalEnvironment()) {
            // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
            result = slot.getValue(exec, ident);
            if (result == jsTDZValue())
                return throwException(exec, throwScope, createTDZError(exec));
        }

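        // Try to cache this resolution so future executions of this get_from_scope can use a faster
        // global-variable access.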
        CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, bytecode, scope, slot, ident);

        if (!result)
            return slot.getValue(exec, ident);
        return result;
    }));
}

LLINT_SLOW_PATH_DECL(slow_path_put_to_scope)
{
    LLINT_BEGIN();

    auto bytecode = pc->as<OpPutToScope>();
    auto& metadata = bytecode.metadata(exec);
    CodeBlock* codeBlock = exec->codeBlock();
    const Identifier& ident = codeBlock->identifier(bytecode.m_var);
    JSObject* scope = jsCast<JSObject*>(getNonConstantOperand(exec, bytecode.m_scope));
    JSValue value = getOperand(exec, bytecode.m_value);
    if (metadata.m_getPutInfo.resolveType() == LocalClosureVar) {
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(metadata.m_operand)).set(vm, environment, value);

        // Have to do this *after* the write, because if this puts the watchpoint set into the
        // IsWatched state, the variable must already hold its new value. Otherwise we might start
        // watching and constant-fold to the undefined value from before the assignment.
        if (metadata.m_watchpointSet)
            metadata.m_watchpointSet->touch(vm, "Executed op_put_scope<LocalClosureVar>");
        LLINT_END();
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    LLINT_CHECK_EXCEPTION();
    if (hasProperty
        && scope->isGlobalLexicalEnvironment()
        && !isInitialization(metadata.m_getPutInfo.initializationMode())) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue())
            LLINT_THROW(createTDZError(exec));
    }

    if (metadata.m_getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty)
        LLINT_THROW(createUndefinedVariableError(exec, ident));

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, isInitialization(metadata.m_getPutInfo.initializationMode()));
    scope->methodTable(vm)->put(scope, exec, ident, value, slot);

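    // As on the get_from_scope path, try to cache global puts so future executions can take the
    // fast path.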
    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, bytecode, scope, slot, ident);

    LLINT_END();
}

LLINT_SLOW_PATH_DECL(slow_path_check_if_exception_is_uncatchable_and_notify_profiler)
{
    LLINT_BEGIN();
    RELEASE_ASSERT(!!throwScope.exception());

    if (isTerminatedExecutionException(vm, throwScope.exception()))
        LLINT_RETURN_TWO(pc, bitwise_cast<void*>(static_cast<uintptr_t>(1)));
    LLINT_RETURN_TWO(pc, 0);
}

LLINT_SLOW_PATH_DECL(slow_path_log_shadow_chicken_prologue)
{
    LLINT_BEGIN();

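    // ShadowChicken keeps a shadow stack so the debugger can still show frames that tail calls have
    // deleted; record this frame's prologue in its log.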
    auto bytecode = pc->as<OpLogShadowChickenPrologue>();
    JSScope* scope = exec->uncheckedR(bytecode.m_scope).Register::scope();
    ShadowChicken* shadowChicken = vm.shadowChicken();
    RELEASE_ASSERT(shadowChicken);
    shadowChicken->log(vm, exec, ShadowChicken::Packet::prologue(exec->jsCallee(), exec, exec->callerFrame(), scope));

    LLINT_END();
}

LLINT_SLOW_PATH_DECL(slow_path_log_shadow_chicken_tail)
{
    LLINT_BEGIN();

    auto bytecode = pc->as<OpLogShadowChickenTail>();
    JSValue thisValue = getNonConstantOperand(exec, bytecode.m_thisValue);
    JSScope* scope = exec->uncheckedR(bytecode.m_scope).Register::scope();

#if USE(JSVALUE64)
    CallSiteIndex callSiteIndex(exec->codeBlock()->bytecodeOffset(pc));
#else
    CallSiteIndex callSiteIndex(pc);
#endif
    ShadowChicken* shadowChicken = vm.shadowChicken();
    RELEASE_ASSERT(shadowChicken);
    shadowChicken->log(vm, exec, ShadowChicken::Packet::tail(exec, thisValue, scope, exec->codeBlock(), callSiteIndex));

    LLINT_END();
}

LLINT_SLOW_PATH_DECL(slow_path_profile_catch)
{
    LLINT_BEGIN();

    exec->codeBlock()->ensureCatchLivenessIsComputedForBytecodeOffset(exec->bytecodeOffset());

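    // Snapshot the values of the locals that are live at this catch into their value profiles, so
    // the tiers above the LLInt have profile data for the values flowing into the handler.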
    auto bytecode = pc->as<OpCatch>();
    auto& metadata = bytecode.metadata(exec);
    metadata.m_buffer->forEach([&] (ValueProfileAndOperand& profile) {
        profile.m_profile.m_buckets[0] = JSValue::encode(exec->uncheckedR(profile.m_operand).jsValue());
    });

    LLINT_END();
}

LLINT_SLOW_PATH_DECL(slow_path_super_sampler_begin)
{
    // FIXME: It seems like we should be able to do this in asm but llint doesn't seem to like global variables.
    // See: https://bugs.webkit.org/show_bug.cgi?id=179438
    UNUSED_PARAM(exec);
    g_superSamplerCount++;
    LLINT_END_IMPL();
}

LLINT_SLOW_PATH_DECL(slow_path_super_sampler_end)
{
    // FIXME: It seems like we should be able to do this in asm but llint doesn't seem to like global variables.
    // See: https://bugs.webkit.org/show_bug.cgi?id=179438
    UNUSED_PARAM(exec);
    g_superSamplerCount--;
    LLINT_END_IMPL();
}

LLINT_SLOW_PATH_DECL(slow_path_out_of_line_jump_target)
{
    CodeBlock* codeBlock = exec->codeBlock();
    pc = codeBlock->outOfLineJumpTarget(pc);
    LLINT_END_IMPL();
}

extern "C" SlowPathReturnType llint_throw_stack_overflow_error(VM* vm, ProtoCallFrame* protoFrame)
{
    ExecState* exec = vm->topCallFrame;
    auto scope = DECLARE_THROW_SCOPE(*vm);

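    // If the stack overflowed before any JS call frame was established, fall back to the callee's
    // global exec so the error has a frame to be thrown from.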
    if (!exec)
        exec = protoFrame->callee()->globalObject(*vm)->globalExec();
    throwStackOverflowError(exec, scope);
    return encodeResult(0, 0);
}

#if ENABLE(C_LOOP)
extern "C" SlowPathReturnType llint_stack_check_at_vm_entry(VM* vm, Register* newTopOfStack)
{
    bool success = vm->ensureStackCapacityFor(newTopOfStack);
    return encodeResult(reinterpret_cast<void*>(success), 0);
}
#endif

extern "C" void llint_write_barrier_slow(ExecState* exec, JSCell* cell)
{
    VM& vm = exec->vm();
    vm.heap.writeBarrier(cell);
}

extern "C" NO_RETURN_DUE_TO_CRASH void llint_crash()
{
    CRASH();
}

} } // namespace JSC::LLInt
