/*
 * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 * Copyright (C) 2012 Igalia, S.L.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "BytecodeGenerator.h"

#include "ArithProfile.h"
#include "BuiltinExecutables.h"
#include "BuiltinNames.h"
#include "BytecodeGeneratorification.h"
#include "BytecodeLivenessAnalysis.h"
#include "BytecodeStructs.h"
#include "BytecodeUseDef.h"
#include "CatchScope.h"
#include "DefinePropertyAttributes.h"
#include "Interpreter.h"
#include "JSAsyncGeneratorFunction.h"
#include "JSBigInt.h"
#include "JSCInlines.h"
#include "JSFixedArray.h"
#include "JSFunction.h"
#include "JSGeneratorFunction.h"
#include "JSImmutableButterfly.h"
#include "JSLexicalEnvironment.h"
#include "JSTemplateObjectDescriptor.h"
#include "LowLevelInterpreter.h"
#include "Options.h"
#include "PreciseJumpTargetsInlines.h"
#include "StackAlignment.h"
#include "StrongInlines.h"
#include "SuperSamplerBytecodeScope.h"
#include "UnlinkedCodeBlock.h"
#include "UnlinkedEvalCodeBlock.h"
#include "UnlinkedFunctionCodeBlock.h"
#include "UnlinkedMetadataTableInlines.h"
#include "UnlinkedModuleProgramCodeBlock.h"
#include "UnlinkedProgramCodeBlock.h"
#include <wtf/BitVector.h>
#include <wtf/CommaPrinter.h>
#include <wtf/Optional.h>
#include <wtf/SmallPtrSet.h>
#include <wtf/StdLibExtras.h>
#include <wtf/text/WTFString.h>

namespace JSC {

template<typename CallOp, typename = std::true_type>
struct VarArgsOp;

template<typename CallOp>
struct VarArgsOp<CallOp, std::enable_if_t<std::is_same<CallOp, OpTailCall>::value, std::true_type>> {
    using type = OpTailCallVarargs;
};


template<typename CallOp>
struct VarArgsOp<CallOp, std::enable_if_t<!std::is_same<CallOp, OpTailCall>::value, std::true_type>> {
    using type = OpCallVarargs;
};


template<typename T>
static inline void shrinkToFit(T& segmentedVector)
{
    while (segmentedVector.size() && !segmentedVector.last().refCount())
        segmentedVector.removeLast();
}

void Label::setLocation(BytecodeGenerator& generator, unsigned location)
{
    m_location = location;

    for (auto offset : m_unresolvedJumps) {
        auto instruction = generator.m_writer.ref(offset);
        int target = m_location - offset;

#define CASE(__op) \
    case __op::opcodeID: \
        instruction->cast<__op>()->setTargetLabel(BoundLabel(target), [&]() { \
            generator.m_codeBlock->addOutOfLineJumpTarget(instruction.offset(), target); \
            return BoundLabel(); \
        }); \
        break;

        switch (instruction->opcodeID()) {
        CASE(OpJmp)
        CASE(OpJtrue)
        CASE(OpJfalse)
        CASE(OpJeqNull)
        CASE(OpJneqNull)
        CASE(OpJeq)
        CASE(OpJstricteq)
        CASE(OpJneq)
        CASE(OpJneqPtr)
        CASE(OpJnstricteq)
        CASE(OpJless)
        CASE(OpJlesseq)
        CASE(OpJgreater)
        CASE(OpJgreatereq)
        CASE(OpJnless)
        CASE(OpJnlesseq)
        CASE(OpJngreater)
        CASE(OpJngreatereq)
        CASE(OpJbelow)
        CASE(OpJbeloweq)
        default:
            ASSERT_NOT_REACHED();
        }
#undef CASE
    }
}

int BoundLabel::target()
{
    switch (m_type) {
    case Offset:
        return m_target;
    case GeneratorBackward:
        return m_target - m_generator->m_writer.position();
    case GeneratorForward:
        return 0;
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }
}

int BoundLabel::saveTarget()
{
    if (m_type == GeneratorForward) {
        m_savedTarget = m_generator->m_writer.position();
        return 0;
    }

    m_savedTarget = target();
    return m_savedTarget;
}

int BoundLabel::commitTarget()
{
    if (m_type == GeneratorForward) {
        m_label->m_unresolvedJumps.append(m_savedTarget);
        return 0;
    }

    return m_savedTarget;
}

void Variable::dump(PrintStream& out) const
{
    out.print(
        "{ident = ", m_ident,
        ", offset = ", m_offset,
        ", local = ", RawPointer(m_local),
        ", attributes = ", m_attributes,
        ", kind = ", m_kind,
        ", symbolTableConstantIndex = ", m_symbolTableConstantIndex,
        ", isLexicallyScoped = ", m_isLexicallyScoped, "}");
}

FinallyContext::FinallyContext(BytecodeGenerator& generator, Label& finallyLabel)
    : m_outerContext(generator.m_currentFinallyContext)
    , m_finallyLabel(&finallyLabel)
{
    ASSERT(m_jumps.isEmpty());
    m_completionRecord.typeRegister = generator.newTemporary();
    m_completionRecord.valueRegister = generator.newTemporary();
    generator.emitLoad(completionTypeRegister(), CompletionType::Normal);
    generator.moveEmptyValue(completionValueRegister());
}

ParserError BytecodeGenerator::generate()
{
    m_codeBlock->setThisRegister(m_thisRegister.virtualRegister());

    emitLogShadowChickenPrologueIfNecessary();

    // If we have declared a variable named "arguments" and we are using arguments then we should
    // perform that assignment now.
    if (m_needToInitializeArguments)
        initializeVariable(variable(propertyNames().arguments), m_argumentsRegister);

    if (m_restParameter)
        m_restParameter->emit(*this);

    {
        RefPtr<RegisterID> temp = newTemporary();
        RefPtr<RegisterID> topLevelScope;
        for (auto functionPair : m_functionsToInitialize) {
            FunctionMetadataNode* metadata = functionPair.first;
            FunctionVariableType functionType = functionPair.second;
            emitNewFunction(temp.get(), metadata);
            if (functionType == NormalFunctionVariable)
                initializeVariable(variable(metadata->ident()), temp.get());
            else if (functionType == TopLevelFunctionVariable) {
                if (!topLevelScope) {
                    // We know this will resolve to the top level scope or global object because our parser/global initialization code
                    // doesn't allow let/const/class variables to have the same names as functions.
                    // This is a top level function, and it's an error to ever create a top level function
                    // name that would resolve to a lexical variable. E.g:
                    // ```
                    // function f() {
                    //     {
                    //         let x;
                    //         {
                    //             //// error thrown here
                    //             eval("function x(){}");
                    //         }
                    //     }
                    // }
                    // ```
                    // Therefore, we're guaranteed to have this resolve to a top level variable.
                    RefPtr<RegisterID> topLevelObjectScope = emitResolveScope(nullptr, Variable(metadata->ident()));
                    topLevelScope = newBlockScopeVariable();
                    move(topLevelScope.get(), topLevelObjectScope.get());
                }
                emitPutToScope(topLevelScope.get(), Variable(metadata->ident()), temp.get(), ThrowIfNotFound, InitializationMode::NotInitialization);
            } else
                RELEASE_ASSERT_NOT_REACHED();
        }
    }

    bool callingClassConstructor = constructorKind() != ConstructorKind::None && !isConstructor();
    if (!callingClassConstructor)
        m_scopeNode->emitBytecode(*this);
    else {
        // At this point we would have emitted an unconditional throw followed by some nonsense that's
        // just an artifact of how this generator is structured. That code never runs, but it confuses
        // bytecode analyses because it constitutes an unterminated basic block. So, we terminate the
        // basic block the strongest way possible.
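        // For example (illustrative, not from the original source): for `class C { }; C();` the call
        // C() hits the unconditional "Cannot call a class constructor without |new|" throw emitted
        // earlier, so nothing past this point in the emitted bytecode can ever run.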
        emitUnreachable();
    }

    for (auto& handler : m_exceptionHandlersToEmit) {
        Ref<Label> realCatchTarget = newLabel();
        TryData* tryData = handler.tryData;

        OpCatch::emit(this, handler.exceptionRegister, handler.thrownValueRegister);
        realCatchTarget->setLocation(*this, m_lastInstruction.offset());
        if (handler.completionTypeRegister.isValid()) {
            RegisterID completionTypeRegister { handler.completionTypeRegister };
            CompletionType completionType =
                tryData->handlerType == HandlerType::Finally || tryData->handlerType == HandlerType::SynthesizedFinally
                ? CompletionType::Throw
                : CompletionType::Normal;
            emitLoad(&completionTypeRegister, completionType);
        }
        m_codeBlock->addJumpTarget(m_lastInstruction.offset());


        emitJump(tryData->target.get());
        tryData->target = WTFMove(realCatchTarget);
    }

    m_staticPropertyAnalyzer.kill();

    for (auto& range : m_tryRanges) {
        int start = range.start->bind();
        int end = range.end->bind();

        // This will happen for empty try blocks and for some cases of finally blocks:
        //
        // try {
        //     try {
        //     } finally {
        //         return 42;
        //         // *HERE*
        //     }
        // } finally {
        //     print("things");
        // }
        //
        // The return will pop scopes to execute the outer finally block. But this includes
        // popping the try context for the inner try. The try context is live in the fall-through
        // part of the finally block not because we will emit a handler that overlaps the finally,
        // but because we haven't yet had a chance to plant the catch target. Then when we finish
        // emitting code for the outer finally block, we re-push the try context, this time with a
        // new start index. But that means that the start index for the try range corresponding
        // to the inner-finally-following-the-return (marked as "*HERE*" above) will be greater
        // than the end index of the try block. This is harmless since end < start handlers will
        // never get matched in our logic, but we do the runtime a favor and choose to not emit
        // such handlers at all.
        if (end <= start)
            continue;

        UnlinkedHandlerInfo info(static_cast<uint32_t>(start), static_cast<uint32_t>(end),
            static_cast<uint32_t>(range.tryData->target->bind()), range.tryData->handlerType);
        m_codeBlock->addExceptionHandler(info);
    }


    if (isGeneratorOrAsyncFunctionBodyParseMode(m_codeBlock->parseMode()))
        performGeneratorification(*this, m_codeBlock.get(), m_writer, m_generatorFrameSymbolTable.get(), m_generatorFrameSymbolTableIndex);

    RELEASE_ASSERT(static_cast<unsigned>(m_codeBlock->numCalleeLocals()) < static_cast<unsigned>(FirstConstantRegisterIndex));
    m_codeBlock->setInstructions(m_writer.finalize());

    m_codeBlock->shrinkToFit();

    if (m_expressionTooDeep)
        return ParserError(ParserError::OutOfMemory);
    return ParserError(ParserError::ErrorNone);
}

BytecodeGenerator::BytecodeGenerator(VM& vm, ProgramNode* programNode, UnlinkedProgramCodeBlock* codeBlock, OptionSet<CodeGenerationMode> codeGenerationMode, const VariableEnvironment* parentScopeTDZVariables)
    : m_codeGenerationMode(codeGenerationMode)
    , m_scopeNode(programNode)
    , m_codeBlock(vm, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_codeType(GlobalCode)
    , m_vm(&vm)
    , m_needsToUpdateArrowFunctionContext(programNode->usesArrowFunction() || programNode->usesEval())
{
    ASSERT_UNUSED(parentScopeTDZVariables, !parentScopeTDZVariables->size());

    for (auto& constantRegister : m_linkTimeConstantRegisters)
        constantRegister = nullptr;

    allocateCalleeSaveSpace();

    m_codeBlock->setNumParameters(1); // Allocate space for "this"

    emitEnter();

    allocateAndEmitScope();

    emitCheckTraps();

    const FunctionStack& functionStack = programNode->functionStack();

    for (auto* function : functionStack)
        m_functionsToInitialize.append(std::make_pair(function, TopLevelFunctionVariable));

    if (Options::validateBytecode()) {
        for (auto& entry : programNode->varDeclarations())
            RELEASE_ASSERT(entry.value.isVar());
    }
    codeBlock->setVariableDeclarations(programNode->varDeclarations());
    codeBlock->setLexicalDeclarations(programNode->lexicalVariables());
    // Even though this program may have lexical variables that go under TDZ, when linking the get_from_scope/put_to_scope
    // operations that we emit, we will have ResolveTypes that implicitly do TDZ checks. Therefore, we don't need
    // additional TDZ checks on top of those. This is why we can omit pushing programNode->lexicalVariables()
    // to the TDZ stack.
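    // For example (illustrative, not from the original source): for top-level code like
    //     print(x);
    //     let x = 1;
    // the read of `x` goes through a get_from_scope whose linked ResolveType already performs the
    // "binding still uninitialized?" check and throws the ReferenceError, so no separate TDZ-check
    // bytecode is needed here.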

    if (needsToUpdateArrowFunctionContext()) {
        initializeArrowFunctionContextScopeIfNeeded();
        emitPutThisToArrowFunctionContextScope();
    }
}

BytecodeGenerator::BytecodeGenerator(VM& vm, FunctionNode* functionNode, UnlinkedFunctionCodeBlock* codeBlock, OptionSet<CodeGenerationMode> codeGenerationMode, const VariableEnvironment* parentScopeTDZVariables)
    : m_codeGenerationMode(codeGenerationMode)
    , m_scopeNode(functionNode)
    , m_codeBlock(vm, codeBlock)
    , m_codeType(FunctionCode)
    , m_vm(&vm)
    , m_isBuiltinFunction(codeBlock->isBuiltinFunction())
    , m_usesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode())
    // FIXME: We should be able to have tail call elimination with the profiler
    // enabled. This is currently not possible because the profiler expects
    // op_will_call / op_did_call pairs before and after a call, which are not
    // compatible with tail calls (we have no way of emitting op_did_call).
    // https://bugs.webkit.org/show_bug.cgi?id=148819
    , m_inTailPosition(Options::useTailCalls() && !isConstructor() && constructorKind() == ConstructorKind::None && isStrictMode())
    , m_needsToUpdateArrowFunctionContext(functionNode->usesArrowFunction() || functionNode->usesEval())
    , m_derivedContextType(codeBlock->derivedContextType())
{
    for (auto& constantRegister : m_linkTimeConstantRegisters)
        constantRegister = nullptr;

    allocateCalleeSaveSpace();

    SymbolTable* functionSymbolTable = SymbolTable::create(*m_vm);
    functionSymbolTable->setUsesNonStrictEval(m_usesNonStrictEval);
    int symbolTableConstantIndex = 0;

    FunctionParameters& parameters = *functionNode->parameters();
    // http://www.ecma-international.org/ecma-262/6.0/index.html#sec-functiondeclarationinstantiation
    // This implements IsSimpleParameterList in the ECMAScript 2015 spec.
    // If IsSimpleParameterList is false, we will create a strict-mode-like arguments object.
    // IsSimpleParameterList is false if the argument list contains any default parameter values,
    // a rest parameter, or any destructuring patterns.
    // If we do have default parameters, destructuring parameters, or a rest parameter, our parameters will be allocated in a different scope.
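    // For example (illustrative, not from the original source):
    //     function simple(a, b) { }                  // simple parameter list
    //     function withDefault(a = 1) { }            // default value => not simple
    //     function withPatterns({ a }, ...rest) { }  // destructuring / rest => not simple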
    bool isSimpleParameterList = parameters.isSimpleParameterList();

    SourceParseMode parseMode = codeBlock->parseMode();

    bool containsArrowOrEvalButNotInArrowBlock = ((functionNode->usesArrowFunction() && functionNode->doAnyInnerArrowFunctionsUseAnyFeature()) || functionNode->usesEval()) && !m_codeBlock->isArrowFunction();
    bool shouldCaptureSomeOfTheThings = shouldEmitDebugHooks() || functionNode->needsActivation() || containsArrowOrEvalButNotInArrowBlock;

    bool shouldCaptureAllOfTheThings = shouldEmitDebugHooks() || codeBlock->usesEval();
    bool needsArguments = ((functionNode->usesArguments() && !codeBlock->isArrowFunction()) || codeBlock->usesEval() || (functionNode->usesArrowFunction() && !codeBlock->isArrowFunction() && isArgumentsUsedInInnerArrowFunction()));

    if (isGeneratorOrAsyncFunctionBodyParseMode(parseMode)) {
        // Generators and async functions never provide "arguments"; an "arguments" reference will be resolved in the enclosing generator function scope.
        needsArguments = false;
    }

    if (isGeneratorOrAsyncFunctionWrapperParseMode(parseMode) && needsArguments) {
        // A generator does not provide "arguments"; instead, the wrapping GeneratorFunction provides it.
        // This is because a generator's arguments should be evaluated before the generator is started.
        // To achieve that, we evaluate these arguments as arguments of the wrapping generator function, and reference them from the generator.
        //
        // function *gen(a, b = hello())
        // {
        //     return {
        //         @generatorNext: function (@generator, @generatorState, @generatorValue, @generatorResumeMode, @generatorFrame)
        //         {
        //             arguments; // This `arguments` should refer to gen's arguments.
        //             ...
        //         }
        //     }
        // }
        shouldCaptureSomeOfTheThings = true;
    }

    if (shouldCaptureAllOfTheThings)
        functionNode->varDeclarations().markAllVariablesAsCaptured();

    auto captures = scopedLambda<bool (UniquedStringImpl*)>([&] (UniquedStringImpl* uid) -> bool {
        if (!shouldCaptureSomeOfTheThings)
            return false;
        if (needsArguments && uid == propertyNames().arguments.impl()) {
            // Actually, we only need to capture the arguments object when we "need full activation"
            // because of name scopes. But historically we did it this way, so for now we just preserve
            // the old behavior.
            // FIXME: https://bugs.webkit.org/show_bug.cgi?id=143072
            return true;
        }
        return functionNode->captures(uid);
    });
    auto varKind = [&] (UniquedStringImpl* uid) -> VarKind {
        return captures(uid) ? VarKind::Scope : VarKind::Stack;
    };

    m_calleeRegister.setIndex(CallFrameSlot::callee);

    initializeParameters(parameters);
    ASSERT(!(isSimpleParameterList && m_restParameter));

    emitEnter();

    if (isGeneratorOrAsyncFunctionBodyParseMode(parseMode))
        m_generatorRegister = &m_parameters[1];

    allocateAndEmitScope();

    emitCheckTraps();

    if (functionNameIsInScope(functionNode->ident(), functionNode->functionMode())) {
        ASSERT(parseMode != SourceParseMode::GeneratorBodyMode);
        ASSERT(!isAsyncFunctionBodyParseMode(parseMode));
        bool isDynamicScope = functionNameScopeIsDynamic(codeBlock->usesEval(), codeBlock->isStrictMode());
        bool isFunctionNameCaptured = captures(functionNode->ident().impl());
        bool markAsCaptured = isDynamicScope || isFunctionNameCaptured;
        emitPushFunctionNameScope(functionNode->ident(), &m_calleeRegister, markAsCaptured);
    }

    if (shouldCaptureSomeOfTheThings)
        m_lexicalEnvironmentRegister = addVar();

    if (isGeneratorOrAsyncFunctionBodyParseMode(parseMode) || shouldCaptureSomeOfTheThings || shouldEmitTypeProfilerHooks())
        symbolTableConstantIndex = addConstantValue(functionSymbolTable)->index();

    // We can allocate the "var" environment if we don't have default parameter expressions. If we have
    // default parameter expressions, we have to hold off on allocating the "var" environment because
    // the parent scope of the "var" environment is the parameter environment.
    if (isSimpleParameterList)
        initializeVarLexicalEnvironment(symbolTableConstantIndex, functionSymbolTable, shouldCaptureSomeOfTheThings);

    // Figure out some interesting facts about our arguments.
    bool capturesAnyArgumentByName = false;
    if (functionNode->hasCapturedVariables()) {
        FunctionParameters& parameters = *functionNode->parameters();
        for (size_t i = 0; i < parameters.size(); ++i) {
            auto pattern = parameters.at(i).first;
            if (!pattern->isBindingNode())
                continue;
            const Identifier& ident = static_cast<const BindingNode*>(pattern)->boundProperty();
            capturesAnyArgumentByName |= captures(ident.impl());
        }
    }

    if (capturesAnyArgumentByName)
        ASSERT(m_lexicalEnvironmentRegister);

    // Need to know what our functions are called. Parameters have some goofy behaviors when it
    // comes to functions of the same name.
    for (FunctionMetadataNode* function : functionNode->functionStack())
        m_functions.add(function->ident().impl());

    if (needsArguments) {
        // Create the arguments object now. We may put the arguments object into the activation if
        // it is captured. Either way, we create two arguments object variables: one is our
        // private variable that is immutable, and another that is the user-visible variable. The
        // immutable one is only used here, or during formal parameter resolutions if we opt for
        // DirectArguments.

        m_argumentsRegister = addVar();
        m_argumentsRegister->ref();
    }

    if (needsArguments && !codeBlock->isStrictMode() && isSimpleParameterList) {
        // If we captured any formal parameter by name, then we use ScopedArguments. Otherwise we
        // use DirectArguments. With ScopedArguments, we lift all of our arguments into the
        // activation.
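        // For example (illustrative, not from the original source):
        //     function f(x) { arguments[0] = 1; return () => x; }
        // Here the parameter x is captured by the arrow function, so we use ScopedArguments, and the
        // sloppy-mode write through arguments[0] stays visible through x; with no captured parameters
        // we would use DirectArguments instead.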

        if (capturesAnyArgumentByName) {
            functionSymbolTable->setArgumentsLength(vm, parameters.size());

            // For each parameter, we have two possibilities:
            // Either it's a binding node with no function overlap, in which case it gets a name
            // in the symbol table - or it just gets space reserved in the symbol table. Either
            // way we lift the value into the scope.
            for (unsigned i = 0; i < parameters.size(); ++i) {
                ScopeOffset offset = functionSymbolTable->takeNextScopeOffset(NoLockingNecessary);
                functionSymbolTable->setArgumentOffset(vm, i, offset);
                if (UniquedStringImpl* name = visibleNameForParameter(parameters.at(i).first)) {
                    VarOffset varOffset(offset);
                    SymbolTableEntry entry(varOffset);
                    // Stores to these variables via the ScopedArguments object will not do
                    // notifyWrite(), since that would be cumbersome. Also, watching formal
                    // parameters when "arguments" is in play is unlikely to be super profitable.
                    // So, we just disable it.
                    entry.disableWatching(*m_vm);
                    functionSymbolTable->set(NoLockingNecessary, name, entry);
                }
                OpPutToScope::emit(this, m_lexicalEnvironmentRegister, UINT_MAX, virtualRegisterForArgument(1 + i), GetPutInfo(ThrowIfNotFound, LocalClosureVar, InitializationMode::NotInitialization), SymbolTableOrScopeDepth::symbolTable(VirtualRegister { symbolTableConstantIndex }), offset.offset());
            }

            // This creates a scoped arguments object and copies the overflow arguments into the
            // scope. It's the equivalent of calling ScopedArguments::createByCopying().
            OpCreateScopedArguments::emit(this, m_argumentsRegister, m_lexicalEnvironmentRegister);
        } else {
            // We're going to put all parameters into the DirectArguments object. First ensure
            // that the symbol table knows that this is happening.
            for (unsigned i = 0; i < parameters.size(); ++i) {
                if (UniquedStringImpl* name = visibleNameForParameter(parameters.at(i).first))
                    functionSymbolTable->set(NoLockingNecessary, name, SymbolTableEntry(VarOffset(DirectArgumentsOffset(i))));
            }

            OpCreateDirectArguments::emit(this, m_argumentsRegister);
        }
    } else if (isSimpleParameterList) {
        // Create the formal parameters the normal way. Any of them could be captured, or not. If
        // captured, lift them into the scope. We cannot do this if we have default parameter expressions
        // because when default parameter expressions exist, they belong in their own lexical environment
        // separate from the "var" lexical environment.
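        // For example (illustrative, not from the original source):
        //     function f(x, y = () => x) { var x = 1; return y(); }
        // has default parameter expressions, so its parameters live in their own environment whose
        // child is the "var" environment; the arrow captures the parameter binding of x, and f(0)
        // returns 0 even though the separate "var" x was later set to 1.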
        for (unsigned i = 0; i < parameters.size(); ++i) {
            UniquedStringImpl* name = visibleNameForParameter(parameters.at(i).first);
            if (!name)
                continue;

            if (!captures(name)) {
                // This is the easy case - just tell the symbol table about the argument. It will
                // be accessed directly.
                functionSymbolTable->set(NoLockingNecessary, name, SymbolTableEntry(VarOffset(virtualRegisterForArgument(1 + i))));
                continue;
            }

            ScopeOffset offset = functionSymbolTable->takeNextScopeOffset(NoLockingNecessary);
            const Identifier& ident =
                static_cast<const BindingNode*>(parameters.at(i).first)->boundProperty();
            functionSymbolTable->set(NoLockingNecessary, name, SymbolTableEntry(VarOffset(offset)));

            OpPutToScope::emit(this, m_lexicalEnvironmentRegister, addConstant(ident), virtualRegisterForArgument(1 + i), GetPutInfo(ThrowIfNotFound, LocalClosureVar, InitializationMode::NotInitialization), SymbolTableOrScopeDepth::symbolTable(VirtualRegister { symbolTableConstantIndex }), offset.offset());
        }
    }

    if (needsArguments && (codeBlock->isStrictMode() || !isSimpleParameterList)) {
        // Allocate a cloned arguments object.
        OpCreateClonedArguments::emit(this, m_argumentsRegister);
    }

    // There are some variables that need to be preinitialized to something other than Undefined:
    //
    // - "arguments": unless it's used as a function or parameter, this should refer to the
    //   arguments object.
    //
    // - functions: these always override everything else.
    //
    // The most logical way to do all of this is to initialize none of the variables until now,
    // and then initialize them in BytecodeGenerator::generate() in such an order that the rules
    // for how these things override each other end up holding. We would initialize "arguments" first,
    // then all arguments, then the functions.
    //
    // But some arguments are already initialized by default, since if they aren't captured and we
    // don't have "arguments" then we just point the symbol table at the stack slot of those
    // arguments. We end up initializing the rest of the arguments that have an uncomplicated
    // binding (i.e. don't involve destructuring) above when figuring out how to lay them out,
    // because that's just the simplest thing. This means that when we initialize them, we have to
    // watch out for the things that override arguments (namely, functions).

    // This is our final act of weirdness. "arguments" is overridden by everything except the
    // callee. We add it to the symbol table if it's not already there and it's not an argument.
    bool shouldCreateArgumentsVariableInParameterScope = false;
    if (needsArguments) {
        // If "arguments" is overridden by a function or destructuring parameter name, then it's
        // OK for us to call createVariable() because it won't change anything. It's also OK for
        // us to then tell BytecodeGenerator::generate() to write to it because it will do so
        // before it initializes functions and destructuring parameters. But if "arguments" is
        // overridden by a "simple" function parameter, then we have to bail: createVariable()
        // would assert and BytecodeGenerator::generate() would write to "arguments" after the
        // argument value had already been properly initialized.
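        // For example (illustrative, not from the original source):
        //     function f(arguments) { return arguments; }    // "simple" parameter named arguments: bail out
        //     function g() { function arguments() { } }      // a function overrides "arguments": fine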

        bool haveParameterNamedArguments = false;
        for (unsigned i = 0; i < parameters.size(); ++i) {
            UniquedStringImpl* name = visibleNameForParameter(parameters.at(i).first);
            if (name == propertyNames().arguments.impl()) {
                haveParameterNamedArguments = true;
                break;
            }
        }

        bool shouldCreateArgumentsVariable = !haveParameterNamedArguments
            && !SourceParseModeSet(SourceParseMode::ArrowFunctionMode, SourceParseMode::AsyncArrowFunctionMode).contains(m_codeBlock->parseMode());
        shouldCreateArgumentsVariableInParameterScope = shouldCreateArgumentsVariable && !isSimpleParameterList;
        // Do not create the arguments variable for arrow functions; the value will be loaded from the parent scope.
        if (shouldCreateArgumentsVariable && !shouldCreateArgumentsVariableInParameterScope) {
            createVariable(
                propertyNames().arguments, varKind(propertyNames().arguments.impl()), functionSymbolTable);

            m_needToInitializeArguments = true;
        }
    }

    for (FunctionMetadataNode* function : functionNode->functionStack()) {
        const Identifier& ident = function->ident();
        createVariable(ident, varKind(ident.impl()), functionSymbolTable);
        m_functionsToInitialize.append(std::make_pair(function, NormalFunctionVariable));
    }
    for (auto& entry : functionNode->varDeclarations()) {
        ASSERT(!entry.value.isLet() && !entry.value.isConst());
        if (!entry.value.isVar()) // This is either a parameter or callee.
            continue;
        if (shouldCreateArgumentsVariableInParameterScope && entry.key.get() == propertyNames().arguments.impl())
            continue;
        createVariable(Identifier::fromUid(m_vm, entry.key.get()), varKind(entry.key.get()), functionSymbolTable, IgnoreExisting);
    }


    m_newTargetRegister = addVar();
    switch (parseMode) {
    case SourceParseMode::GeneratorWrapperFunctionMode:
    case SourceParseMode::GeneratorWrapperMethodMode:
    case SourceParseMode::AsyncGeneratorWrapperMethodMode:
    case SourceParseMode::AsyncGeneratorWrapperFunctionMode: {
        m_generatorRegister = addVar();

        // FIXME: Emit to_this only when Generator uses it.
        // https://bugs.webkit.org/show_bug.cgi?id=151586
        emitToThis();

        move(m_generatorRegister, &m_calleeRegister);
        emitCreateThis(m_generatorRegister);
        break;
    }

    case SourceParseMode::AsyncArrowFunctionMode:
    case SourceParseMode::AsyncMethodMode:
    case SourceParseMode::AsyncFunctionMode: {
        ASSERT(!isConstructor());
        ASSERT(constructorKind() == ConstructorKind::None);
        m_generatorRegister = addVar();
        m_promiseCapabilityRegister = addVar();

        if (parseMode != SourceParseMode::AsyncArrowFunctionMode) {
            // FIXME: Emit to_this only when AsyncFunctionBody uses it.
            // https://bugs.webkit.org/show_bug.cgi?id=151586
            emitToThis();
        }

        emitNewObject(m_generatorRegister);

        // let promiseCapability be @newPromiseCapability(@Promise)
        auto varNewPromiseCapability = variable(propertyNames().builtinNames().newPromiseCapabilityPrivateName());
        RefPtr<RegisterID> scope = newTemporary();
        move(scope.get(), emitResolveScope(scope.get(), varNewPromiseCapability));
        RefPtr<RegisterID> newPromiseCapability = emitGetFromScope(newTemporary(), scope.get(), varNewPromiseCapability, ThrowIfNotFound);

        CallArguments args(*this, nullptr, 1);
        emitLoad(args.thisRegister(), jsUndefined());

        auto& builtinNames = propertyNames().builtinNames();
        auto varPromiseConstructor = variable(m_isBuiltinFunction ? builtinNames.InternalPromisePrivateName() : builtinNames.PromisePrivateName());
        move(scope.get(), emitResolveScope(scope.get(), varPromiseConstructor));
        emitGetFromScope(args.argumentRegister(0), scope.get(), varPromiseConstructor, ThrowIfNotFound);

        // JSTextPosition(int _line, int _offset, int _lineStartOffset)
        JSTextPosition divot(m_scopeNode->firstLine(), m_scopeNode->startOffset(), m_scopeNode->lineStartOffset());
        emitCall(promiseCapabilityRegister(), newPromiseCapability.get(), NoExpectedFunction, args, divot, divot, divot, DebuggableCall::No);
        break;
    }

    case SourceParseMode::AsyncGeneratorBodyMode:
    case SourceParseMode::AsyncFunctionBodyMode:
    case SourceParseMode::AsyncArrowFunctionBodyMode:
    case SourceParseMode::GeneratorBodyMode: {
        // |this| is already filled correctly before here.
        emitLoad(m_newTargetRegister, jsUndefined());
        break;
    }

    default: {
        if (SourceParseMode::ArrowFunctionMode != parseMode) {
            if (isConstructor()) {
                move(m_newTargetRegister, &m_thisRegister);
                if (constructorKind() == ConstructorKind::Extends) {
                    moveEmptyValue(&m_thisRegister);
                } else
                    emitCreateThis(&m_thisRegister);
            } else if (constructorKind() != ConstructorKind::None)
                emitThrowTypeError("Cannot call a class constructor without |new|");
            else {
                bool shouldEmitToThis = false;
                if (functionNode->usesThis() || codeBlock->usesEval() || m_scopeNode->doAnyInnerArrowFunctionsUseThis() || m_scopeNode->doAnyInnerArrowFunctionsUseEval())
                    shouldEmitToThis = true;
                else if ((functionNode->usesSuperProperty() || m_scopeNode->doAnyInnerArrowFunctionsUseSuperProperty()) && !codeBlock->isStrictMode()) {
                    // We must emit to_this when we're not in strict mode because we
                    // will convert |this| to an object, and that object may be passed
                    // to a strict function as |this|. This is observable because that
                    // strict function's to_this will just return the object.
                    //
                    // We don't need to emit this for strict-mode code because
                    // strict-mode code may call another strict function, which will
                    // to_this if it directly uses this; this is OK, because we defer
                    // to_this until |this| is used directly. Strict-mode code might
                    // also call a sloppy mode function, and that will to_this, which
                    // will defer the conversion, again, until necessary.
                    shouldEmitToThis = true;
                }

                if (shouldEmitToThis)
                    emitToThis();
            }
        }
        break;
    }
    }

    // We need to load |super| and |this| for arrow functions before initializeDefaultParameterValuesAndSetupFunctionScopeStack()
    // if we have default parameter expressions, because the |super| and |this| values can be used there.
    if ((SourceParseModeSet(SourceParseMode::ArrowFunctionMode, SourceParseMode::AsyncArrowFunctionMode).contains(parseMode) && !isSimpleParameterList) || parseMode == SourceParseMode::AsyncArrowFunctionBodyMode) {
        if (functionNode->usesThis() || functionNode->usesSuperProperty())
            emitLoadThisFromArrowFunctionLexicalEnvironment();

        if (m_scopeNode->usesNewTarget() || m_scopeNode->usesSuperCall())
            emitLoadNewTargetFromArrowFunctionLexicalEnvironment();
    }

    if (needsToUpdateArrowFunctionContext() && !codeBlock->isArrowFunction()) {
        bool canReuseLexicalEnvironment = isSimpleParameterList;
        initializeArrowFunctionContextScopeIfNeeded(functionSymbolTable, canReuseLexicalEnvironment);
        emitPutThisToArrowFunctionContextScope();
        emitPutNewTargetToArrowFunctionContextScope();
        emitPutDerivedConstructorToArrowFunctionContextScope();
    }

    // All "addVar()"s need to happen before "initializeDefaultParameterValuesAndSetupFunctionScopeStack()" is called
    // because a function's default parameter ExpressionNodes will use temporary registers.
    pushTDZVariables(*parentScopeTDZVariables, TDZCheckOptimization::DoNotOptimize, TDZRequirement::UnderTDZ);

    Ref<Label> catchLabel = newLabel();
    TryData* tryFormalParametersData = nullptr;
    bool needTryCatch = isAsyncFunctionWrapperParseMode(parseMode) && !isSimpleParameterList;
    if (needTryCatch) {
        Ref<Label> tryFormalParametersStart = newEmittedLabel();
        tryFormalParametersData = pushTry(tryFormalParametersStart.get(), catchLabel.get(), HandlerType::SynthesizedCatch);
    }

    initializeDefaultParameterValuesAndSetupFunctionScopeStack(parameters, isSimpleParameterList, functionNode, functionSymbolTable, symbolTableConstantIndex, captures, shouldCreateArgumentsVariableInParameterScope);

    if (needTryCatch) {
        Ref<Label> didNotThrow = newLabel();
        emitJump(didNotThrow.get());
        emitLabel(catchLabel.get());
        popTry(tryFormalParametersData, catchLabel.get());

        RefPtr<RegisterID> thrownValue = newTemporary();
        emitOutOfLineCatchHandler(thrownValue.get(), nullptr, tryFormalParametersData);

        // return promiseCapability.@reject(thrownValue)
        RefPtr<RegisterID> reject = emitGetById(newTemporary(), promiseCapabilityRegister(), m_vm->propertyNames->builtinNames().rejectPrivateName());

        CallArguments args(*this, nullptr, 1);
        emitLoad(args.thisRegister(), jsUndefined());
        move(args.argumentRegister(0), thrownValue.get());

        JSTextPosition divot(functionNode->firstLine(), functionNode->startOffset(), functionNode->lineStartOffset());

        RefPtr<RegisterID> result = emitCall(newTemporary(), reject.get(), NoExpectedFunction, args, divot, divot, divot, DebuggableCall::No);
        emitReturn(emitGetById(newTemporary(), promiseCapabilityRegister(), m_vm->propertyNames->builtinNames().promisePrivateName()));

        emitLabel(didNotThrow.get());
    }

    // If we don't have default parameter expressions, then loading |this| inside an arrow function must be done
    // after initializeDefaultParameterValuesAndSetupFunctionScopeStack() because that function sets up the
    // SymbolTable stack, and emitLoadThisFromArrowFunctionLexicalEnvironment() consults the SymbolTable stack.
    if (SourceParseModeSet(SourceParseMode::ArrowFunctionMode, SourceParseMode::AsyncArrowFunctionMode).contains(parseMode) && isSimpleParameterList) {
        if (functionNode->usesThis() || functionNode->usesSuperProperty())
            emitLoadThisFromArrowFunctionLexicalEnvironment();

        if (m_scopeNode->usesNewTarget() || m_scopeNode->usesSuperCall())
            emitLoadNewTargetFromArrowFunctionLexicalEnvironment();
    }

    // Set up the lexical environment scope as the generator frame. We store the saved and resumed generator registers into this scope with symbol keys.
    // Since they are symbol-keyed, these variables cannot be reached from ordinary user code.
    if (isGeneratorOrAsyncFunctionBodyParseMode(parseMode)) {
        m_generatorFrameSymbolTable.set(*m_vm, functionSymbolTable);
        m_generatorFrameSymbolTableIndex = symbolTableConstantIndex;
        if (m_lexicalEnvironmentRegister)
            move(generatorFrameRegister(), m_lexicalEnvironmentRegister);
        else {
            // It is possible that the generator does not need to suspend or resume any registers.
            // In that case, we would like to avoid creating a lexical environment if we can.
            // op_create_generator_frame_environment is a marker, similar to op_yield:
            // generatorification inserts the lexical environment creation if it is necessary, and otherwise converts this to `op_mov frame, undefined`.
            OpCreateGeneratorFrameEnvironment::emit(this, generatorFrameRegister(), scopeRegister(), VirtualRegister { symbolTableConstantIndex }, addConstantValue(jsUndefined()));
        }
        emitPutById(generatorRegister(), propertyNames().builtinNames().generatorFramePrivateName(), generatorFrameRegister());
    }

    bool shouldInitializeBlockScopedFunctions = false; // We generate top-level function declarations in ::generate().
    pushLexicalScope(m_scopeNode, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, shouldInitializeBlockScopedFunctions);
}

BytecodeGenerator::BytecodeGenerator(VM& vm, EvalNode* evalNode, UnlinkedEvalCodeBlock* codeBlock, OptionSet<CodeGenerationMode> codeGenerationMode, const VariableEnvironment* parentScopeTDZVariables)
    : m_codeGenerationMode(codeGenerationMode)
    , m_scopeNode(evalNode)
    , m_codeBlock(vm, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_codeType(EvalCode)
    , m_vm(&vm)
    , m_usesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode())
    , m_needsToUpdateArrowFunctionContext(evalNode->usesArrowFunction() || evalNode->usesEval())
    , m_derivedContextType(codeBlock->derivedContextType())
{
    for (auto& constantRegister : m_linkTimeConstantRegisters)
        constantRegister = nullptr;

    allocateCalleeSaveSpace();

    m_codeBlock->setNumParameters(1);

    pushTDZVariables(*parentScopeTDZVariables, TDZCheckOptimization::DoNotOptimize, TDZRequirement::UnderTDZ);

    emitEnter();

    allocateAndEmitScope();

    emitCheckTraps();

    for (FunctionMetadataNode* function : evalNode->functionStack()) {
        m_codeBlock->addFunctionDecl(makeFunction(function));
        m_functionsToInitialize.append(std::make_pair(function, TopLevelFunctionVariable));
    }

    const VariableEnvironment& varDeclarations = evalNode->varDeclarations();
    Vector<Identifier, 0, UnsafeVectorOverflow> variables;
    Vector<Identifier, 0, UnsafeVectorOverflow> hoistedFunctions;
    for (auto& entry : varDeclarations) {
        ASSERT(entry.value.isVar());
        ASSERT(entry.key->isAtomic() || entry.key->isSymbol());
        if (entry.value.isSloppyModeHoistingCandidate())
            hoistedFunctions.append(Identifier::fromUid(m_vm, entry.key.get()));
        else
            variables.append(Identifier::fromUid(m_vm, entry.key.get()));
    }
    codeBlock->adoptVariables(variables);
    codeBlock->adoptFunctionHoistingCandidates(WTFMove(hoistedFunctions));

    if (evalNode->usesSuperCall() || evalNode->usesNewTarget())
        m_newTargetRegister = addVar();

    if (codeBlock->isArrowFunctionContext() && (evalNode->usesThis() || evalNode->usesSuperProperty()))
        emitLoadThisFromArrowFunctionLexicalEnvironment();

    if (evalNode->usesSuperCall() || evalNode->usesNewTarget())
        emitLoadNewTargetFromArrowFunctionLexicalEnvironment();

    if (needsToUpdateArrowFunctionContext() && !codeBlock->isArrowFunctionContext() && !isDerivedConstructorContext()) {
        initializeArrowFunctionContextScopeIfNeeded();
        emitPutThisToArrowFunctionContextScope();
    }

    bool shouldInitializeBlockScopedFunctions = false; // We generate top-level function declarations in ::generate().
    pushLexicalScope(m_scopeNode, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, shouldInitializeBlockScopedFunctions);
}

BytecodeGenerator::BytecodeGenerator(VM& vm, ModuleProgramNode* moduleProgramNode, UnlinkedModuleProgramCodeBlock* codeBlock, OptionSet<CodeGenerationMode> codeGenerationMode, const VariableEnvironment* parentScopeTDZVariables)
    : m_codeGenerationMode(codeGenerationMode)
    , m_scopeNode(moduleProgramNode)
    , m_codeBlock(vm, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_codeType(ModuleCode)
    , m_vm(&vm)
    , m_usesNonStrictEval(false)
    , m_needsToUpdateArrowFunctionContext(moduleProgramNode->usesArrowFunction() || moduleProgramNode->usesEval())
{
    ASSERT_UNUSED(parentScopeTDZVariables, !parentScopeTDZVariables->size());

    for (auto& constantRegister : m_linkTimeConstantRegisters)
        constantRegister = nullptr;

    allocateCalleeSaveSpace();

    SymbolTable* moduleEnvironmentSymbolTable = SymbolTable::create(*m_vm);
    moduleEnvironmentSymbolTable->setUsesNonStrictEval(m_usesNonStrictEval);
    moduleEnvironmentSymbolTable->setScopeType(SymbolTable::ScopeType::LexicalScope);

    bool shouldCaptureAllOfTheThings = shouldEmitDebugHooks() || codeBlock->usesEval();
    if (shouldCaptureAllOfTheThings)
        moduleProgramNode->varDeclarations().markAllVariablesAsCaptured();

    auto captures = [&] (UniquedStringImpl* uid) -> bool {
        return moduleProgramNode->captures(uid);
    };
    auto lookUpVarKind = [&] (UniquedStringImpl* uid, const VariableEnvironmentEntry& entry) -> VarKind {
        // Allocate the exported variables in the module environment.
        if (entry.isExported())
            return VarKind::Scope;

        // Allocate the namespace variables in the module environment so that they can be
        // instantiated from outside the module code.
        if (entry.isImportedNamespace())
            return VarKind::Scope;

        if (entry.isCaptured())
            return VarKind::Scope;
        return captures(uid) ? VarKind::Scope : VarKind::Stack;
    };

    emitEnter();

    allocateAndEmitScope();

    emitCheckTraps();

    m_calleeRegister.setIndex(CallFrameSlot::callee);

    m_codeBlock->setNumParameters(1); // Allocate space for "this"

    // Now declare all variables.

    createVariable(m_vm->propertyNames->builtinNames().metaPrivateName(), VarKind::Scope, moduleEnvironmentSymbolTable, VerifyExisting);

    for (auto& entry : moduleProgramNode->varDeclarations()) {
        ASSERT(!entry.value.isLet() && !entry.value.isConst());
        if (!entry.value.isVar()) // This is either a parameter or callee.
            continue;
        // Imported bindings are not allocated in the module environment the way usual variables are.
        // These references remain "Dynamic" in the unlinked code block. Later, when linking
        // the code block, we resolve the reference to "ModuleVar".
        if (entry.value.isImported() && !entry.value.isImportedNamespace())
            continue;
        createVariable(Identifier::fromUid(m_vm, entry.key.get()), lookUpVarKind(entry.key.get(), entry.value), moduleEnvironmentSymbolTable, IgnoreExisting);
    }

    VariableEnvironment& lexicalVariables = moduleProgramNode->lexicalVariables();
    instantiateLexicalVariables(lexicalVariables, moduleEnvironmentSymbolTable, ScopeRegisterType::Block, lookUpVarKind);

    // We keep the symbol table in the constant pool.
    RegisterID* constantSymbolTable = nullptr;
    if (shouldEmitTypeProfilerHooks())
        constantSymbolTable = addConstantValue(moduleEnvironmentSymbolTable);
    else
        constantSymbolTable = addConstantValue(moduleEnvironmentSymbolTable->cloneScopePart(*m_vm));

    pushTDZVariables(lexicalVariables, TDZCheckOptimization::Optimize, TDZRequirement::UnderTDZ);
    bool isWithScope = false;
    m_lexicalScopeStack.append({ moduleEnvironmentSymbolTable, m_topMostScope, isWithScope, constantSymbolTable->index() });
    emitPrefillStackTDZVariables(lexicalVariables, moduleEnvironmentSymbolTable);

    // makeFunction assumes that the TDZ stack entries are correct, so it should be called
    // after our lexical environment has been pushed onto the TDZ stack.

    for (FunctionMetadataNode* function : moduleProgramNode->functionStack()) {
        const auto& iterator = moduleProgramNode->varDeclarations().find(function->ident().impl());
        RELEASE_ASSERT(iterator != moduleProgramNode->varDeclarations().end());
        RELEASE_ASSERT(!iterator->value.isImported());

        VarKind varKind = lookUpVarKind(iterator->key.get(), iterator->value);
        if (varKind == VarKind::Scope) {
            // http://www.ecma-international.org/ecma-262/6.0/#sec-moduledeclarationinstantiation
            // Section 15.2.1.16.4, step 16-a-iv-1.
            // All heap allocated function declarations should be instantiated when the module environment
            // is created. They include the exported function declarations and not-exported-but-heap-allocated
            // function declarations. This is required because exported functions should be instantiated before
            // executing any module in the dependency graph. This enables the modules to link the imported
            // bindings before executing any module code.
            //
            // And since function declarations are instantiated before executing the module body code, the spec
            // allows the functions inside a module to be executed before that module's body is executed under
            // circular dependencies. The following is an example.
            //
            // Module A (executed first):
            // import { b } from "B";
            // // Here, the module "B" is not executed yet, but the function declaration is already instantiated.
            // // So we can call the function exported from "B".
            // b();
            //
            // export function a() {
            // }
            //
            // Module B (executed second):
            // import { a } from "A";
            //
            // export function b() {
            //     c();
            // }
            //
            // // c is not exported, but since it is referenced from b, we should instantiate it before
            // // executing the "B" module code.
            // function c() {
            //     a();
            // }
            //
            // Module EntryPoint (executed last):
            // import "B";
            // import "A";
            //
            m_codeBlock->addFunctionDecl(makeFunction(function));
        } else {
            // Stack allocated functions can be allocated when executing the module's body.
            m_functionsToInitialize.append(std::make_pair(function, NormalFunctionVariable));
        }
    }

    // Remember the constant register offset of the top-most symbol table. This symbol table will be
    // cloned during code block linking. After that, to create the module environment, we retrieve
    // the cloned symbol table from the linked code block using this offset.
    codeBlock->setModuleEnvironmentSymbolTableConstantRegisterOffset(constantSymbolTable->index());
}

BytecodeGenerator::~BytecodeGenerator()
{
}

void BytecodeGenerator::initializeDefaultParameterValuesAndSetupFunctionScopeStack(
    FunctionParameters& parameters, bool isSimpleParameterList, FunctionNode* functionNode, SymbolTable* functionSymbolTable,
    int symbolTableConstantIndex, const ScopedLambda<bool (UniquedStringImpl*)>& captures, bool shouldCreateArgumentsVariableInParameterScope)
{
    Vector<std::pair<Identifier, RefPtr<RegisterID>>> valuesToMoveIntoVars;
    ASSERT(!(isSimpleParameterList && shouldCreateArgumentsVariableInParameterScope));
    if (!isSimpleParameterList) {
        // Refer to the ES6 spec section 9.2.12: http://www.ecma-international.org/ecma-262/6.0/index.html#sec-functiondeclarationinstantiation
        // This implements step 21.
        VariableEnvironment environment;
        Vector<Identifier> allParameterNames;
        for (unsigned i = 0; i < parameters.size(); i++)
            parameters.at(i).first->collectBoundIdentifiers(allParameterNames);
        if (shouldCreateArgumentsVariableInParameterScope)
            allParameterNames.append(propertyNames().arguments);
        IdentifierSet parameterSet;
        for (auto& ident : allParameterNames) {
            parameterSet.add(ident.impl());
            auto addResult = environment.add(ident);
            addResult.iterator->value.setIsLet(); // When we have default parameter expressions, parameters act like "let" variables.
            if (captures(ident.impl()))
                addResult.iterator->value.setIsCaptured();
        }
        // This implements step 25 of section 9.2.12.
        pushLexicalScopeInternal(environment, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, TDZRequirement::UnderTDZ, ScopeType::LetConstScope, ScopeRegisterType::Block);

        if (shouldCreateArgumentsVariableInParameterScope) {
            Variable argumentsVariable = variable(propertyNames().arguments);
            initializeVariable(argumentsVariable, m_argumentsRegister);
            liftTDZCheckIfPossible(argumentsVariable);
        }

        RefPtr<RegisterID> temp = newTemporary();
        for (unsigned i = 0; i < parameters.size(); i++) {
            std::pair<DestructuringPatternNode*, ExpressionNode*> parameter = parameters.at(i);
            if (parameter.first->isRestParameter())
                continue;
            if ((i + 1) < m_parameters.size())
                move(temp.get(), &m_parameters[i + 1]);
            else
                emitGetArgument(temp.get(), i);
            if (parameter.second) {
                RefPtr<RegisterID> condition = emitIsUndefined(newTemporary(), temp.get());
                Ref<Label> skipDefaultParameterBecauseNotUndefined = newLabel();
                emitJumpIfFalse(condition.get(), skipDefaultParameterBecauseNotUndefined.get());
                emitNode(temp.get(), parameter.second);
                emitLabel(skipDefaultParameterBecauseNotUndefined.get());
            }

            parameter.first->bindValue(*this, temp.get());
        }

        // Final act of weirdness for default parameters. If a "var" also
        // has the same name as a parameter, it should start out as the
        // value of that parameter. Note, though, that they will be distinct
        // bindings.
        // This is step 28 of section 9.2.12.
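        // For example (illustrative, not from the original source): in
        //     function f(x = 0) { var x; return x; }
        // f(42) returns 42, because the "var" binding of x starts out holding the parameter's value,
        // even though the two are distinct bindings.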
1124 for (auto& entry : functionNode->varDeclarations()) {
1125 if (!entry.value.isVar()) // This is either a parameter or callee.
1126 continue;
1127
1128 if (parameterSet.contains(entry.key)) {
1129 Identifier ident = Identifier::fromUid(m_vm, entry.key.get());
1130 Variable var = variable(ident);
1131 RegisterID* scope = emitResolveScope(nullptr, var);
1132 RefPtr<RegisterID> value = emitGetFromScope(newTemporary(), scope, var, DoNotThrowIfNotFound);
1133 valuesToMoveIntoVars.append(std::make_pair(ident, value));
1134 }
1135 }
1136
1137 // Functions with default parameter expressions must have a separate environment
1138 // record for parameters and "var"s. The "var" environment record must have the
1139 // parameter environment record as its parent.
1140 // See step 28 of section 9.2.12.
1141 bool hasCapturedVariables = !!m_lexicalEnvironmentRegister;
1142 initializeVarLexicalEnvironment(symbolTableConstantIndex, functionSymbolTable, hasCapturedVariables);
1143 }
1144
1145 // This completes step 28 of section 9.2.12.
1146 for (unsigned i = 0; i < valuesToMoveIntoVars.size(); i++) {
1147 ASSERT(!isSimpleParameterList);
1148 Variable var = variable(valuesToMoveIntoVars[i].first);
1149 RegisterID* scope = emitResolveScope(nullptr, var);
1150 emitPutToScope(scope, var, valuesToMoveIntoVars[i].second.get(), DoNotThrowIfNotFound, InitializationMode::NotInitialization);
1151 }
1152}
1153
1154bool BytecodeGenerator::needsDerivedConstructorInArrowFunctionLexicalEnvironment()
1155{
1156 ASSERT(m_codeBlock->isClassContext() || !(isConstructor() && constructorKind() == ConstructorKind::Extends));
1157 return m_codeBlock->isClassContext() && isSuperUsedInInnerArrowFunction();
1158}
1159
1160void BytecodeGenerator::initializeArrowFunctionContextScopeIfNeeded(SymbolTable* functionSymbolTable, bool canReuseLexicalEnvironment)
1161{
1162 ASSERT(!m_arrowFunctionContextLexicalEnvironmentRegister);
1163
1164 if (canReuseLexicalEnvironment && m_lexicalEnvironmentRegister) {
1165 RELEASE_ASSERT(!m_codeBlock->isArrowFunction());
1166 RELEASE_ASSERT(functionSymbolTable);
1167
1168 m_arrowFunctionContextLexicalEnvironmentRegister = m_lexicalEnvironmentRegister;
1169
1170 ScopeOffset offset;
1171
1172 if (isThisUsedInInnerArrowFunction()) {
1173 offset = functionSymbolTable->takeNextScopeOffset(NoLockingNecessary);
1174 functionSymbolTable->set(NoLockingNecessary, propertyNames().thisIdentifier.impl(), SymbolTableEntry(VarOffset(offset)));
1175 }
1176
1177 if (m_codeType == FunctionCode && isNewTargetUsedInInnerArrowFunction()) {
1178 offset = functionSymbolTable->takeNextScopeOffset();
1179 functionSymbolTable->set(NoLockingNecessary, propertyNames().builtinNames().newTargetLocalPrivateName().impl(), SymbolTableEntry(VarOffset(offset)));
1180 }
1181
1182 if (needsDerivedConstructorInArrowFunctionLexicalEnvironment()) {
1183 offset = functionSymbolTable->takeNextScopeOffset(NoLockingNecessary);
1184 functionSymbolTable->set(NoLockingNecessary, propertyNames().builtinNames().derivedConstructorPrivateName().impl(), SymbolTableEntry(VarOffset(offset)));
1185 }
1186
1187 return;
1188 }
1189
1190 VariableEnvironment environment;
1191
1192 if (isThisUsedInInnerArrowFunction()) {
1193 auto addResult = environment.add(propertyNames().thisIdentifier);
1194 addResult.iterator->value.setIsCaptured();
1195 addResult.iterator->value.setIsLet();
1196 }
1197
1198 if (m_codeType == FunctionCode && isNewTargetUsedInInnerArrowFunction()) {
1199 auto addTarget = environment.add(propertyNames().builtinNames().newTargetLocalPrivateName());
1200 addTarget.iterator->value.setIsCaptured();
1201 addTarget.iterator->value.setIsLet();
1202 }
1203
1204 if (needsDerivedConstructorInArrowFunctionLexicalEnvironment()) {
1205 auto derivedConstructor = environment.add(propertyNames().builtinNames().derivedConstructorPrivateName());
1206 derivedConstructor.iterator->value.setIsCaptured();
1207 derivedConstructor.iterator->value.setIsLet();
1208 }
1209
1210 if (environment.size() > 0) {
1211 size_t size = m_lexicalScopeStack.size();
1212 pushLexicalScopeInternal(environment, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, TDZRequirement::UnderTDZ, ScopeType::LetConstScope, ScopeRegisterType::Block);
1213
1214 ASSERT_UNUSED(size, m_lexicalScopeStack.size() == size + 1);
1215
1216 m_arrowFunctionContextLexicalEnvironmentRegister = m_lexicalScopeStack.last().m_scope;
1217 }
1218}
1219
1220RegisterID* BytecodeGenerator::initializeNextParameter()
1221{
1222 VirtualRegister reg = virtualRegisterForArgument(m_codeBlock->numParameters());
1223 m_parameters.grow(m_parameters.size() + 1);
1224 auto& parameter = registerFor(reg);
1225 parameter.setIndex(reg.offset());
1226 m_codeBlock->addParameter();
1227 return &parameter;
1228}
1229
1230void BytecodeGenerator::initializeParameters(FunctionParameters& parameters)
1231{
1232 // Make sure the code block knows about all of our parameters, and make sure that parameters
1233 // needing destructuring are noted.
1234 m_thisRegister.setIndex(initializeNextParameter()->index()); // this
1235
1236 bool nonSimpleArguments = false;
1237 for (unsigned i = 0; i < parameters.size(); ++i) {
1238 auto parameter = parameters.at(i);
1239 auto pattern = parameter.first;
1240 if (pattern->isRestParameter()) {
1241 RELEASE_ASSERT(!m_restParameter);
1242 m_restParameter = static_cast<RestParameterNode*>(pattern);
1243 nonSimpleArguments = true;
1244 continue;
1245 }
1246 if (parameter.second) {
1247 nonSimpleArguments = true;
1248 continue;
1249 }
1250 if (!nonSimpleArguments)
1251 initializeNextParameter();
1252 }
1253}
1254
1255void BytecodeGenerator::initializeVarLexicalEnvironment(int symbolTableConstantIndex, SymbolTable* functionSymbolTable, bool hasCapturedVariables)
1256{
1257 if (hasCapturedVariables) {
1258 RELEASE_ASSERT(m_lexicalEnvironmentRegister);
1259 OpCreateLexicalEnvironment::emit(this, m_lexicalEnvironmentRegister, scopeRegister(), VirtualRegister { symbolTableConstantIndex }, addConstantValue(jsUndefined()));
1260
1261 OpMov::emit(this, scopeRegister(), m_lexicalEnvironmentRegister);
1262
1263 pushLocalControlFlowScope();
1264 }
1265 bool isWithScope = false;
1266 m_lexicalScopeStack.append({ functionSymbolTable, m_lexicalEnvironmentRegister, isWithScope, symbolTableConstantIndex });
1267 m_varScopeLexicalScopeStackIndex = m_lexicalScopeStack.size() - 1;
1268}
1269
1270UniquedStringImpl* BytecodeGenerator::visibleNameForParameter(DestructuringPatternNode* pattern)
1271{
1272 if (pattern->isBindingNode()) {
1273 const Identifier& ident = static_cast<const BindingNode*>(pattern)->boundProperty();
1274 if (!m_functions.contains(ident.impl()))
1275 return ident.impl();
1276 }
1277 return nullptr;
1278}
1279
1280RegisterID* BytecodeGenerator::newRegister()
1281{
1282 m_calleeLocals.append(virtualRegisterForLocal(m_calleeLocals.size()));
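    // Keep the callee-locals high-water mark rounded up to the machine stack alignment so the
    // frame's register count always stays properly aligned.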
1283 int numCalleeLocals = std::max<int>(m_codeBlock->m_numCalleeLocals, m_calleeLocals.size());
1284 numCalleeLocals = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), numCalleeLocals);
1285 m_codeBlock->m_numCalleeLocals = numCalleeLocals;
1286 return &m_calleeLocals.last();
1287}
1288
1289void BytecodeGenerator::reclaimFreeRegisters()
1290{
1291 shrinkToFit(m_calleeLocals);
1292}
1293
1294RegisterID* BytecodeGenerator::newBlockScopeVariable()
1295{
1296 reclaimFreeRegisters();
1297
1298 return newRegister();
1299}
1300
1301RegisterID* BytecodeGenerator::newTemporary()
1302{
1303 reclaimFreeRegisters();
1304
1305 RegisterID* result = newRegister();
1306 result->setTemporary();
1307 return result;
1308}
1309
1310Ref<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
1311{
1312 shrinkToFit(m_labelScopes);
1313
1314 // Allocate new label scope.
1315 m_labelScopes.append(type, name, labelScopeDepth(), newLabel(), type == LabelScope::Loop ? RefPtr<Label>(newLabel()) : RefPtr<Label>()); // Only loops have continue targets.
1316 return m_labelScopes.last();
1317}
1318
1319Ref<Label> BytecodeGenerator::newLabel()
1320{
1321 shrinkToFit(m_labels);
1322
1323 // Allocate new label ID.
1324 m_labels.append();
1325 return m_labels.last();
1326}
1327
1328Ref<Label> BytecodeGenerator::newEmittedLabel()
1329{
1330 Ref<Label> label = newLabel();
1331 emitLabel(label.get());
1332 return label;
1333}
1334
1335void BytecodeGenerator::recordOpcode(OpcodeID opcodeID)
1336{
1337 ASSERT(m_lastOpcodeID == op_end || (m_lastOpcodeID == m_lastInstruction->opcodeID() && m_writer.position() == m_lastInstruction.offset() + m_lastInstruction->size()));
1338 m_lastInstruction = m_writer.ref();
1339 m_lastOpcodeID = opcodeID;
1340}
1341
1342void BytecodeGenerator::alignWideOpcode()
1343{
1344#if CPU(NEEDS_ALIGNED_ACCESS)
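    // Pad with single-byte nops so the upcoming wide-encoded instruction is emitted at an
    // offset where its operands can be accessed without faulting on alignment-sensitive CPUs.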
1345 while ((m_writer.position() + 1) % OpcodeSize::Wide)
1346 OpNop::emit<OpcodeSize::Narrow>(this);
1347#endif
1348}
1349
1350void BytecodeGenerator::emitLabel(Label& l0)
1351{
1352 unsigned newLabelIndex = instructions().size();
1353 l0.setLocation(*this, newLabelIndex);
1354
1355 if (m_codeBlock->numberOfJumpTargets()) {
1356 unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
1357 ASSERT(lastLabelIndex <= newLabelIndex);
1358 if (newLabelIndex == lastLabelIndex) {
1359 // Peephole optimizations have already been disabled by emitting the last label
1360 return;
1361 }
1362 }
1363
1364 m_codeBlock->addJumpTarget(newLabelIndex);
1365
1366 // This disables peephole optimizations when an instruction is a jump target
1367 m_lastOpcodeID = op_end;
1368}
1369
1370void BytecodeGenerator::emitEnter()
1371{
1372 OpEnter::emit(this);
1373
1374 if (LIKELY(Options::optimizeRecursiveTailCalls())) {
1375 // We must add the end of op_enter as a potential jump target, because the bytecode parser may decide to split its basic block
1376 // to have somewhere to jump to if there is a recursive tail-call that points to this function.
1377 m_codeBlock->addJumpTarget(instructions().size());
1378 // This disables peephole optimizations when an instruction is a jump target
1379 m_lastOpcodeID = op_end;
1380 }
1381}
1382
1383void BytecodeGenerator::emitLoopHint()
1384{
1385 OpLoopHint::emit(this);
1386 emitCheckTraps();
1387}
1388
1389void BytecodeGenerator::emitJump(Label& target)
1390{
1391 OpJmp::emit(this, target.bind(this));
1392}
1393
1394void BytecodeGenerator::emitCheckTraps()
1395{
1396 OpCheckTraps::emit(this);
1397}
1398
1399void ALWAYS_INLINE BytecodeGenerator::rewind()
1400{
1401 ASSERT(m_lastInstruction.isValid());
1402 m_lastOpcodeID = op_end;
1403 m_writer.rewind(m_lastInstruction);
1404}
1405
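// Peephole fusion: when a comparison (or one of the unary tests below) writes to a temporary
// that is immediately consumed by a conditional jump and never used again, rewind the previous
// instruction and emit a single fused compare-and-branch opcode instead. For example,
// "if (a < b) ..." would otherwise compile to op_less followed by op_jtrue; the fused form is
// a single op_jless.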
1406template<typename BinOp, typename JmpOp>
1407bool BytecodeGenerator::fuseCompareAndJump(RegisterID* cond, Label& target, bool swapOperands)
1408{
1409 ASSERT(canDoPeepholeOptimization());
1410 auto binop = m_lastInstruction->as<BinOp>();
1411 if (cond->index() == binop.m_dst.offset() && cond->isTemporary() && !cond->refCount()) {
1412 rewind();
1413
1414 if (swapOperands)
1415 std::swap(binop.m_lhs, binop.m_rhs);
1416
1417 JmpOp::emit(this, binop.m_lhs, binop.m_rhs, target.bind(this));
1418 return true;
1419 }
1420 return false;
1421}
1422
1423template<typename UnaryOp, typename JmpOp>
1424bool BytecodeGenerator::fuseTestAndJmp(RegisterID* cond, Label& target)
1425{
1426 ASSERT(canDoPeepholeOptimization());
1427 auto unop = m_lastInstruction->as<UnaryOp>();
1428 if (cond->index() == unop.m_dst.offset() && cond->isTemporary() && !cond->refCount()) {
1429 rewind();
1430
1431 JmpOp::emit(this, unop.m_operand, target.bind(this));
1432 return true;
1433 }
1434 return false;
1435}
1436
1437void BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label& target)
1438{
1439 if (canDoPeepholeOptimization()) {
1440 if (m_lastOpcodeID == op_less) {
1441 if (fuseCompareAndJump<OpLess, OpJless>(cond, target))
1442 return;
1443 } else if (m_lastOpcodeID == op_lesseq) {
1444 if (fuseCompareAndJump<OpLesseq, OpJlesseq>(cond, target))
1445 return;
1446 } else if (m_lastOpcodeID == op_greater) {
1447 if (fuseCompareAndJump<OpGreater, OpJgreater>(cond, target))
1448 return;
1449 } else if (m_lastOpcodeID == op_greatereq) {
1450 if (fuseCompareAndJump<OpGreatereq, OpJgreatereq>(cond, target))
1451 return;
1452 } else if (m_lastOpcodeID == op_eq) {
1453 if (fuseCompareAndJump<OpEq, OpJeq>(cond, target))
1454 return;
1455 } else if (m_lastOpcodeID == op_stricteq) {
1456 if (fuseCompareAndJump<OpStricteq, OpJstricteq>(cond, target))
1457 return;
1458 } else if (m_lastOpcodeID == op_neq) {
1459 if (fuseCompareAndJump<OpNeq, OpJneq>(cond, target))
1460 return;
1461 } else if (m_lastOpcodeID == op_nstricteq) {
1462 if (fuseCompareAndJump<OpNstricteq, OpJnstricteq>(cond, target))
1463 return;
1464 } else if (m_lastOpcodeID == op_below) {
1465 if (fuseCompareAndJump<OpBelow, OpJbelow>(cond, target))
1466 return;
1467 } else if (m_lastOpcodeID == op_beloweq) {
1468 if (fuseCompareAndJump<OpBeloweq, OpJbeloweq>(cond, target))
1469 return;
1470 } else if (m_lastOpcodeID == op_eq_null && target.isForward()) {
1471 if (fuseTestAndJmp<OpEqNull, OpJeqNull>(cond, target))
1472 return;
1473 } else if (m_lastOpcodeID == op_neq_null && target.isForward()) {
1474 if (fuseTestAndJmp<OpNeqNull, OpJneqNull>(cond, target))
1475 return;
1476 }
1477 }
1478
1479 OpJtrue::emit(this, cond, target.bind(this));
1480}
1481
1482void BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label& target)
1483{
1484 if (canDoPeepholeOptimization()) {
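        // Same fusion as in emitJumpIfTrue, but using the negated jump opcodes. The unsigned
        // comparisons have no direct negated form, so !(a below b) is emitted as (b beloweq a)
        // by swapping the operands, and !(a beloweq b) as (b below a).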
1485 if (m_lastOpcodeID == op_less && target.isForward()) {
1486 if (fuseCompareAndJump<OpLess, OpJnless>(cond, target))
1487 return;
1488 } else if (m_lastOpcodeID == op_lesseq && target.isForward()) {
1489 if (fuseCompareAndJump<OpLesseq, OpJnlesseq>(cond, target))
1490 return;
1491 } else if (m_lastOpcodeID == op_greater && target.isForward()) {
1492 if (fuseCompareAndJump<OpGreater, OpJngreater>(cond, target))
1493 return;
1494 } else if (m_lastOpcodeID == op_greatereq && target.isForward()) {
1495 if (fuseCompareAndJump<OpGreatereq, OpJngreatereq>(cond, target))
1496 return;
1497 } else if (m_lastOpcodeID == op_eq && target.isForward()) {
1498 if (fuseCompareAndJump<OpEq, OpJneq>(cond, target))
1499 return;
1500 } else if (m_lastOpcodeID == op_stricteq && target.isForward()) {
1501 if (fuseCompareAndJump<OpStricteq, OpJnstricteq>(cond, target))
1502 return;
1503 } else if (m_lastOpcodeID == op_neq && target.isForward()) {
1504 if (fuseCompareAndJump<OpNeq, OpJeq>(cond, target))
1505 return;
1506 } else if (m_lastOpcodeID == op_nstricteq && target.isForward()) {
1507 if (fuseCompareAndJump<OpNstricteq, OpJstricteq>(cond, target))
1508 return;
1509 } else if (m_lastOpcodeID == op_below && target.isForward()) {
1510 if (fuseCompareAndJump<OpBelow, OpJbeloweq>(cond, target, true))
1511 return;
1512 } else if (m_lastOpcodeID == op_beloweq && target.isForward()) {
1513 if (fuseCompareAndJump<OpBeloweq, OpJbelow>(cond, target, true))
1514 return;
1515 } else if (m_lastOpcodeID == op_not) {
1516 if (fuseTestAndJmp<OpNot, OpJtrue>(cond, target))
1517 return;
1518 } else if (m_lastOpcodeID == op_eq_null && target.isForward()) {
1519 if (fuseTestAndJmp<OpEqNull, OpJneqNull>(cond, target))
1520 return;
1521 } else if (m_lastOpcodeID == op_neq_null && target.isForward()) {
1522 if (fuseTestAndJmp<OpNeqNull, OpJeqNull>(cond, target))
1523 return;
1524 }
1525 }
1526
1527 OpJfalse::emit(this, cond, target.bind(this));
1528}
1529
1530void BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label& target)
1531{
1532 OpJneqPtr::emit(this, cond, Special::CallFunction, target.bind(this));
1533}
1534
1535void BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label& target)
1536{
1537 OpJneqPtr::emit(this, cond, Special::ApplyFunction, target.bind(this));
1538}
1539
1540bool BytecodeGenerator::hasConstant(const Identifier& ident) const
1541{
1542 UniquedStringImpl* rep = ident.impl();
1543 return m_identifierMap.contains(rep);
1544}
1545
1546unsigned BytecodeGenerator::addConstant(const Identifier& ident)
1547{
1548 UniquedStringImpl* rep = ident.impl();
1549 IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
1550 if (result.isNewEntry)
1551 m_codeBlock->addIdentifier(ident);
1552
1553 return result.iterator->value;
1554}
1555
1556// We can't hash JSValue(), so we use a dedicated data member to cache it.
1557RegisterID* BytecodeGenerator::addConstantEmptyValue()
1558{
1559 if (!m_emptyValueRegister) {
1560 int index = addConstantIndex();
1561 m_codeBlock->addConstant(JSValue());
1562 m_emptyValueRegister = &m_constantPoolRegisters[index];
1563 }
1564
1565 return m_emptyValueRegister;
1566}
1567
1568RegisterID* BytecodeGenerator::addConstantValue(JSValue v, SourceCodeRepresentation sourceCodeRepresentation)
1569{
1570 if (!v)
1571 return addConstantEmptyValue();
1572
1573 int index = m_nextConstantOffset;
1574
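    // A numeric literal written as a double in the source (e.g. 1.0) stays a double constant
    // even when its value fits in an int32; the representation is part of the dedup key below,
    // so 1 and 1.0 get distinct constant pool entries.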
1575 if (sourceCodeRepresentation == SourceCodeRepresentation::Double && v.isInt32())
1576 v = jsDoubleNumber(v.asNumber());
1577 EncodedJSValueWithRepresentation valueMapKey { JSValue::encode(v), sourceCodeRepresentation };
1578 JSValueMap::AddResult result = m_jsValueMap.add(valueMapKey, m_nextConstantOffset);
1579 if (result.isNewEntry) {
1580 addConstantIndex();
1581 m_codeBlock->addConstant(v, sourceCodeRepresentation);
1582 } else
1583 index = result.iterator->value;
1584 return &m_constantPoolRegisters[index];
1585}
1586
1587RegisterID* BytecodeGenerator::moveLinkTimeConstant(RegisterID* dst, LinkTimeConstant type)
1588{
1589 unsigned constantIndex = static_cast<unsigned>(type);
1590 if (!m_linkTimeConstantRegisters[constantIndex]) {
1591 int index = addConstantIndex();
1592 m_codeBlock->addConstant(type);
1593 m_linkTimeConstantRegisters[constantIndex] = &m_constantPoolRegisters[index];
1594 }
1595
1596 if (!dst)
1597 return m_linkTimeConstantRegisters[constantIndex];
1598
1599 OpMov::emit(this, dst, m_linkTimeConstantRegisters[constantIndex]);
1600
1601 return dst;
1602}
1603
1604RegisterID* BytecodeGenerator::moveEmptyValue(RegisterID* dst)
1605{
1606 RefPtr<RegisterID> emptyValue = addConstantEmptyValue();
1607
1608 OpMov::emit(this, dst, emptyValue.get());
1609
1610 return dst;
1611}
1612
1613RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
1614{
1615 ASSERT(src != m_emptyValueRegister);
1616
1617 m_staticPropertyAnalyzer.mov(dst, src);
1618 OpMov::emit(this, dst, src);
1619
1620 return dst;
1621}
1622
1623RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src, OperandTypes types)
1624{
1625 switch (opcodeID) {
1626 case op_not:
1627 emitUnaryOp<OpNot>(dst, src);
1628 break;
1629 case op_negate:
1630 OpNegate::emit(this, dst, src, types);
1631 break;
1632 case op_bitnot:
1633 emitUnaryOp<OpBitnot>(dst, src);
1634 break;
1635 case op_to_number:
1636 emitUnaryOp<OpToNumber>(dst, src);
1637 break;
1638 default:
1639 ASSERT_NOT_REACHED();
1640 }
1641 return dst;
1642}
1643
1644RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
1645{
1646 switch (opcodeID) {
1647 case op_eq:
1648 return emitBinaryOp<OpEq>(dst, src1, src2, types);
1649 case op_neq:
1650 return emitBinaryOp<OpNeq>(dst, src1, src2, types);
1651 case op_stricteq:
1652 return emitBinaryOp<OpStricteq>(dst, src1, src2, types);
1653 case op_nstricteq:
1654 return emitBinaryOp<OpNstricteq>(dst, src1, src2, types);
1655 case op_less:
1656 return emitBinaryOp<OpLess>(dst, src1, src2, types);
1657 case op_lesseq:
1658 return emitBinaryOp<OpLesseq>(dst, src1, src2, types);
1659 case op_greater:
1660 return emitBinaryOp<OpGreater>(dst, src1, src2, types);
1661 case op_greatereq:
1662 return emitBinaryOp<OpGreatereq>(dst, src1, src2, types);
1663 case op_below:
1664 return emitBinaryOp<OpBelow>(dst, src1, src2, types);
1665 case op_beloweq:
1666 return emitBinaryOp<OpBeloweq>(dst, src1, src2, types);
1667 case op_mod:
1668 return emitBinaryOp<OpMod>(dst, src1, src2, types);
1669 case op_pow:
1670 return emitBinaryOp<OpPow>(dst, src1, src2, types);
1671 case op_lshift:
1672 return emitBinaryOp<OpLshift>(dst, src1, src2, types);
1673 case op_rshift:
1674 return emitBinaryOp<OpRshift>(dst, src1, src2, types);
1675 case op_urshift:
1676 return emitBinaryOp<OpUrshift>(dst, src1, src2, types);
1677 case op_add:
1678 return emitBinaryOp<OpAdd>(dst, src1, src2, types);
1679 case op_mul:
1680 return emitBinaryOp<OpMul>(dst, src1, src2, types);
1681 case op_div:
1682 return emitBinaryOp<OpDiv>(dst, src1, src2, types);
1683 case op_sub:
1684 return emitBinaryOp<OpSub>(dst, src1, src2, types);
1685 case op_bitand:
1686 return emitBinaryOp<OpBitand>(dst, src1, src2, types);
1687 case op_bitxor:
1688 return emitBinaryOp<OpBitxor>(dst, src1, src2, types);
1689 case op_bitor:
1690 return emitBinaryOp<OpBitor>(dst, src1, src2, types);
1691 default:
1692 ASSERT_NOT_REACHED();
1693 return nullptr;
1694 }
1695}
1696
1697RegisterID* BytecodeGenerator::emitToObject(RegisterID* dst, RegisterID* src, const Identifier& message)
1698{
1699 OpToObject::emit(this, dst, src, addConstant(message));
1700 return dst;
1701}
1702
1703RegisterID* BytecodeGenerator::emitToNumber(RegisterID* dst, RegisterID* src)
1704{
1705 return emitUnaryOp<OpToNumber>(dst, src);
1706}
1707
1708RegisterID* BytecodeGenerator::emitToString(RegisterID* dst, RegisterID* src)
1709{
1710 return emitUnaryOp<OpToString>(dst, src);
1711}
1712
1713RegisterID* BytecodeGenerator::emitTypeOf(RegisterID* dst, RegisterID* src)
1714{
1715 return emitUnaryOp<OpTypeof>(dst, src);
1716}
1717
1718RegisterID* BytecodeGenerator::emitInc(RegisterID* srcDst)
1719{
1720 OpInc::emit(this, srcDst);
1721 return srcDst;
1722}
1723
1724RegisterID* BytecodeGenerator::emitDec(RegisterID* srcDst)
1725{
1726 OpDec::emit(this, srcDst);
1727 return srcDst;
1728}
1729
1730bool BytecodeGenerator::emitEqualityOpImpl(RegisterID* dst, RegisterID* src1, RegisterID* src2)
1731{
1732 if (!canDoPeepholeOptimization())
1733 return false;
1734
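    // Fuse "typeof x ==(=) <string literal>" into a direct type check, e.g. typeof x === "number"
    // becomes a single is_number on x instead of typeof followed by stricteq.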
1735 if (m_lastInstruction->is<OpTypeof>()) {
1736 auto op = m_lastInstruction->as<OpTypeof>();
1737 if (src1->index() == op.m_dst.offset()
1738 && src1->isTemporary()
1739 && m_codeBlock->isConstantRegisterIndex(src2->index())
1740 && m_codeBlock->constantRegister(src2->index()).get().isString()) {
1741 const String& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
1742 if (value == "undefined") {
1743 rewind();
1744 OpIsUndefined::emit(this, dst, op.m_value);
1745 return true;
1746 }
1747 if (value == "boolean") {
1748 rewind();
1749 OpIsBoolean::emit(this, dst, op.m_value);
1750 return true;
1751 }
1752 if (value == "number") {
1753 rewind();
1754 OpIsNumber::emit(this, dst, op.m_value);
1755 return true;
1756 }
1757 if (value == "string") {
1758 rewind();
1759 OpIsCellWithType::emit(this, dst, op.m_value, StringType);
1760 return true;
1761 }
1762 if (value == "symbol") {
1763 rewind();
1764 OpIsCellWithType::emit(this, dst, op.m_value, SymbolType);
1765 return true;
1766 }
1767 if (Options::useBigInt() && value == "bigint") {
1768 rewind();
1769 OpIsCellWithType::emit(this, dst, op.m_value, BigIntType);
1770 return true;
1771 }
1772 if (value == "object") {
1773 rewind();
1774 OpIsObjectOrNull::emit(this, dst, op.m_value);
1775 return true;
1776 }
1777 if (value == "function") {
1778 rewind();
1779 OpIsFunction::emit(this, dst, op.m_value);
1780 return true;
1781 }
1782 }
1783 }
1784
1785 return false;
1786}
1787
1788void BytecodeGenerator::emitTypeProfilerExpressionInfo(const JSTextPosition& startDivot, const JSTextPosition& endDivot)
1789{
1790 ASSERT(shouldEmitTypeProfilerHooks());
1791
1792    unsigned start = startDivot.offset; // Ranges are inclusive of their endpoints and 0-indexed.
1793    unsigned end = endDivot.offset - 1; // End ranges already go one past the inclusive range, so subtract 1.
1794 unsigned instructionOffset = instructions().size() - 1;
1795 m_codeBlock->addTypeProfilerExpressionInfo(instructionOffset, start, end);
1796}
1797
1798void BytecodeGenerator::emitProfileType(RegisterID* registerToProfile, ProfileTypeBytecodeFlag flag)
1799{
1800 if (!shouldEmitTypeProfilerHooks())
1801 return;
1802
1803 if (!registerToProfile)
1804 return;
1805
1806 OpProfileType::emit(this, registerToProfile, { }, flag, { }, resolveType());
1807
1808    // Don't emit expression info for this version of profile type. This generally means
1809    // we're profiling information for something that isn't in the actual text of a JavaScript
1810    // program, e.g. the implicit return of undefined at the end of a function.
1811}
1812
1813void BytecodeGenerator::emitProfileType(RegisterID* registerToProfile, const JSTextPosition& startDivot, const JSTextPosition& endDivot)
1814{
1815 emitProfileType(registerToProfile, ProfileTypeBytecodeDoesNotHaveGlobalID, startDivot, endDivot);
1816}
1817
1818void BytecodeGenerator::emitProfileType(RegisterID* registerToProfile, ProfileTypeBytecodeFlag flag, const JSTextPosition& startDivot, const JSTextPosition& endDivot)
1819{
1820 if (!shouldEmitTypeProfilerHooks())
1821 return;
1822
1823 if (!registerToProfile)
1824 return;
1825
1826 OpProfileType::emit(this, registerToProfile, { }, flag, { }, resolveType());
1827 emitTypeProfilerExpressionInfo(startDivot, endDivot);
1828}
1829
1830void BytecodeGenerator::emitProfileType(RegisterID* registerToProfile, const Variable& var, const JSTextPosition& startDivot, const JSTextPosition& endDivot)
1831{
1832 if (!shouldEmitTypeProfilerHooks())
1833 return;
1834
1835 if (!registerToProfile)
1836 return;
1837
1838 ProfileTypeBytecodeFlag flag;
1839 SymbolTableOrScopeDepth symbolTableOrScopeDepth;
1840 if (var.local() || var.offset().isScope()) {
1841 flag = ProfileTypeBytecodeLocallyResolved;
1842 ASSERT(var.symbolTableConstantIndex());
1843 symbolTableOrScopeDepth = SymbolTableOrScopeDepth::symbolTable(VirtualRegister { var.symbolTableConstantIndex() });
1844 } else {
1845 flag = ProfileTypeBytecodeClosureVar;
1846 symbolTableOrScopeDepth = SymbolTableOrScopeDepth::scopeDepth(localScopeDepth());
1847 }
1848
1849 OpProfileType::emit(this, registerToProfile, symbolTableOrScopeDepth, flag, addConstant(var.ident()), resolveType());
1850 emitTypeProfilerExpressionInfo(startDivot, endDivot);
1851}
1852
1853void BytecodeGenerator::emitProfileControlFlow(int textOffset)
1854{
1855 if (shouldEmitControlFlowProfilerHooks()) {
1856 RELEASE_ASSERT(textOffset >= 0);
1857
1858 OpProfileControlFlow::emit(this, textOffset);
1859 m_codeBlock->addOpProfileControlFlowBytecodeOffset(m_lastInstruction.offset());
1860 }
1861}
1862
1863unsigned BytecodeGenerator::addConstantIndex()
1864{
1865 unsigned index = m_nextConstantOffset;
1866 m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1867 ++m_nextConstantOffset;
1868 return index;
1869}
1870
1871RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
1872{
1873 return emitLoad(dst, jsBoolean(b));
1874}
1875
1876RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
1877{
1878 ASSERT(!identifier.isSymbol());
1879 JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
1880 if (!stringInMap)
1881 stringInMap = jsOwnedString(vm(), identifier.string());
1882
1883 return emitLoad(dst, JSValue(stringInMap));
1884}
1885
1886RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v, SourceCodeRepresentation sourceCodeRepresentation)
1887{
1888 RegisterID* constantID = addConstantValue(v, sourceCodeRepresentation);
1889 if (dst)
1890 return move(dst, constantID);
1891 return constantID;
1892}
1893
1894RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, IdentifierSet& set)
1895{
1896 if (m_codeBlock->numberOfConstantIdentifierSets()) {
1897 for (const auto& entry : m_codeBlock->constantIdentifierSets()) {
1898 if (entry.first != set)
1899 continue;
1900
1901 return &m_constantPoolRegisters[entry.second];
1902 }
1903 }
1904
1905 unsigned index = addConstantIndex();
1906 m_codeBlock->addSetConstant(set);
1907    RegisterID* setRegister = &m_constantPoolRegisters[index];
1908
1909    if (dst)
1910        return move(dst, setRegister);
1911
1912    return setRegister;
1913}
1914
1915template<typename LookUpVarKindFunctor>
1916bool BytecodeGenerator::instantiateLexicalVariables(const VariableEnvironment& lexicalVariables, SymbolTable* symbolTable, ScopeRegisterType scopeRegisterType, LookUpVarKindFunctor lookUpVarKind)
1917{
1918 bool hasCapturedVariables = false;
1919 {
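        // Captured variables get a slot in the scope's activation; everything else lives in a
        // stack register (block-scoped locals are ref'd so they survive register reclamation).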
1920 for (auto& entry : lexicalVariables) {
1921 ASSERT(entry.value.isLet() || entry.value.isConst() || entry.value.isFunction());
1922 ASSERT(!entry.value.isVar());
1923 SymbolTableEntry symbolTableEntry = symbolTable->get(NoLockingNecessary, entry.key.get());
1924 ASSERT(symbolTableEntry.isNull());
1925
1926            // Imported bindings that are not namespace bindings are not allocated
1927            // in the module environment the way ordinary variables are.
1928            // Since these kinds of variables are only seen in the module environment,
1929            // other lexical environments do not need to handle them.
1930 if (entry.value.isImported() && !entry.value.isImportedNamespace())
1931 continue;
1932
1933 VarKind varKind = lookUpVarKind(entry.key.get(), entry.value);
1934 VarOffset varOffset;
1935 if (varKind == VarKind::Scope) {
1936 varOffset = VarOffset(symbolTable->takeNextScopeOffset(NoLockingNecessary));
1937 hasCapturedVariables = true;
1938 } else {
1939 ASSERT(varKind == VarKind::Stack);
1940 RegisterID* local;
1941 if (scopeRegisterType == ScopeRegisterType::Block) {
1942 local = newBlockScopeVariable();
1943 local->ref();
1944 } else
1945 local = addVar();
1946 varOffset = VarOffset(local->virtualRegister());
1947 }
1948
1949 SymbolTableEntry newEntry(varOffset, static_cast<unsigned>(entry.value.isConst() ? PropertyAttribute::ReadOnly : PropertyAttribute::None));
1950 symbolTable->add(NoLockingNecessary, entry.key.get(), newEntry);
1951 }
1952 }
1953 return hasCapturedVariables;
1954}
1955
1956void BytecodeGenerator::emitPrefillStackTDZVariables(const VariableEnvironment& lexicalVariables, SymbolTable* symbolTable)
1957{
1958 // Prefill stack variables with the TDZ empty value.
1959 // Scope variables will be initialized to the TDZ empty value when JSLexicalEnvironment is allocated.
1960 for (auto& entry : lexicalVariables) {
1961        // Imported bindings that are not namespace bindings are not allocated
1962        // in the module environment the way ordinary variables are.
1963        // Since these kinds of variables are only seen in the module environment,
1964        // other lexical environments do not need to handle them.
1965 if (entry.value.isImported() && !entry.value.isImportedNamespace())
1966 continue;
1967
1968 if (entry.value.isFunction())
1969 continue;
1970
1971 SymbolTableEntry symbolTableEntry = symbolTable->get(NoLockingNecessary, entry.key.get());
1972 ASSERT(!symbolTableEntry.isNull());
1973 VarOffset offset = symbolTableEntry.varOffset();
1974 if (offset.isScope())
1975 continue;
1976
1977 ASSERT(offset.isStack());
1978 moveEmptyValue(&registerFor(offset.stackOffset()));
1979 }
1980}
1981
1982void BytecodeGenerator::pushLexicalScope(VariableEnvironmentNode* node, TDZCheckOptimization tdzCheckOptimization, NestedScopeType nestedScopeType, RegisterID** constantSymbolTableResult, bool shouldInitializeBlockScopedFunctions)
1983{
1984 VariableEnvironment& environment = node->lexicalVariables();
1985 RegisterID* constantSymbolTableResultTemp = nullptr;
1986 pushLexicalScopeInternal(environment, tdzCheckOptimization, nestedScopeType, &constantSymbolTableResultTemp, TDZRequirement::UnderTDZ, ScopeType::LetConstScope, ScopeRegisterType::Block);
1987
1988 if (shouldInitializeBlockScopedFunctions)
1989 initializeBlockScopedFunctions(environment, node->functionStack(), constantSymbolTableResultTemp);
1990
1991 if (constantSymbolTableResult && constantSymbolTableResultTemp)
1992 *constantSymbolTableResult = constantSymbolTableResultTemp;
1993}
1994
1995void BytecodeGenerator::pushLexicalScopeInternal(VariableEnvironment& environment, TDZCheckOptimization tdzCheckOptimization, NestedScopeType nestedScopeType,
1996 RegisterID** constantSymbolTableResult, TDZRequirement tdzRequirement, ScopeType scopeType, ScopeRegisterType scopeRegisterType)
1997{
1998 if (!environment.size())
1999 return;
2000
2001 if (shouldEmitDebugHooks())
2002 environment.markAllVariablesAsCaptured();
2003
2004 SymbolTable* symbolTable = SymbolTable::create(*m_vm);
2005 switch (scopeType) {
2006 case ScopeType::CatchScope:
2007 symbolTable->setScopeType(SymbolTable::ScopeType::CatchScope);
2008 break;
2009 case ScopeType::LetConstScope:
2010 symbolTable->setScopeType(SymbolTable::ScopeType::LexicalScope);
2011 break;
2012 case ScopeType::FunctionNameScope:
2013 symbolTable->setScopeType(SymbolTable::ScopeType::FunctionNameScope);
2014 break;
2015 }
2016
2017 if (nestedScopeType == NestedScopeType::IsNested)
2018 symbolTable->markIsNestedLexicalScope();
2019
2020 auto lookUpVarKind = [] (UniquedStringImpl*, const VariableEnvironmentEntry& entry) -> VarKind {
2021 return entry.isCaptured() ? VarKind::Scope : VarKind::Stack;
2022 };
2023
2024 bool hasCapturedVariables = instantiateLexicalVariables(environment, symbolTable, scopeRegisterType, lookUpVarKind);
2025
2026 RegisterID* newScope = nullptr;
2027 RegisterID* constantSymbolTable = nullptr;
2028 int symbolTableConstantIndex = 0;
2029 if (shouldEmitTypeProfilerHooks()) {
2030 constantSymbolTable = addConstantValue(symbolTable);
2031 symbolTableConstantIndex = constantSymbolTable->index();
2032 }
2033 if (hasCapturedVariables) {
2034 if (scopeRegisterType == ScopeRegisterType::Block) {
2035 newScope = newBlockScopeVariable();
2036 newScope->ref();
2037 } else
2038 newScope = addVar();
2039 if (!constantSymbolTable) {
2040 ASSERT(!shouldEmitTypeProfilerHooks());
2041 constantSymbolTable = addConstantValue(symbolTable->cloneScopePart(*m_vm));
2042 symbolTableConstantIndex = constantSymbolTable->index();
2043 }
2044 if (constantSymbolTableResult)
2045 *constantSymbolTableResult = constantSymbolTable;
2046
2047 OpCreateLexicalEnvironment::emit(this, newScope, scopeRegister(), VirtualRegister { symbolTableConstantIndex }, addConstantValue(tdzRequirement == TDZRequirement::UnderTDZ ? jsTDZValue() : jsUndefined()));
2048
2049 move(scopeRegister(), newScope);
2050
2051 pushLocalControlFlowScope();
2052 }
2053
2054 bool isWithScope = false;
2055 m_lexicalScopeStack.append({ symbolTable, newScope, isWithScope, symbolTableConstantIndex });
2056 pushTDZVariables(environment, tdzCheckOptimization, tdzRequirement);
2057
2058 if (tdzRequirement == TDZRequirement::UnderTDZ)
2059 emitPrefillStackTDZVariables(environment, symbolTable);
2060}
2061
2062void BytecodeGenerator::initializeBlockScopedFunctions(VariableEnvironment& environment, FunctionStack& functionStack, RegisterID* constantSymbolTable)
2063{
2064 /*
2065 * We must transform block scoped function declarations in strict mode like so:
2066 *
2067 * function foo() {
2068 * if (c) {
2069 * function foo() { ... }
2070 * if (bar) { ... }
2071 * else { ... }
2072 * function baz() { ... }
2073 * }
2074 * }
2075 *
2076 * to:
2077 *
2078 * function foo() {
2079 * if (c) {
2080 * let foo = function foo() { ... }
2081 * let baz = function baz() { ... }
2082 * if (bar) { ... }
2083 * else { ... }
2084 * }
2085 * }
2086 *
2087 * But without the TDZ checks.
2088 */
2089
2090 if (!environment.size()) {
2091 RELEASE_ASSERT(!functionStack.size());
2092 return;
2093 }
2094
2095 if (!functionStack.size())
2096 return;
2097
2098 SymbolTable* symbolTable = m_lexicalScopeStack.last().m_symbolTable;
2099 RegisterID* scope = m_lexicalScopeStack.last().m_scope;
2100 RefPtr<RegisterID> temp = newTemporary();
2101 int symbolTableIndex = constantSymbolTable ? constantSymbolTable->index() : 0;
2102 for (FunctionMetadataNode* function : functionStack) {
2103 const Identifier& name = function->ident();
2104 auto iter = environment.find(name.impl());
2105 RELEASE_ASSERT(iter != environment.end());
2106 RELEASE_ASSERT(iter->value.isFunction());
2107 // We purposefully don't hold the symbol table lock around this loop because emitNewFunctionExpressionCommon may GC.
2108 SymbolTableEntry entry = symbolTable->get(NoLockingNecessary, name.impl());
2109 RELEASE_ASSERT(!entry.isNull());
2110 emitNewFunctionExpressionCommon(temp.get(), function);
2111 bool isLexicallyScoped = true;
2112 emitPutToScope(scope, variableForLocalEntry(name, entry, symbolTableIndex, isLexicallyScoped), temp.get(), DoNotThrowIfNotFound, InitializationMode::Initialization);
2113 }
2114}
2115
2116void BytecodeGenerator::hoistSloppyModeFunctionIfNecessary(const Identifier& functionName)
2117{
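    // In sloppy mode, a block-level function declaration also creates a var-scoped binding in the
    // enclosing function or eval (Annex B.3.3); when the declaration is evaluated, its current
    // value is copied up into that outer binding. For example (illustrative):
    //     function f() { { function g() { } } return g; } // sloppy mode: g is visible outside the block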
2118 if (m_scopeNode->hasSloppyModeHoistedFunction(functionName.impl())) {
2119 if (codeType() != EvalCode) {
2120 Variable currentFunctionVariable = variable(functionName);
2121 RefPtr<RegisterID> currentValue;
2122 if (RegisterID* local = currentFunctionVariable.local())
2123 currentValue = local;
2124 else {
2125 RefPtr<RegisterID> scope = emitResolveScope(nullptr, currentFunctionVariable);
2126 currentValue = emitGetFromScope(newTemporary(), scope.get(), currentFunctionVariable, DoNotThrowIfNotFound);
2127 }
2128
2129 ASSERT(m_varScopeLexicalScopeStackIndex);
2130 ASSERT(*m_varScopeLexicalScopeStackIndex < m_lexicalScopeStack.size());
2131 LexicalScopeStackEntry varScope = m_lexicalScopeStack[*m_varScopeLexicalScopeStackIndex];
2132 SymbolTable* varSymbolTable = varScope.m_symbolTable;
2133 ASSERT(varSymbolTable->scopeType() == SymbolTable::ScopeType::VarScope);
2134 SymbolTableEntry entry = varSymbolTable->get(NoLockingNecessary, functionName.impl());
2135 if (functionName == propertyNames().arguments && entry.isNull()) {
2136 // "arguments" might be put in the parameter scope when we have a non-simple
2137 // parameter list since "arguments" is visible to expressions inside the
2138 // parameter evaluation list.
2139 // e.g:
2140 // function foo(x = arguments) { { function arguments() { } } }
2141 RELEASE_ASSERT(*m_varScopeLexicalScopeStackIndex > 0);
2142 varScope = m_lexicalScopeStack[*m_varScopeLexicalScopeStackIndex - 1];
2143 SymbolTable* parameterSymbolTable = varScope.m_symbolTable;
2144 entry = parameterSymbolTable->get(NoLockingNecessary, functionName.impl());
2145 }
2146 RELEASE_ASSERT(!entry.isNull());
2147 bool isLexicallyScoped = false;
2148 emitPutToScope(varScope.m_scope, variableForLocalEntry(functionName, entry, varScope.m_symbolTableConstantIndex, isLexicallyScoped), currentValue.get(), DoNotThrowIfNotFound, InitializationMode::NotInitialization);
2149 } else {
2150 Variable currentFunctionVariable = variable(functionName);
2151 RefPtr<RegisterID> currentValue;
2152 if (RegisterID* local = currentFunctionVariable.local())
2153 currentValue = local;
2154 else {
2155 RefPtr<RegisterID> scope = emitResolveScope(nullptr, currentFunctionVariable);
2156 currentValue = emitGetFromScope(newTemporary(), scope.get(), currentFunctionVariable, DoNotThrowIfNotFound);
2157 }
2158
2159 RefPtr<RegisterID> scopeId = emitResolveScopeForHoistingFuncDeclInEval(nullptr, functionName);
2160 RefPtr<RegisterID> checkResult = emitIsUndefined(newTemporary(), scopeId.get());
2161
2162 Ref<Label> isNotVarScopeLabel = newLabel();
2163 emitJumpIfTrue(checkResult.get(), isNotVarScopeLabel.get());
2164
2165 // Put to outer scope
2166 emitPutToScope(scopeId.get(), functionName, currentValue.get(), DoNotThrowIfNotFound, InitializationMode::NotInitialization);
2167 emitLabel(isNotVarScopeLabel.get());
2168
2169 }
2170 }
2171}
2172
2173RegisterID* BytecodeGenerator::emitResolveScopeForHoistingFuncDeclInEval(RegisterID* dst, const Identifier& property)
2174{
2175 ASSERT(m_codeType == EvalCode);
2176
2177 dst = finalDestination(dst);
2178 OpResolveScopeForHoistingFuncDeclInEval::emit(this, kill(dst), m_topMostScope, addConstant(property));
2179 return dst;
2180}
2181
2182void BytecodeGenerator::popLexicalScope(VariableEnvironmentNode* node)
2183{
2184 VariableEnvironment& environment = node->lexicalVariables();
2185 popLexicalScopeInternal(environment);
2186}
2187
2188void BytecodeGenerator::popLexicalScopeInternal(VariableEnvironment& environment)
2189{
2190 // NOTE: This function only makes sense for scopes that aren't ScopeRegisterType::Var (only function name scope right now is ScopeRegisterType::Var).
2191 // This doesn't make sense for ScopeRegisterType::Var because we deref RegisterIDs here.
2192 if (!environment.size())
2193 return;
2194
2195 if (shouldEmitDebugHooks())
2196 environment.markAllVariablesAsCaptured();
2197
2198 auto stackEntry = m_lexicalScopeStack.takeLast();
2199 SymbolTable* symbolTable = stackEntry.m_symbolTable;
2200 bool hasCapturedVariables = false;
2201 for (auto& entry : environment) {
2202 if (entry.value.isCaptured()) {
2203 hasCapturedVariables = true;
2204 continue;
2205 }
2206 SymbolTableEntry symbolTableEntry = symbolTable->get(NoLockingNecessary, entry.key.get());
2207 ASSERT(!symbolTableEntry.isNull());
2208 VarOffset offset = symbolTableEntry.varOffset();
2209 ASSERT(offset.isStack());
2210 RegisterID* local = &registerFor(offset.stackOffset());
2211 local->deref();
2212 }
2213
2214 if (hasCapturedVariables) {
2215 RELEASE_ASSERT(stackEntry.m_scope);
2216 emitPopScope(scopeRegister(), stackEntry.m_scope);
2217 popLocalControlFlowScope();
2218 stackEntry.m_scope->deref();
2219 }
2220
2221 m_TDZStack.removeLast();
2222 m_cachedVariablesUnderTDZ = { };
2223}
2224
2225void BytecodeGenerator::prepareLexicalScopeForNextForLoopIteration(VariableEnvironmentNode* node, RegisterID* loopSymbolTable)
2226{
2227 VariableEnvironment& environment = node->lexicalVariables();
2228 if (!environment.size())
2229 return;
2230 if (shouldEmitDebugHooks())
2231 environment.markAllVariablesAsCaptured();
2232 if (!environment.hasCapturedVariables())
2233 return;
2234
2235 RELEASE_ASSERT(loopSymbolTable);
2236
2237    // This function sets up a for loop's activation if any of
2238    // the for loop's lexically declared variables are captured (that is, variables
2239    // declared in the loop header, not the loop body). It makes
2240    // a copy of the current activation and copies the values from the previous
2241    // activation into the new activation, because each iteration of a for loop
2242    // gets a new activation.
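    // For example (illustrative):
    //     let fns = [];
    //     for (let i = 0; i < 3; ++i)
    //         fns.push(() => i);
    //     fns.map(f => f()); // [0, 1, 2] -- each closure captured its own "i"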
2243
2244 auto stackEntry = m_lexicalScopeStack.last();
2245 SymbolTable* symbolTable = stackEntry.m_symbolTable;
2246 RegisterID* loopScope = stackEntry.m_scope;
2247 ASSERT(symbolTable->scopeSize());
2248 ASSERT(loopScope);
2249 Vector<std::pair<RegisterID*, Identifier>> activationValuesToCopyOver;
2250
2251 {
2252 activationValuesToCopyOver.reserveInitialCapacity(symbolTable->scopeSize());
2253
2254 for (auto end = symbolTable->end(NoLockingNecessary), ptr = symbolTable->begin(NoLockingNecessary); ptr != end; ++ptr) {
2255 if (!ptr->value.varOffset().isScope())
2256 continue;
2257
2258 RefPtr<UniquedStringImpl> ident = ptr->key;
2259 Identifier identifier = Identifier::fromUid(m_vm, ident.get());
2260
2261 RegisterID* transitionValue = newBlockScopeVariable();
2262 transitionValue->ref();
2263 emitGetFromScope(transitionValue, loopScope, variableForLocalEntry(identifier, ptr->value, loopSymbolTable->index(), true), DoNotThrowIfNotFound);
2264 activationValuesToCopyOver.uncheckedAppend(std::make_pair(transitionValue, identifier));
2265 }
2266 }
2267
2268 // We need this dynamic behavior of the executing code to ensure
2269 // each loop iteration has a new activation object. (It's pretty ugly).
2270 // Also, this new activation needs to be assigned to the same register
2271 // as the previous scope because the loop body is compiled under
2272 // the assumption that the scope's register index is constant even
2273 // though the value in that register will change on each loop iteration.
2274 RefPtr<RegisterID> parentScope = emitGetParentScope(newTemporary(), loopScope);
2275 move(scopeRegister(), parentScope.get());
2276
2277 OpCreateLexicalEnvironment::emit(this, loopScope, scopeRegister(), loopSymbolTable, addConstantValue(jsTDZValue()));
2278
2279 move(scopeRegister(), loopScope);
2280
2281 {
2282 for (auto pair : activationValuesToCopyOver) {
2283 const Identifier& identifier = pair.second;
2284 SymbolTableEntry entry = symbolTable->get(NoLockingNecessary, identifier.impl());
2285 RELEASE_ASSERT(!entry.isNull());
2286 RegisterID* transitionValue = pair.first;
2287 emitPutToScope(loopScope, variableForLocalEntry(identifier, entry, loopSymbolTable->index(), true), transitionValue, DoNotThrowIfNotFound, InitializationMode::NotInitialization);
2288 transitionValue->deref();
2289 }
2290 }
2291}
2292
2293Variable BytecodeGenerator::variable(const Identifier& property, ThisResolutionType thisResolutionType)
2294{
2295 if (property == propertyNames().thisIdentifier && thisResolutionType == ThisResolutionType::Local)
2296 return Variable(property, VarOffset(thisRegister()->virtualRegister()), thisRegister(), static_cast<unsigned>(PropertyAttribute::ReadOnly), Variable::SpecialVariable, 0, false);
2297
2298 // We can optimize lookups if the lexical variable is found before a "with" or "catch"
2299 // scope because we're guaranteed static resolution. If we have to pass through
2300    // a "with" or "catch" scope we lose this guarantee.
2301 // We can't optimize cases like this:
2302 // {
2303 // let x = ...;
2304 // with (o) {
2305 // doSomethingWith(x);
2306 // }
2307 // }
2308    // Because we can't guarantee static resolution on x.
2309 // But, in this case, we are guaranteed static resolution:
2310 // {
2311 // let x = ...;
2312 // with (o) {
2313 // let x = ...;
2314 // doSomethingWith(x);
2315 // }
2316 // }
2317 for (unsigned i = m_lexicalScopeStack.size(); i--; ) {
2318 auto& stackEntry = m_lexicalScopeStack[i];
2319 if (stackEntry.m_isWithScope)
2320 return Variable(property);
2321 SymbolTable* symbolTable = stackEntry.m_symbolTable;
2322 SymbolTableEntry symbolTableEntry = symbolTable->get(NoLockingNecessary, property.impl());
2323 if (symbolTableEntry.isNull())
2324 continue;
2325 bool resultIsCallee = false;
2326 if (symbolTable->scopeType() == SymbolTable::ScopeType::FunctionNameScope) {
2327 if (m_usesNonStrictEval) {
2328 // We don't know if an eval has introduced a "var" named the same thing as the function name scope variable name.
2329 // We resort to dynamic lookup to answer this question.
2330 Variable result = Variable(property);
2331 return result;
2332 }
2333 resultIsCallee = true;
2334 }
2335 Variable result = variableForLocalEntry(property, symbolTableEntry, stackEntry.m_symbolTableConstantIndex, symbolTable->scopeType() == SymbolTable::ScopeType::LexicalScope);
2336 if (resultIsCallee)
2337 result.setIsReadOnly();
2338 return result;
2339 }
2340
2341 return Variable(property);
2342}
2343
2344Variable BytecodeGenerator::variableForLocalEntry(
2345 const Identifier& property, const SymbolTableEntry& entry, int symbolTableConstantIndex, bool isLexicallyScoped)
2346{
2347 VarOffset offset = entry.varOffset();
2348
2349 RegisterID* local;
2350 if (offset.isStack())
2351 local = &registerFor(offset.stackOffset());
2352 else
2353 local = nullptr;
2354
2355 return Variable(property, offset, local, entry.getAttributes(), Variable::NormalVariable, symbolTableConstantIndex, isLexicallyScoped);
2356}
2357
2358void BytecodeGenerator::createVariable(
2359 const Identifier& property, VarKind varKind, SymbolTable* symbolTable, ExistingVariableMode existingVariableMode)
2360{
2361 ASSERT(property != propertyNames().thisIdentifier);
2362 SymbolTableEntry entry = symbolTable->get(NoLockingNecessary, property.impl());
2363
2364 if (!entry.isNull()) {
2365 if (existingVariableMode == IgnoreExisting)
2366 return;
2367
2368 // Do some checks to ensure that the variable we're being asked to create is sufficiently
2369 // compatible with the one we have already created.
2370
2371 VarOffset offset = entry.varOffset();
2372
2373 // We can't change our minds about whether it's captured.
2374 if (offset.kind() != varKind) {
2375 dataLog(
2376 "Trying to add variable called ", property, " as ", varKind,
2377 " but it was already added as ", offset, ".\n");
2378 RELEASE_ASSERT_NOT_REACHED();
2379 }
2380
2381 return;
2382 }
2383
2384 VarOffset varOffset;
2385 if (varKind == VarKind::Scope)
2386 varOffset = VarOffset(symbolTable->takeNextScopeOffset(NoLockingNecessary));
2387 else {
2388 ASSERT(varKind == VarKind::Stack);
2389 varOffset = VarOffset(virtualRegisterForLocal(m_calleeLocals.size()));
2390 }
2391 SymbolTableEntry newEntry(varOffset, 0);
2392 symbolTable->add(NoLockingNecessary, property.impl(), newEntry);
2393
2394 if (varKind == VarKind::Stack) {
2395 RegisterID* local = addVar();
2396 RELEASE_ASSERT(local->index() == varOffset.stackOffset().offset());
2397 }
2398}
2399
2400RegisterID* BytecodeGenerator::emitOverridesHasInstance(RegisterID* dst, RegisterID* constructor, RegisterID* hasInstanceValue)
2401{
2402 OpOverridesHasInstance::emit(this, dst, constructor, hasInstanceValue);
2403 return dst;
2404}
2405
2406// Indicates the least upper bound of resolve type based on local scope. The bytecode linker
2407// will start with this ResolveType and compute the least upper bound including intercepting scopes.
2408ResolveType BytecodeGenerator::resolveType()
2409{
2410 for (unsigned i = m_lexicalScopeStack.size(); i--; ) {
2411 if (m_lexicalScopeStack[i].m_isWithScope)
2412 return Dynamic;
2413 if (m_usesNonStrictEval && m_lexicalScopeStack[i].m_symbolTable->scopeType() == SymbolTable::ScopeType::FunctionNameScope) {
2414 // We never want to assign to a FunctionNameScope. Returning Dynamic here achieves this goal.
2415 // If we aren't in non-strict eval mode, then NodesCodeGen needs to take care not to emit
2416 // a put_to_scope with the destination being the function name scope variable.
2417 return Dynamic;
2418 }
2419 }
2420
2421 if (m_usesNonStrictEval)
2422 return GlobalPropertyWithVarInjectionChecks;
2423 return GlobalProperty;
2424}
2425
2426RegisterID* BytecodeGenerator::emitResolveScope(RegisterID* dst, const Variable& variable)
2427{
2428 switch (variable.offset().kind()) {
2429 case VarKind::Stack:
2430 return nullptr;
2431
2432 case VarKind::DirectArgument:
2433 return argumentsRegister();
2434
2435 case VarKind::Scope: {
2436 // This always refers to the activation that *we* allocated, and not the current scope that code
2437 // lives in. Note that this will change once we have proper support for block scoping. Once that
2438 // changes, it will be correct for this code to return scopeRegister(). The only reason why we
2439 // don't do that already is that m_lexicalEnvironment is required by ConstDeclNode. ConstDeclNode
2440 // requires weird things because it is a shameful pile of nonsense, but block scoping would make
2441 // that code sensible and obviate the need for us to do bad things.
2442 for (unsigned i = m_lexicalScopeStack.size(); i--; ) {
2443 auto& stackEntry = m_lexicalScopeStack[i];
2444 // We should not resolve a variable to VarKind::Scope if a "with" scope lies in between the current
2445 // scope and the resolved scope.
2446 RELEASE_ASSERT(!stackEntry.m_isWithScope);
2447
2448 if (stackEntry.m_symbolTable->get(NoLockingNecessary, variable.ident().impl()).isNull())
2449 continue;
2450
2451 RegisterID* scope = stackEntry.m_scope;
2452 RELEASE_ASSERT(scope);
2453 return scope;
2454 }
2455
2456 RELEASE_ASSERT_NOT_REACHED();
2457 return nullptr;
2458
2459 }
2460 case VarKind::Invalid:
2461 // Indicates non-local resolution.
2462
2463 dst = tempDestination(dst);
2464 OpResolveScope::emit(this, kill(dst), scopeRegister(), addConstant(variable.ident()), resolveType(), localScopeDepth());
2465 m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
2466 return dst;
2467 }
2468
2469 RELEASE_ASSERT_NOT_REACHED();
2470 return nullptr;
2471}
2472
2473RegisterID* BytecodeGenerator::emitGetFromScope(RegisterID* dst, RegisterID* scope, const Variable& variable, ResolveMode resolveMode)
2474{
2475 switch (variable.offset().kind()) {
2476 case VarKind::Stack:
2477 return move(dst, variable.local());
2478
2479 case VarKind::DirectArgument: {
2480 OpGetFromArguments::emit(this, kill(dst), scope, variable.offset().capturedArgumentsOffset().offset());
2481 return dst;
2482 }
2483
2484 case VarKind::Scope:
2485 case VarKind::Invalid: {
2486 OpGetFromScope::emit(
2487 this,
2488 kill(dst),
2489 scope,
2490 addConstant(variable.ident()),
2491 GetPutInfo(resolveMode, variable.offset().isScope() ? LocalClosureVar : resolveType(), InitializationMode::NotInitialization),
2492 localScopeDepth(),
2493 variable.offset().isScope() ? variable.offset().scopeOffset().offset() : 0);
2494 m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
2495 return dst;
2496 } }
2497
2498 RELEASE_ASSERT_NOT_REACHED();
2499}
2500
2501RegisterID* BytecodeGenerator::emitPutToScope(RegisterID* scope, const Variable& variable, RegisterID* value, ResolveMode resolveMode, InitializationMode initializationMode)
2502{
2503 switch (variable.offset().kind()) {
2504 case VarKind::Stack:
2505 move(variable.local(), value);
2506 return value;
2507
2508 case VarKind::DirectArgument:
2509 OpPutToArguments::emit(this, scope, variable.offset().capturedArgumentsOffset().offset(), value);
2510 return value;
2511
2512 case VarKind::Scope:
2513 case VarKind::Invalid: {
2514 GetPutInfo getPutInfo(0);
2515 SymbolTableOrScopeDepth symbolTableOrScopeDepth;
2516 ScopeOffset offset;
2517 if (variable.offset().isScope()) {
2518 offset = variable.offset().scopeOffset();
2519 getPutInfo = GetPutInfo(resolveMode, LocalClosureVar, initializationMode);
2520 symbolTableOrScopeDepth = SymbolTableOrScopeDepth::symbolTable(VirtualRegister { variable.symbolTableConstantIndex() });
2521 } else {
2522 ASSERT(resolveType() != LocalClosureVar);
2523 getPutInfo = GetPutInfo(resolveMode, resolveType(), initializationMode);
2524 symbolTableOrScopeDepth = SymbolTableOrScopeDepth::scopeDepth(localScopeDepth());
2525 }
2526 OpPutToScope::emit(this, scope, addConstant(variable.ident()), value, getPutInfo, symbolTableOrScopeDepth, !!offset ? offset.offset() : 0);
2527 m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
2528 return value;
2529 } }
2530
2531 RELEASE_ASSERT_NOT_REACHED();
2532}
2533
2534RegisterID* BytecodeGenerator::initializeVariable(const Variable& variable, RegisterID* value)
2535{
2536 RELEASE_ASSERT(variable.offset().kind() != VarKind::Invalid);
2537 RegisterID* scope = emitResolveScope(nullptr, variable);
2538 return emitPutToScope(scope, variable, value, ThrowIfNotFound, InitializationMode::NotInitialization);
2539}
2540
2541RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* basePrototype)
2542{
2543 OpInstanceof::emit(this, dst, value, basePrototype);
2544 return dst;
2545}
2546
2547RegisterID* BytecodeGenerator::emitInstanceOfCustom(RegisterID* dst, RegisterID* value, RegisterID* constructor, RegisterID* hasInstanceValue)
2548{
2549 OpInstanceofCustom::emit(this, dst, value, constructor, hasInstanceValue);
2550 return dst;
2551}
2552
2553RegisterID* BytecodeGenerator::emitInByVal(RegisterID* dst, RegisterID* property, RegisterID* base)
2554{
2555 OpInByVal::emit(this, dst, base, property);
2556 return dst;
2557}
2558
2559RegisterID* BytecodeGenerator::emitInById(RegisterID* dst, RegisterID* base, const Identifier& property)
2560{
2561 OpInById::emit(this, dst, base, addConstant(property));
2562 return dst;
2563}
2564
2565RegisterID* BytecodeGenerator::emitTryGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
2566{
2567 ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties are not supported with tryGetById.");
2568
2569 OpTryGetById::emit(this, kill(dst), base, addConstant(property));
2570 return dst;
2571}
2572
2573RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
2574{
2575 ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with get_by_val.");
2576
2577 OpGetById::emit(this, kill(dst), base, addConstant(property));
2578 m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
2579 return dst;
2580}
2581
2582RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, RegisterID* thisVal, const Identifier& property)
2583{
2584 ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with get_by_val.");
2585
2586 OpGetByIdWithThis::emit(this, kill(dst), base, thisVal, addConstant(property));
2587 return dst;
2588}
2589
2590RegisterID* BytecodeGenerator::emitDirectGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
2591{
2592 ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with get_by_val_direct.");
2593
2594 OpGetByIdDirect::emit(this, kill(dst), base, addConstant(property));
2595 m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
2596 return dst;
2597}
2598
2599RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
2600{
2601 ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with put_by_val.");
2602
2603 unsigned propertyIndex = addConstant(property);
2604
2605 m_staticPropertyAnalyzer.putById(base, propertyIndex);
2606
2607 OpPutById::emit(this, base, propertyIndex, value, PutByIdNone); // is not direct
2608 m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
2609
2610 return value;
2611}
2612
2613RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, RegisterID* thisValue, const Identifier& property, RegisterID* value)
2614{
2615 ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with put_by_val.");
2616
2617 unsigned propertyIndex = addConstant(property);
2618
2619 OpPutByIdWithThis::emit(this, base, thisValue, propertyIndex, value);
2620
2621 return value;
2622}
2623
2624RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value, PropertyNode::PutType putType)
2625{
2626 ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with put_by_val(direct).");
2627
2628 unsigned propertyIndex = addConstant(property);
2629
2630 m_staticPropertyAnalyzer.putById(base, propertyIndex);
2631
2632 PutByIdFlags type = (putType == PropertyNode::KnownDirect || property != m_vm->propertyNames->underscoreProto) ? PutByIdIsDirect : PutByIdNone;
2633 OpPutById::emit(this, base, propertyIndex, value, type);
2634 m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
2635 return value;
2636}
2637
2638void BytecodeGenerator::emitPutGetterById(RegisterID* base, const Identifier& property, unsigned attributes, RegisterID* getter)
2639{
2640 unsigned propertyIndex = addConstant(property);
2641 m_staticPropertyAnalyzer.putById(base, propertyIndex);
2642
2643 OpPutGetterById::emit(this, base, propertyIndex, attributes, getter);
2644}
2645
2646void BytecodeGenerator::emitPutSetterById(RegisterID* base, const Identifier& property, unsigned attributes, RegisterID* setter)
2647{
2648 unsigned propertyIndex = addConstant(property);
2649 m_staticPropertyAnalyzer.putById(base, propertyIndex);
2650
2651 OpPutSetterById::emit(this, base, propertyIndex, attributes, setter);
2652}
2653
2654void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, unsigned attributes, RegisterID* getter, RegisterID* setter)
2655{
2656 unsigned propertyIndex = addConstant(property);
2657
2658 m_staticPropertyAnalyzer.putById(base, propertyIndex);
2659
2660 OpPutGetterSetterById::emit(this, base, propertyIndex, attributes, getter, setter);
2661}
2662
2663void BytecodeGenerator::emitPutGetterByVal(RegisterID* base, RegisterID* property, unsigned attributes, RegisterID* getter)
2664{
2665 OpPutGetterByVal::emit(this, base, property, attributes, getter);
2666}
2667
2668void BytecodeGenerator::emitPutSetterByVal(RegisterID* base, RegisterID* property, unsigned attributes, RegisterID* setter)
2669{
2670 OpPutSetterByVal::emit(this, base, property, attributes, setter);
2671}
2672
2673void BytecodeGenerator::emitPutGeneratorFields(RegisterID* nextFunction)
2674{
2675    // FIXME: Currently, we just create an object and store generator-related fields as its properties for simplicity.
2676    // To make this efficient, we will introduce a JSGenerator class, add a new_generator opcode, and use its C++ fields instead of these private properties.
2677 // https://bugs.webkit.org/show_bug.cgi?id=151545
2678
2679 emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorNextPrivateName(), nextFunction, PropertyNode::KnownDirect);
2680
2681    // We do not store 'this' for an arrow function within a constructor,
2682    // because it might not be initialized yet if super() is called later.
2683 if (!(isDerivedConstructorContext() && m_codeBlock->parseMode() == SourceParseMode::AsyncArrowFunctionMode))
2684 emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorThisPrivateName(), &m_thisRegister, PropertyNode::KnownDirect);
2685
2686 emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorStatePrivateName(), emitLoad(nullptr, jsNumber(0)), PropertyNode::KnownDirect);
2687
2688 emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorFramePrivateName(), emitLoad(nullptr, jsNull()), PropertyNode::KnownDirect);
2689}
2690
2691void BytecodeGenerator::emitPutAsyncGeneratorFields(RegisterID* nextFunction)
2692{
2693 ASSERT(isAsyncGeneratorWrapperParseMode(parseMode()));
2694
2695 emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorNextPrivateName(), nextFunction, PropertyNode::KnownDirect);
2696
2697 emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorThisPrivateName(), &m_thisRegister, PropertyNode::KnownDirect);
2698
2699 emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorStatePrivateName(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSAsyncGeneratorFunction::AsyncGeneratorState::SuspendedStart))), PropertyNode::KnownDirect);
2700
2701 emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorFramePrivateName(), emitLoad(nullptr, jsNull()), PropertyNode::KnownDirect);
2702
2703 emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().asyncGeneratorSuspendReasonPrivateName(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSAsyncGeneratorFunction::AsyncGeneratorSuspendReason::None))), PropertyNode::KnownDirect);
2704
2705 emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().asyncGeneratorQueueFirstPrivateName(), emitLoad(nullptr, jsNull()), PropertyNode::KnownDirect);
2706 emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().asyncGeneratorQueueLastPrivateName(), emitLoad(nullptr, jsNull()), PropertyNode::KnownDirect);
2707}
2708
2709RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
2710{
2711 OpDelById::emit(this, dst, base, addConstant(property));
2712 return dst;
2713}
2714
2715RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
2716{
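    // Fast path summary (descriptive): if 'property' is the loop variable of an enclosing
    // for-in, we can read through the context instead -- the raw index register for indexed
    // contexts, op_get_direct_pname for structure contexts -- and we record the instruction
    // offset so the context can later rewrite the access to a generic get_by_val if the loop
    // variable turns out to be invalidated.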
2717 for (size_t i = m_forInContextStack.size(); i--; ) {
2718 ForInContext& context = m_forInContextStack[i].get();
2719 if (context.local() != property)
2720 continue;
2721
2722 if (context.isIndexedForInContext()) {
2723 auto& indexedContext = context.asIndexedForInContext();
2724 OpGetByVal::emit<OpcodeSize::Wide>(this, kill(dst), base, indexedContext.index());
2725 indexedContext.addGetInst(m_lastInstruction.offset(), property->index());
2726 return dst;
2727 }
2728
2729 StructureForInContext& structureContext = context.asStructureForInContext();
2730 OpGetDirectPname::emit<OpcodeSize::Wide>(this, kill(dst), base, property, structureContext.index(), structureContext.enumerator());
2731
2732 structureContext.addGetInst(m_lastInstruction.offset(), property->index());
2733 return dst;
2734 }
2735
2736 OpGetByVal::emit(this, kill(dst), base, property);
2737 return dst;
2738}
2739
2740RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* thisValue, RegisterID* property)
2741{
2742 OpGetByValWithThis::emit(this, kill(dst), base, thisValue, property);
2743 return dst;
2744}
2745
2746RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
2747{
2748 OpPutByVal::emit(this, base, property, value);
2749 return value;
2750}
2751
2752RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* thisValue, RegisterID* property, RegisterID* value)
2753{
2754 OpPutByValWithThis::emit(this, base, thisValue, property, value);
2755 return value;
2756}
2757
2758RegisterID* BytecodeGenerator::emitDirectPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
2759{
2760 OpPutByValDirect::emit(this, base, property, value);
2761 return value;
2762}
2763
2764RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
2765{
2766 OpDelByVal::emit(this, dst, base, property);
2767 return dst;
2768}
2769
2770void BytecodeGenerator::emitSuperSamplerBegin()
2771{
2772 OpSuperSamplerBegin::emit(this);
2773}
2774
2775void BytecodeGenerator::emitSuperSamplerEnd()
2776{
2777 OpSuperSamplerEnd::emit(this);
2778}
2779
2780RegisterID* BytecodeGenerator::emitIdWithProfile(RegisterID* src, SpeculatedType profile)
2781{
2782 OpIdentityWithProfile::emit(this, src, static_cast<uint32_t>(profile >> 32), static_cast<uint32_t>(profile));
2783 return src;
2784}
2785
2786void BytecodeGenerator::emitUnreachable()
2787{
2788 OpUnreachable::emit(this);
2789}
2790
2791RegisterID* BytecodeGenerator::emitGetArgument(RegisterID* dst, int32_t index)
2792{
2793 OpGetArgument::emit(this, dst, index + 1 /* Including |this| */);
2794 return dst;
2795}
2796
2797RegisterID* BytecodeGenerator::emitCreateThis(RegisterID* dst)
2798{
2799 OpCreateThis::emit(this, dst, dst, 0);
2800 m_staticPropertyAnalyzer.createThis(dst, m_lastInstruction);
2801
2802 m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
2803 return dst;
2804}
2805
2806void BytecodeGenerator::emitTDZCheck(RegisterID* target)
2807{
2808 OpCheckTdz::emit(this, target);
2809}
2810
2811bool BytecodeGenerator::needsTDZCheck(const Variable& variable)
2812{
2813 for (unsigned i = m_TDZStack.size(); i--;) {
2814 auto iter = m_TDZStack[i].find(variable.ident().impl());
2815 if (iter == m_TDZStack[i].end())
2816 continue;
2817 return iter->value != TDZNecessityLevel::NotNeeded;
2818 }
2819
2820 return false;
2821}
2822
2823void BytecodeGenerator::emitTDZCheckIfNecessary(const Variable& variable, RegisterID* target, RegisterID* scope)
2824{
2825 if (needsTDZCheck(variable)) {
2826 if (target)
2827 emitTDZCheck(target);
2828 else {
2829 RELEASE_ASSERT(!variable.isLocal() && scope);
2830 RefPtr<RegisterID> result = emitGetFromScope(newTemporary(), scope, variable, DoNotThrowIfNotFound);
2831 emitTDZCheck(result.get());
2832 }
2833 }
2834}
2835
2836void BytecodeGenerator::liftTDZCheckIfPossible(const Variable& variable)
2837{
2838 RefPtr<UniquedStringImpl> identifier(variable.ident().impl());
2839 for (unsigned i = m_TDZStack.size(); i--;) {
2840 auto iter = m_TDZStack[i].find(identifier);
2841 if (iter != m_TDZStack[i].end()) {
2842 if (iter->value == TDZNecessityLevel::Optimize) {
2843 m_cachedVariablesUnderTDZ = { };
2844 iter->value = TDZNecessityLevel::NotNeeded;
2845 }
2846 break;
2847 }
2848 }
2849}
2850
2851void BytecodeGenerator::pushTDZVariables(const VariableEnvironment& environment, TDZCheckOptimization optimization, TDZRequirement requirement)
2852{
2853 if (!environment.size())
2854 return;
2855
2856 TDZNecessityLevel level;
2857 if (requirement == TDZRequirement::UnderTDZ) {
2858 if (optimization == TDZCheckOptimization::Optimize)
2859 level = TDZNecessityLevel::Optimize;
2860 else
2861 level = TDZNecessityLevel::DoNotOptimize;
2862 } else
2863 level = TDZNecessityLevel::NotNeeded;
2864
2865 TDZMap map;
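    // Function declarations are hoisted and initialized eagerly, so they are never under
    // TDZ regardless of the level computed above.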
2866 for (const auto& entry : environment)
2867 map.add(entry.key, entry.value.isFunction() ? TDZNecessityLevel::NotNeeded : level);
2868
2869 m_TDZStack.append(WTFMove(map));
2870 m_cachedVariablesUnderTDZ = { };
2871}
2872
2873Optional<CompactVariableMap::Handle> BytecodeGenerator::getVariablesUnderTDZ()
2874{
2875 if (m_cachedVariablesUnderTDZ) {
2876 if (!m_hasCachedVariablesUnderTDZ) {
2877 ASSERT(m_cachedVariablesUnderTDZ.environment().toVariableEnvironment().isEmpty());
2878 return WTF::nullopt;
2879 }
2880 return m_cachedVariablesUnderTDZ;
2881 }
2882
2883 // We keep track of variablesThatDontNeedTDZ in this algorithm to prevent
2884 // reporting that "x" is under TDZ if this function is called at "...".
2885 //
2886 // {
2887 // {
2888 // let x;
2889 // ...
2890 // }
2891 // let x;
2892 // }
2893 SmallPtrSet<UniquedStringImpl*, 16> variablesThatDontNeedTDZ;
2894 VariableEnvironment environment;
2895 for (unsigned i = m_TDZStack.size(); i--; ) {
2896 auto& map = m_TDZStack[i];
2897 for (auto& entry : map) {
2898 if (entry.value != TDZNecessityLevel::NotNeeded) {
2899 if (!variablesThatDontNeedTDZ.contains(entry.key.get()))
2900 environment.add(entry.key.get());
2901 } else
2902 variablesThatDontNeedTDZ.add(entry.key.get());
2903 }
2904 }
2905
2906 m_cachedVariablesUnderTDZ = m_vm->m_compactVariableMap->get(environment);
2907 m_hasCachedVariablesUnderTDZ = !environment.isEmpty();
2908 if (!m_hasCachedVariablesUnderTDZ)
2909 return WTF::nullopt;
2910
2911 return m_cachedVariablesUnderTDZ;
2912}
2913
2914void BytecodeGenerator::preserveTDZStack(BytecodeGenerator::PreservedTDZStack& preservedStack)
2915{
2916 preservedStack.m_preservedTDZStack = m_TDZStack;
2917}
2918
2919void BytecodeGenerator::restoreTDZStack(const BytecodeGenerator::PreservedTDZStack& preservedStack)
2920{
2921 m_TDZStack = preservedStack.m_preservedTDZStack;
2922 m_cachedVariablesUnderTDZ = { };
2923}
2924
2925RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
2926{
2927 OpNewObject::emit(this, dst, 0);
2928 m_staticPropertyAnalyzer.newObject(dst, m_lastInstruction);
2929
2930 return dst;
2931}
2932
2933JSValue BytecodeGenerator::addBigIntConstant(const Identifier& identifier, uint8_t radix, bool sign)
2934{
2935 return m_bigIntMap.ensure(BigIntMapEntry(identifier.impl(), radix, sign), [&] {
2936 auto scope = DECLARE_CATCH_SCOPE(*vm());
2937 auto parseIntSign = sign ? JSBigInt::ParseIntSign::Signed : JSBigInt::ParseIntSign::Unsigned;
2938 JSBigInt* bigIntInMap = JSBigInt::parseInt(nullptr, *vm(), identifier.string(), radix, JSBigInt::ErrorParseMode::ThrowExceptions, parseIntSign);
        // FIXME: [ESNext] Enable a way to throw an error during the BytecodeGenerator step
2940 // https://bugs.webkit.org/show_bug.cgi?id=180139
2941 scope.assertNoException();
2942 RELEASE_ASSERT(bigIntInMap);
2943 addConstantValue(bigIntInMap);
2944
2945 return bigIntInMap;
2946 }).iterator->value;
2947}
2948
2949JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
2950{
2951 JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
2952 if (!stringInMap) {
2953 stringInMap = jsString(vm(), identifier.string());
2954 addConstantValue(stringInMap);
2955 }
2956 return stringInMap;
2957}
2958
2959RegisterID* BytecodeGenerator::addTemplateObjectConstant(Ref<TemplateObjectDescriptor>&& descriptor, int endOffset)
2960{
2961 auto result = m_templateObjectDescriptorSet.add(WTFMove(descriptor));
2962 JSTemplateObjectDescriptor* descriptorValue = m_templateDescriptorMap.ensure(endOffset, [&] {
2963 return JSTemplateObjectDescriptor::create(*vm(), result.iterator->copyRef(), endOffset);
2964 }).iterator->value;
2965 int index = addConstantIndex();
2966 m_codeBlock->addConstant(descriptorValue);
2967 return &m_constantPoolRegisters[index];
2968}
2969
2970RegisterID* BytecodeGenerator::emitNewArrayBuffer(RegisterID* dst, JSImmutableButterfly* array, IndexingType recommendedIndexingType)
2971{
2972 OpNewArrayBuffer::emit(this, dst, addConstantValue(array), recommendedIndexingType);
2973 return dst;
2974}
2975
2976RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length, IndexingType recommendedIndexingType)
2977{
2978 Vector<RefPtr<RegisterID>, 16, UnsafeVectorOverflow> argv;
2979 for (ElementNode* n = elements; n; n = n->next()) {
2980 if (!length)
2981 break;
2982 length--;
2983 ASSERT(!n->value()->isSpreadExpression());
2984 argv.append(newTemporary());
2985 // op_new_array requires the initial values to be a sequential range of registers
2986 ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() - 1);
2987 emitNode(argv.last().get(), n->value());
2988 }
2989 ASSERT(!length);
2990 OpNewArray::emit(this, dst, argv.size() ? argv[0].get() : VirtualRegister { 0 }, argv.size(), recommendedIndexingType);
2991 return dst;
2992}
2993
2994RegisterID* BytecodeGenerator::emitNewArrayWithSpread(RegisterID* dst, ElementNode* elements)
2995{
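    // Sketch of the encoding used below: for a literal like [a, ...b, c] we allocate one
    // consecutive register per element and set the bit for each spread position in
    // 'bitVector' (here, bit 1), so op_new_array_with_spread knows which operands to flatten.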
2996 BitVector bitVector;
2997 Vector<RefPtr<RegisterID>, 16> argv;
2998 for (ElementNode* node = elements; node; node = node->next()) {
2999 bitVector.set(argv.size(), node->value()->isSpreadExpression());
3000
3001 argv.append(newTemporary());
3002 // op_new_array_with_spread requires the initial values to be a sequential range of registers.
3003 RELEASE_ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() - 1);
3004 }
3005
3006 RELEASE_ASSERT(argv.size());
3007
3008 {
3009 unsigned i = 0;
3010 for (ElementNode* node = elements; node; node = node->next()) {
3011 if (node->value()->isSpreadExpression()) {
3012 ExpressionNode* expression = static_cast<SpreadExpressionNode*>(node->value())->expression();
3013 RefPtr<RegisterID> tmp = newTemporary();
3014 emitNode(tmp.get(), expression);
3015
3016 OpSpread::emit(this, argv[i].get(), tmp.get());
3017 } else {
3018 ExpressionNode* expression = node->value();
3019 emitNode(argv[i].get(), expression);
3020 }
3021 i++;
3022 }
3023 }
3024
3025 unsigned bitVectorIndex = m_codeBlock->addBitVector(WTFMove(bitVector));
3026 OpNewArrayWithSpread::emit(this, dst, argv[0].get(), argv.size(), bitVectorIndex);
3027 return dst;
3028}
3029
3030RegisterID* BytecodeGenerator::emitNewArrayWithSize(RegisterID* dst, RegisterID* length)
3031{
3032 OpNewArrayWithSize::emit(this, dst, length);
3033 return dst;
3034}
3035
3036RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
3037{
3038 OpNewRegexp::emit(this, dst, addConstantValue(regExp));
3039 return dst;
3040}
3041
3042void BytecodeGenerator::emitNewFunctionExpressionCommon(RegisterID* dst, FunctionMetadataNode* function)
3043{
3044 unsigned index = m_codeBlock->addFunctionExpr(makeFunction(function));
3045
3046 switch (function->parseMode()) {
3047 case SourceParseMode::GeneratorWrapperFunctionMode:
3048 case SourceParseMode::GeneratorWrapperMethodMode:
3049 OpNewGeneratorFuncExp::emit(this, dst, scopeRegister(), index);
3050 break;
3051 case SourceParseMode::AsyncFunctionMode:
3052 case SourceParseMode::AsyncMethodMode:
3053 case SourceParseMode::AsyncArrowFunctionMode:
3054 OpNewAsyncFuncExp::emit(this, dst, scopeRegister(), index);
3055 break;
3056 case SourceParseMode::AsyncGeneratorWrapperFunctionMode:
3057 case SourceParseMode::AsyncGeneratorWrapperMethodMode:
3058 OpNewAsyncGeneratorFuncExp::emit(this, dst, scopeRegister(), index);
3059 break;
3060 default:
3061 OpNewFuncExp::emit(this, dst, scopeRegister(), index);
3062 break;
3063 }
3064}
3065
3066RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* dst, FuncExprNode* func)
3067{
3068 emitNewFunctionExpressionCommon(dst, func->metadata());
3069 return dst;
3070}
3071
3072RegisterID* BytecodeGenerator::emitNewArrowFunctionExpression(RegisterID* dst, ArrowFuncExprNode* func)
3073{
3074 ASSERT(SourceParseModeSet(SourceParseMode::ArrowFunctionMode, SourceParseMode::AsyncArrowFunctionMode).contains(func->metadata()->parseMode()));
3075 emitNewFunctionExpressionCommon(dst, func->metadata());
3076 return dst;
3077}
3078
3079RegisterID* BytecodeGenerator::emitNewMethodDefinition(RegisterID* dst, MethodDefinitionNode* func)
3080{
3081 ASSERT(isMethodParseMode(func->metadata()->parseMode()));
3082 emitNewFunctionExpressionCommon(dst, func->metadata());
3083 return dst;
3084}
3085
3086RegisterID* BytecodeGenerator::emitNewDefaultConstructor(RegisterID* dst, ConstructorKind constructorKind, const Identifier& name,
3087 const Identifier& ecmaName, const SourceCode& classSource)
3088{
3089 UnlinkedFunctionExecutable* executable = m_vm->builtinExecutables()->createDefaultConstructor(constructorKind, name);
3090 executable->setInvalidTypeProfilingOffsets();
3091 executable->setEcmaName(ecmaName);
3092 executable->setClassSource(classSource);
3093
3094 unsigned index = m_codeBlock->addFunctionExpr(executable);
3095
3096 OpNewFuncExp::emit(this, dst, scopeRegister(), index);
3097 return dst;
3098}
3099
3100RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionMetadataNode* function)
3101{
3102 unsigned index = m_codeBlock->addFunctionDecl(makeFunction(function));
3103 if (isGeneratorWrapperParseMode(function->parseMode()))
3104 OpNewGeneratorFunc::emit(this, dst, scopeRegister(), index);
3105 else if (function->parseMode() == SourceParseMode::AsyncFunctionMode)
3106 OpNewAsyncFunc::emit(this, dst, scopeRegister(), index);
3107 else if (isAsyncGeneratorWrapperParseMode(function->parseMode()))
3108 OpNewAsyncGeneratorFunc::emit(this, dst, scopeRegister(), index);
3109 else
3110 OpNewFunc::emit(this, dst, scopeRegister(), index);
3111 return dst;
3112}
3113
3114void BytecodeGenerator::emitSetFunctionNameIfNeeded(ExpressionNode* valueNode, RegisterID* value, RegisterID* name)
3115{
3116 if (valueNode->isBaseFuncExprNode()) {
3117 FunctionMetadataNode* metadata = static_cast<BaseFuncExprNode*>(valueNode)->metadata();
3118 if (!metadata->ecmaName().isNull())
3119 return;
3120 } else if (valueNode->isClassExprNode()) {
3121 ClassExprNode* classExprNode = static_cast<ClassExprNode*>(valueNode);
3122 if (!classExprNode->ecmaName().isNull())
3123 return;
3124 if (classExprNode->hasStaticProperty(m_vm->propertyNames->name))
3125 return;
3126 } else
3127 return;
3128
3129 // FIXME: We should use an op_call to an internal function here instead.
3130 // https://bugs.webkit.org/show_bug.cgi?id=155547
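    // One common case this covers (informal example): a computed property name, where the
    // function's name is only known at runtime:
    //     const o = { [key]: function() {} };   // o[key].name reflects the computed key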
3131 OpSetFunctionName::emit(this, value, name);
3132}
3133
3134RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3135{
3136 return emitCall<OpCall>(dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd, debuggableCall);
3137}
3138
3139RegisterID* BytecodeGenerator::emitCallInTailPosition(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3140{
3141 if (m_inTailPosition) {
3142 m_codeBlock->setHasTailCalls();
3143 return emitCall<OpTailCall>(dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd, debuggableCall);
3144 }
3145 return emitCall<OpCall>(dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd, debuggableCall);
3146}
3147
3148RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3149{
3150 return emitCall<OpCallEval>(dst, func, NoExpectedFunction, callArguments, divot, divotStart, divotEnd, debuggableCall);
3151}
3152
3153ExpectedFunction BytecodeGenerator::expectedFunctionForIdentifier(const Identifier& identifier)
3154{
3155 if (identifier == propertyNames().Object || identifier == propertyNames().builtinNames().ObjectPrivateName())
3156 return ExpectObjectConstructor;
3157 if (identifier == propertyNames().Array || identifier == propertyNames().builtinNames().ArrayPrivateName())
3158 return ExpectArrayConstructor;
3159 return NoExpectedFunction;
3160}
3161
3162ExpectedFunction BytecodeGenerator::emitExpectedFunctionSnippet(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, Label& done)
3163{
3164 Ref<Label> realCall = newLabel();
3165 switch (expectedFunction) {
3166 case ExpectObjectConstructor: {
3167 // If the number of arguments is non-zero, then we can't do anything interesting.
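        // Sketch of what this fast path does: a bare Object() / new Object() call becomes
        // op_new_object, guarded by the op_jneq_ptr below so that a replaced global Object
        // constructor still takes the ordinary call path.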
3168 if (callArguments.argumentCountIncludingThis() >= 2)
3169 return NoExpectedFunction;
3170
3171 OpJneqPtr::emit(this, func, Special::ObjectConstructor, realCall->bind(this));
3172
3173 if (dst != ignoredResult())
3174 emitNewObject(dst);
3175 break;
3176 }
3177
3178 case ExpectArrayConstructor: {
3179 // If you're doing anything other than "new Array()" or "new Array(foo)" then we
        // don't inline it, for now. The only reason is that call arguments are in
3181 // the opposite order of what op_new_array expects, so we'd either need to change
3182 // how op_new_array works or we'd need an op_new_array_reverse. Neither of these
3183 // things sounds like it's worth it.
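        // Concretely (informal sketch): Array() / new Array() becomes op_new_array with zero
        // elements, Array(n) / new Array(n) becomes op_new_array_with_size, and anything with
        // more arguments falls through to the generic call/construct path. The op_jneq_ptr
        // below keeps this correct if the global Array constructor has been replaced.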
3184 if (callArguments.argumentCountIncludingThis() > 2)
3185 return NoExpectedFunction;
3186
3187 OpJneqPtr::emit(this, func, Special::ArrayConstructor, realCall->bind(this));
3188
3189 if (dst != ignoredResult()) {
3190 if (callArguments.argumentCountIncludingThis() == 2)
3191 emitNewArrayWithSize(dst, callArguments.argumentRegister(0));
3192 else {
3193 ASSERT(callArguments.argumentCountIncludingThis() == 1);
3194 OpNewArray::emit(this, dst, VirtualRegister { 0 }, 0, ArrayWithUndecided);
3195 }
3196 }
3197 break;
3198 }
3199
3200 default:
3201 ASSERT(expectedFunction == NoExpectedFunction);
3202 return NoExpectedFunction;
3203 }
3204
3205 OpJmp::emit(this, done.bind(this));
3206 emitLabel(realCall.get());
3207
3208 return expectedFunction;
3209}
3210
3211template<typename CallOp>
3212RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3213{
3214 constexpr auto opcodeID = CallOp::opcodeID;
3215 ASSERT(opcodeID == op_call || opcodeID == op_call_eval || opcodeID == op_tail_call);
3216 ASSERT(func->refCount());
3217
3218 // Generate code for arguments.
3219 unsigned argument = 0;
3220 if (callArguments.argumentsNode()) {
3221 ArgumentListNode* n = callArguments.argumentsNode()->m_listNode;
3222 if (n && n->m_expr->isSpreadExpression()) {
3223 RELEASE_ASSERT(!n->m_next);
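            // Shapes handled here (sketch): the call's sole argument is a spread, f(...expr).
            // The nested check below additionally peels f(...[...xs]) down to spreading xs
            // directly, so the intermediate array literal is never materialized.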
3224 auto expression = static_cast<SpreadExpressionNode*>(n->m_expr)->expression();
3225 if (expression->isArrayLiteral()) {
3226 auto* elements = static_cast<ArrayNode*>(expression)->elements();
3227 if (elements && !elements->next() && elements->value()->isSpreadExpression()) {
3228 ExpressionNode* expression = static_cast<SpreadExpressionNode*>(elements->value())->expression();
3229 RefPtr<RegisterID> argumentRegister = emitNode(callArguments.argumentRegister(0), expression);
3230 OpSpread::emit(this, argumentRegister.get(), argumentRegister.get());
3231
3232 return emitCallVarargs<typename VarArgsOp<CallOp>::type>(dst, func, callArguments.thisRegister(), argumentRegister.get(), newTemporary(), 0, divot, divotStart, divotEnd, debuggableCall);
3233 }
3234 }
3235 RefPtr<RegisterID> argumentRegister;
3236 argumentRegister = expression->emitBytecode(*this, callArguments.argumentRegister(0));
3237 RefPtr<RegisterID> thisRegister = move(newTemporary(), callArguments.thisRegister());
3238 return emitCallVarargs<typename VarArgsOp<CallOp>::type>(dst, func, callArguments.thisRegister(), argumentRegister.get(), newTemporary(), 0, divot, divotStart, divotEnd, debuggableCall);
3239 }
3240 for (; n; n = n->m_next)
3241 emitNode(callArguments.argumentRegister(argument++), n);
3242 }
3243
3244 // Reserve space for call frame.
3245 Vector<RefPtr<RegisterID>, CallFrame::headerSizeInRegisters, UnsafeVectorOverflow> callFrame;
3246 for (int i = 0; i < CallFrame::headerSizeInRegisters; ++i)
3247 callFrame.append(newTemporary());
3248
3249 if (shouldEmitDebugHooks() && debuggableCall == DebuggableCall::Yes)
3250 emitDebugHook(WillExecuteExpression, divotStart);
3251
3252 emitExpressionInfo(divot, divotStart, divotEnd);
3253
3254 Ref<Label> done = newLabel();
3255 expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
3256
3257 if (opcodeID == op_tail_call)
3258 emitLogShadowChickenTailIfNecessary();
3259
3260 // Emit call.
3261 ASSERT(dst);
3262 ASSERT(dst != ignoredResult());
3263 CallOp::emit(this, dst, func, callArguments.argumentCountIncludingThis(), callArguments.stackOffset());
3264
3265 if (expectedFunction != NoExpectedFunction)
3266 emitLabel(done.get());
3267
3268 return dst;
3269}
3270
3271RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3272{
3273 return emitCallVarargs<OpCallVarargs>(dst, func, thisRegister, arguments, firstFreeRegister, firstVarArgOffset, divot, divotStart, divotEnd, debuggableCall);
3274}
3275
3276RegisterID* BytecodeGenerator::emitCallVarargsInTailPosition(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3277{
3278 if (m_inTailPosition)
3279 return emitCallVarargs<OpTailCallVarargs>(dst, func, thisRegister, arguments, firstFreeRegister, firstVarArgOffset, divot, divotStart, divotEnd, debuggableCall);
3280 return emitCallVarargs<OpCallVarargs>(dst, func, thisRegister, arguments, firstFreeRegister, firstVarArgOffset, divot, divotStart, divotEnd, debuggableCall);
3281}
3282
3283RegisterID* BytecodeGenerator::emitConstructVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3284{
3285 return emitCallVarargs<OpConstructVarargs>(dst, func, thisRegister, arguments, firstFreeRegister, firstVarArgOffset, divot, divotStart, divotEnd, debuggableCall);
3286}
3287
3288RegisterID* BytecodeGenerator::emitCallForwardArgumentsInTailPosition(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3289{
    // We must emit a tail call here because we did not allocate an arguments object; otherwise we would have no way to correctly make this call.
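    // Typical trigger (an illustration, not an exhaustive list): a call like
    //     return f.apply(thisArg, arguments);
    // in tail position, where materializing the arguments object was elided and the
    // caller's arguments therefore have to be forwarded directly.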
3291 ASSERT(m_inTailPosition || !Options::useTailCalls());
3292 return emitCallVarargs<OpTailCallForwardArguments>(dst, func, thisRegister, nullptr, firstFreeRegister, firstVarArgOffset, divot, divotStart, divotEnd, debuggableCall);
3293}
3294
3295template<typename VarargsOp>
3296RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3297{
3298 if (shouldEmitDebugHooks() && debuggableCall == DebuggableCall::Yes)
3299 emitDebugHook(WillExecuteExpression, divotStart);
3300
3301 emitExpressionInfo(divot, divotStart, divotEnd);
3302
3303 if (VarargsOp::opcodeID == op_tail_call_varargs)
3304 emitLogShadowChickenTailIfNecessary();
3305
3306 // Emit call.
3307 ASSERT(dst != ignoredResult());
3308 VarargsOp::emit(this, dst, func, thisRegister, arguments ? arguments : VirtualRegister(0), firstFreeRegister, firstVarArgOffset);
3309 return dst;
3310}
3311
3312void BytecodeGenerator::emitLogShadowChickenPrologueIfNecessary()
3313{
3314 if (!shouldEmitDebugHooks() && !Options::alwaysUseShadowChicken())
3315 return;
3316 OpLogShadowChickenPrologue::emit(this, scopeRegister());
3317}
3318
3319void BytecodeGenerator::emitLogShadowChickenTailIfNecessary()
3320{
3321 if (!shouldEmitDebugHooks() && !Options::alwaysUseShadowChicken())
3322 return;
3323 OpLogShadowChickenTail::emit(this, thisRegister(), scopeRegister());
3324}
3325
3326void BytecodeGenerator::emitCallDefineProperty(RegisterID* newObj, RegisterID* propertyNameRegister,
3327 RegisterID* valueRegister, RegisterID* getterRegister, RegisterID* setterRegister, unsigned options, const JSTextPosition& position)
3328{
3329 DefinePropertyAttributes attributes;
3330 if (options & PropertyConfigurable)
3331 attributes.setConfigurable(true);
3332
3333 if (options & PropertyWritable)
3334 attributes.setWritable(true);
3335 else if (valueRegister)
3336 attributes.setWritable(false);
3337
3338 if (options & PropertyEnumerable)
3339 attributes.setEnumerable(true);
3340
3341 if (valueRegister)
3342 attributes.setValue();
3343 if (getterRegister)
3344 attributes.setGet();
3345 if (setterRegister)
3346 attributes.setSet();
3347
3348 ASSERT(!valueRegister || (!getterRegister && !setterRegister));
3349
3350 emitExpressionInfo(position, position, position);
3351
3352 if (attributes.hasGet() || attributes.hasSet()) {
3353 RefPtr<RegisterID> throwTypeErrorFunction;
3354 if (!attributes.hasGet() || !attributes.hasSet())
3355 throwTypeErrorFunction = moveLinkTimeConstant(nullptr, LinkTimeConstant::ThrowTypeErrorFunction);
3356
3357 RefPtr<RegisterID> getter;
3358 if (attributes.hasGet())
3359 getter = getterRegister;
3360 else
3361 getter = throwTypeErrorFunction;
3362
3363 RefPtr<RegisterID> setter;
3364 if (attributes.hasSet())
3365 setter = setterRegister;
3366 else
3367 setter = throwTypeErrorFunction;
3368
3369 OpDefineAccessorProperty::emit(this, newObj, propertyNameRegister, getter.get(), setter.get(), emitLoad(nullptr, jsNumber(attributes.rawRepresentation())));
3370 } else {
3371 OpDefineDataProperty::emit(this, newObj, propertyNameRegister, valueRegister, emitLoad(nullptr, jsNumber(attributes.rawRepresentation())));
3372 }
3373}
3374
3375RegisterID* BytecodeGenerator::emitReturn(RegisterID* src, ReturnFrom from)
3376{
3377 if (isConstructor()) {
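        // Informal recap of the constructor return semantics implemented below: returning an
        // object returns that object; in a derived ("extends") constructor, returning undefined
        // yields 'this' (after a TDZ check, since super() must have run), and returning any
        // other non-object throws a TypeError.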
3378 bool isDerived = constructorKind() == ConstructorKind::Extends;
3379 bool srcIsThis = src->index() == m_thisRegister.index();
3380
3381 if (isDerived && (srcIsThis || from == ReturnFrom::Finally))
3382 emitTDZCheck(src);
3383
3384 if (!srcIsThis || from == ReturnFrom::Finally) {
3385 Ref<Label> isObjectLabel = newLabel();
3386 emitJumpIfTrue(emitIsObject(newTemporary(), src), isObjectLabel.get());
3387
3388 if (isDerived) {
3389 Ref<Label> isUndefinedLabel = newLabel();
3390 emitJumpIfTrue(emitIsUndefined(newTemporary(), src), isUndefinedLabel.get());
3391 emitThrowTypeError("Cannot return a non-object type in the constructor of a derived class.");
3392 emitLabel(isUndefinedLabel.get());
3393 emitTDZCheck(&m_thisRegister);
3394 }
3395 OpRet::emit(this, &m_thisRegister);
3396 emitLabel(isObjectLabel.get());
3397 }
3398 }
3399
3400 OpRet::emit(this, src);
3401 return src;
3402}
3403
3404RegisterID* BytecodeGenerator::emitEnd(RegisterID* src)
3405{
3406 OpEnd::emit(this, src);
3407 return src;
3408}
3409
3410
3411RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, RegisterID* lazyThis, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
3412{
3413 ASSERT(func->refCount());
3414
3415 // Generate code for arguments.
3416 unsigned argument = 0;
3417 if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
3418
3419 ArgumentListNode* n = callArguments.argumentsNode()->m_listNode;
3420 if (n && n->m_expr->isSpreadExpression()) {
3421 RELEASE_ASSERT(!n->m_next);
3422 auto expression = static_cast<SpreadExpressionNode*>(n->m_expr)->expression();
3423 if (expression->isArrayLiteral()) {
3424 auto* elements = static_cast<ArrayNode*>(expression)->elements();
3425 if (elements && !elements->next() && elements->value()->isSpreadExpression()) {
3426 ExpressionNode* expression = static_cast<SpreadExpressionNode*>(elements->value())->expression();
3427 RefPtr<RegisterID> argumentRegister = emitNode(callArguments.argumentRegister(0), expression);
3428 OpSpread::emit(this, argumentRegister.get(), argumentRegister.get());
3429
3430 move(callArguments.thisRegister(), lazyThis);
3431 RefPtr<RegisterID> thisRegister = move(newTemporary(), callArguments.thisRegister());
3432 return emitConstructVarargs(dst, func, callArguments.thisRegister(), argumentRegister.get(), newTemporary(), 0, divot, divotStart, divotEnd, DebuggableCall::No);
3433 }
3434 }
3435 RefPtr<RegisterID> argumentRegister;
3436 argumentRegister = expression->emitBytecode(*this, callArguments.argumentRegister(0));
3437 move(callArguments.thisRegister(), lazyThis);
3438 return emitConstructVarargs(dst, func, callArguments.thisRegister(), argumentRegister.get(), newTemporary(), 0, divot, divotStart, divotEnd, DebuggableCall::No);
3439 }
3440
3441 for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
3442 emitNode(callArguments.argumentRegister(argument++), n);
3443 }
3444
3445 move(callArguments.thisRegister(), lazyThis);
3446
3447 // Reserve space for call frame.
3448 Vector<RefPtr<RegisterID>, CallFrame::headerSizeInRegisters, UnsafeVectorOverflow> callFrame;
3449 for (int i = 0; i < CallFrame::headerSizeInRegisters; ++i)
3450 callFrame.append(newTemporary());
3451
3452 emitExpressionInfo(divot, divotStart, divotEnd);
3453
3454 Ref<Label> done = newLabel();
3455 expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
3456
3457 OpConstruct::emit(this, dst, func, callArguments.argumentCountIncludingThis(), callArguments.stackOffset());
3458
3459 if (expectedFunction != NoExpectedFunction)
3460 emitLabel(done.get());
3461
3462 return dst;
3463}
3464
3465RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
3466{
3467 OpStrcat::emit(this, dst, src, count);
3468 return dst;
3469}
3470
3471void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
3472{
3473 OpToPrimitive::emit(this, dst, src);
3474}
3475
3476void BytecodeGenerator::emitGetScope()
3477{
3478 OpGetScope::emit(this, scopeRegister());
3479}
3480
3481RegisterID* BytecodeGenerator::emitPushWithScope(RegisterID* objectScope)
3482{
3483 pushLocalControlFlowScope();
3484 RegisterID* newScope = newBlockScopeVariable();
3485 newScope->ref();
3486
3487 OpPushWithScope::emit(this, newScope, scopeRegister(), objectScope);
3488
3489 move(scopeRegister(), newScope);
3490 m_lexicalScopeStack.append({ nullptr, newScope, true, 0 });
3491
3492 return newScope;
3493}
3494
3495RegisterID* BytecodeGenerator::emitGetParentScope(RegisterID* dst, RegisterID* scope)
3496{
3497 OpGetParentScope::emit(this, dst, scope);
3498 return dst;
3499}
3500
3501void BytecodeGenerator::emitPopScope(RegisterID* dst, RegisterID* scope)
3502{
3503 RefPtr<RegisterID> parentScope = emitGetParentScope(newTemporary(), scope);
3504 move(dst, parentScope.get());
3505}
3506
3507void BytecodeGenerator::emitPopWithScope()
3508{
3509 emitPopScope(scopeRegister(), scopeRegister());
3510 popLocalControlFlowScope();
3511 auto stackEntry = m_lexicalScopeStack.takeLast();
3512 stackEntry.m_scope->deref();
3513 RELEASE_ASSERT(stackEntry.m_isWithScope);
3514}
3515
3516void BytecodeGenerator::emitDebugHook(DebugHookType debugHookType, const JSTextPosition& divot)
3517{
3518 if (!shouldEmitDebugHooks())
3519 return;
3520
3521 emitExpressionInfo(divot, divot, divot);
3522 OpDebug::emit(this, debugHookType, false);
3523}
3524
3525void BytecodeGenerator::emitDebugHook(DebugHookType debugHookType, unsigned line, unsigned charOffset, unsigned lineStart)
3526{
3527 emitDebugHook(debugHookType, JSTextPosition(line, charOffset, lineStart));
3528}
3529
3530void BytecodeGenerator::emitDebugHook(StatementNode* statement)
3531{
3532 // DebuggerStatementNode will output its own special debug hook.
3533 if (statement->isDebuggerStatement())
3534 return;
3535
3536 emitDebugHook(WillExecuteStatement, statement->position());
3537}
3538
3539void BytecodeGenerator::emitDebugHook(ExpressionNode* expr)
3540{
3541 emitDebugHook(WillExecuteStatement, expr->position());
3542}
3543
3544void BytecodeGenerator::emitWillLeaveCallFrameDebugHook()
3545{
3546 RELEASE_ASSERT(m_scopeNode->isFunctionNode());
3547 emitDebugHook(WillLeaveCallFrame, m_scopeNode->lastLine(), m_scopeNode->startOffset(), m_scopeNode->lineStartOffset());
3548}
3549
3550void BytecodeGenerator::pushFinallyControlFlowScope(FinallyContext& finallyContext)
3551{
3552 ControlFlowScope scope(ControlFlowScope::Finally, currentLexicalScopeIndex(), &finallyContext);
3553 m_controlFlowScopeStack.append(WTFMove(scope));
3554
3555 m_finallyDepth++;
3556 m_currentFinallyContext = &finallyContext;
3557}
3558
3559void BytecodeGenerator::popFinallyControlFlowScope()
3560{
3561 ASSERT(m_controlFlowScopeStack.size());
3562 ASSERT(m_controlFlowScopeStack.last().isFinallyScope());
3563 ASSERT(m_finallyDepth > 0);
3564 ASSERT(m_currentFinallyContext);
3565 m_currentFinallyContext = m_currentFinallyContext->outerContext();
3566 m_finallyDepth--;
3567 m_controlFlowScopeStack.removeLast();
3568}
3569
3570LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
3571{
3572 shrinkToFit(m_labelScopes);
3573
3574 if (!m_labelScopes.size())
3575 return nullptr;
3576
3577 // We special-case the following, which is a syntax error in Firefox:
3578 // label:
3579 // break;
3580 if (name.isEmpty()) {
3581 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
3582 LabelScope& scope = m_labelScopes[i];
3583 if (scope.type() != LabelScope::NamedLabel)
3584 return &scope;
3585 }
3586 return nullptr;
3587 }
3588
3589 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
3590 LabelScope& scope = m_labelScopes[i];
3591 if (scope.name() && *scope.name() == name)
3592 return &scope;
3593 }
3594 return nullptr;
3595}
3596
3597LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
3598{
3599 shrinkToFit(m_labelScopes);
3600
3601 if (!m_labelScopes.size())
3602 return nullptr;
3603
3604 if (name.isEmpty()) {
3605 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
3606 LabelScope& scope = m_labelScopes[i];
3607 if (scope.type() == LabelScope::Loop) {
3608 ASSERT(scope.continueTarget());
3609 return &scope;
3610 }
3611 }
3612 return nullptr;
3613 }
3614
3615 // Continue to the loop nested nearest to the label scope that matches
3616 // 'name'.
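    // e.g. (informal):
    //     outer: for (;;) {
    //         for (;;)
    //             continue outer; // targets the outer loop, the nearest loop within label 'outer'
    //     }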
3617 LabelScope* result = nullptr;
3618 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
3619 LabelScope& scope = m_labelScopes[i];
3620 if (scope.type() == LabelScope::Loop) {
3621 ASSERT(scope.continueTarget());
3622 result = &scope;
3623 }
3624 if (scope.name() && *scope.name() == name)
3625 return result; // may be null.
3626 }
3627 return nullptr;
3628}
3629
3630void BytecodeGenerator::allocateCalleeSaveSpace()
3631{
3632 size_t virtualRegisterCountForCalleeSaves = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters();
3633
3634 for (size_t i = 0; i < virtualRegisterCountForCalleeSaves; i++) {
3635 RegisterID* localRegister = addVar();
3636 localRegister->ref();
3637 m_localRegistersForCalleeSaveRegisters.append(localRegister);
3638 }
3639}
3640
3641void BytecodeGenerator::allocateAndEmitScope()
3642{
3643 m_scopeRegister = addVar();
3644 m_scopeRegister->ref();
3645 m_codeBlock->setScopeRegister(scopeRegister()->virtualRegister());
3646 emitGetScope();
3647 m_topMostScope = addVar();
3648 move(m_topMostScope, scopeRegister());
3649}
3650
3651TryData* BytecodeGenerator::pushTry(Label& start, Label& handlerLabel, HandlerType handlerType)
3652{
3653 m_tryData.append(TryData { handlerLabel, handlerType });
3654 TryData* result = &m_tryData.last();
3655
3656 m_tryContextStack.append(TryContext {
3657 start,
3658 result
3659 });
3660
3661 return result;
3662}
3663
3664void BytecodeGenerator::popTry(TryData* tryData, Label& end)
3665{
3666 m_usesExceptions = true;
3667
3668 ASSERT_UNUSED(tryData, m_tryContextStack.last().tryData == tryData);
3669
3670 m_tryRanges.append(TryRange {
3671 m_tryContextStack.last().start.copyRef(),
3672 end,
3673 m_tryContextStack.last().tryData
3674 });
3675 m_tryContextStack.removeLast();
3676}
3677
3678void BytecodeGenerator::emitOutOfLineCatchHandler(RegisterID* thrownValueRegister, RegisterID* completionTypeRegister, TryData* data)
3679{
3680 RegisterID* unused = newTemporary();
3681 emitOutOfLineExceptionHandler(unused, thrownValueRegister, completionTypeRegister, data);
3682}
3683
3684void BytecodeGenerator::emitOutOfLineFinallyHandler(RegisterID* exceptionRegister, RegisterID* completionTypeRegister, TryData* data)
3685{
3686 RegisterID* unused = newTemporary();
3687 ASSERT(completionTypeRegister);
3688 emitOutOfLineExceptionHandler(exceptionRegister, unused, completionTypeRegister, data);
3689}
3690
3691void BytecodeGenerator::emitOutOfLineExceptionHandler(RegisterID* exceptionRegister, RegisterID* thrownValueRegister, RegisterID* completionTypeRegister, TryData* data)
3692{
3693 VirtualRegister completionTypeVirtualRegister = completionTypeRegister ? completionTypeRegister : VirtualRegister();
3694 m_exceptionHandlersToEmit.append({ data, exceptionRegister, thrownValueRegister, completionTypeVirtualRegister });
3695}
3696
3697void BytecodeGenerator::restoreScopeRegister(int lexicalScopeIndex)
3698{
3699 if (lexicalScopeIndex == CurrentLexicalScopeIndex)
3700 return; // No change needed.
3701
3702 if (lexicalScopeIndex != OutermostLexicalScopeIndex) {
3703 ASSERT(lexicalScopeIndex < static_cast<int>(m_lexicalScopeStack.size()));
3704 int endIndex = lexicalScopeIndex + 1;
3705 for (size_t i = endIndex; i--; ) {
3706 if (m_lexicalScopeStack[i].m_scope) {
3707 move(scopeRegister(), m_lexicalScopeStack[i].m_scope);
3708 return;
3709 }
3710 }
3711 }
3712 // Note that if we don't find a local scope in the current function/program,
3713 // we must grab the outer-most scope of this bytecode generation.
3714 move(scopeRegister(), m_topMostScope);
3715}
3716
3717void BytecodeGenerator::restoreScopeRegister()
3718{
3719 restoreScopeRegister(currentLexicalScopeIndex());
3720}
3721
3722int BytecodeGenerator::labelScopeDepthToLexicalScopeIndex(int targetLabelScopeDepth)
3723{
3724 ASSERT(labelScopeDepth() - targetLabelScopeDepth >= 0);
3725 size_t scopeDelta = labelScopeDepth() - targetLabelScopeDepth;
3726 ASSERT(scopeDelta <= m_controlFlowScopeStack.size());
3727 if (!scopeDelta)
3728 return CurrentLexicalScopeIndex;
3729
3730 ControlFlowScope& targetScope = m_controlFlowScopeStack[targetLabelScopeDepth];
3731 return targetScope.lexicalScopeIndex;
3732}
3733
3734void BytecodeGenerator::emitThrow(RegisterID* exc)
3735{
3736 m_usesExceptions = true;
3737 OpThrow::emit(this, exc);
3738}
3739
3740RegisterID* BytecodeGenerator::emitArgumentCount(RegisterID* dst)
3741{
3742 OpArgumentCount::emit(this, dst);
3743 return dst;
3744}
3745
3746unsigned BytecodeGenerator::localScopeDepth() const
3747{
3748 return m_localScopeDepth;
3749}
3750
3751int BytecodeGenerator::labelScopeDepth() const
3752{
3753 unsigned depth = localScopeDepth() + m_finallyDepth;
3754 ASSERT(depth == m_controlFlowScopeStack.size());
3755 return depth;
3756}
3757
3758void BytecodeGenerator::emitThrowStaticError(ErrorType errorType, RegisterID* raw)
3759{
3760 RefPtr<RegisterID> message = newTemporary();
3761 emitToString(message.get(), raw);
3762 OpThrowStaticError::emit(this, message.get(), errorType);
3763}
3764
3765void BytecodeGenerator::emitThrowStaticError(ErrorType errorType, const Identifier& message)
3766{
3767 OpThrowStaticError::emit(this, addConstantValue(addStringConstant(message)), errorType);
3768}
3769
3770void BytecodeGenerator::emitThrowReferenceError(const String& message)
3771{
3772 emitThrowStaticError(ErrorType::ReferenceError, Identifier::fromString(m_vm, message));
3773}
3774
3775void BytecodeGenerator::emitThrowTypeError(const String& message)
3776{
3777 emitThrowStaticError(ErrorType::TypeError, Identifier::fromString(m_vm, message));
3778}
3779
3780void BytecodeGenerator::emitThrowTypeError(const Identifier& message)
3781{
3782 emitThrowStaticError(ErrorType::TypeError, message);
3783}
3784
3785void BytecodeGenerator::emitThrowRangeError(const Identifier& message)
3786{
3787 emitThrowStaticError(ErrorType::RangeError, message);
3788}
3789
3790void BytecodeGenerator::emitThrowOutOfMemoryError()
3791{
3792 emitThrowStaticError(ErrorType::Error, Identifier::fromString(m_vm, "Out of memory"));
3793}
3794
3795void BytecodeGenerator::emitPushFunctionNameScope(const Identifier& property, RegisterID* callee, bool isCaptured)
3796{
3797 // There is some nuance here:
3798 // If we're in strict mode code, the function name scope variable acts exactly like a "const" variable.
    // If we're not in strict mode code, we want to allow bogus assignments to the name-scoped variable.
3800 // This means any assignment to the variable won't throw, but it won't actually assign a new value to it.
3801 // To accomplish this, we don't report that this scope is a lexical scope. This will prevent
3802 // any throws when trying to assign to the variable (while still ensuring it keeps its original
3803 // value). There is some ugliness and exploitation of a leaky abstraction here, but it's better than
    // having a completely new opcode and a class to handle name scopes, which are so close in functionality
3805 // to lexical environments.
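    // Illustrative behavior this preserves (the usual named-function-expression rules):
    //     (function foo() { foo = 1; return foo; })();    // sloppy mode: assignment is ignored, still returns the function
    //     (function foo() { "use strict"; foo = 1; })();  // strict mode: throws TypeError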
3806 VariableEnvironment nameScopeEnvironment;
3807 auto addResult = nameScopeEnvironment.add(property);
3808 if (isCaptured)
3809 addResult.iterator->value.setIsCaptured();
3810 addResult.iterator->value.setIsConst(); // The function name scope name acts like a const variable.
3811 unsigned numVars = m_codeBlock->m_numVars;
3812 pushLexicalScopeInternal(nameScopeEnvironment, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, TDZRequirement::NotUnderTDZ, ScopeType::FunctionNameScope, ScopeRegisterType::Var);
3813 ASSERT_UNUSED(numVars, m_codeBlock->m_numVars == static_cast<int>(numVars + 1)); // Should have only created one new "var" for the function name scope.
3814 bool shouldTreatAsLexicalVariable = isStrictMode();
3815 Variable functionVar = variableForLocalEntry(property, m_lexicalScopeStack.last().m_symbolTable->get(NoLockingNecessary, property.impl()), m_lexicalScopeStack.last().m_symbolTableConstantIndex, shouldTreatAsLexicalVariable);
3816 emitPutToScope(m_lexicalScopeStack.last().m_scope, functionVar, callee, ThrowIfNotFound, InitializationMode::NotInitialization);
3817}
3818
3819void BytecodeGenerator::pushLocalControlFlowScope()
3820{
3821 ControlFlowScope scope(ControlFlowScope::Label, currentLexicalScopeIndex());
3822 m_controlFlowScopeStack.append(WTFMove(scope));
3823 m_localScopeDepth++;
3824}
3825
3826void BytecodeGenerator::popLocalControlFlowScope()
3827{
3828 ASSERT(m_controlFlowScopeStack.size());
3829 ASSERT(!m_controlFlowScopeStack.last().isFinallyScope());
3830 m_controlFlowScopeStack.removeLast();
3831 m_localScopeDepth--;
3832}
3833
3834void BytecodeGenerator::emitPushCatchScope(VariableEnvironment& environment)
3835{
3836 pushLexicalScopeInternal(environment, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, TDZRequirement::UnderTDZ, ScopeType::CatchScope, ScopeRegisterType::Block);
3837}
3838
3839void BytecodeGenerator::emitPopCatchScope(VariableEnvironment& environment)
3840{
3841 popLexicalScopeInternal(environment);
3842}
3843
3844void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
3845{
3846 switch (type) {
3847 case SwitchInfo::SwitchImmediate: {
3848 size_t tableIndex = m_codeBlock->numberOfSwitchJumpTables();
3849 m_codeBlock->addSwitchJumpTable();
3850 OpSwitchImm::emit(this, tableIndex, BoundLabel(), scrutineeRegister);
3851 break;
3852 }
3853 case SwitchInfo::SwitchCharacter: {
3854 size_t tableIndex = m_codeBlock->numberOfSwitchJumpTables();
3855 m_codeBlock->addSwitchJumpTable();
3856 OpSwitchChar::emit(this, tableIndex, BoundLabel(), scrutineeRegister);
3857 break;
3858 }
3859 case SwitchInfo::SwitchString: {
3860 size_t tableIndex = m_codeBlock->numberOfStringSwitchJumpTables();
3861 m_codeBlock->addStringSwitchJumpTable();
3862 OpSwitchString::emit(this, tableIndex, BoundLabel(), scrutineeRegister);
3863 break;
3864 }
3865 default:
3866 RELEASE_ASSERT_NOT_REACHED();
3867 }
3868
3869 SwitchInfo info = { m_lastInstruction.offset(), type };
3870 m_switchContextStack.append(info);
3871}
3872
3873static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
3874{
3875 UNUSED_PARAM(max);
3876 ASSERT(node->isNumber());
3877 double value = static_cast<NumberNode*>(node)->value();
3878 int32_t key = static_cast<int32_t>(value);
3879 ASSERT(key == value);
3880 ASSERT(key >= min);
3881 ASSERT(key <= max);
3882 return key - min;
3883}
3884
3885static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
3886{
3887 UNUSED_PARAM(max);
3888 ASSERT(node->isString());
3889 StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
3890 ASSERT(clause->length() == 1);
3891
3892 int32_t key = (*clause)[0];
3893 ASSERT(key >= min);
3894 ASSERT(key <= max);
3895 return key - min;
3896}
3897
3898static void prepareJumpTableForSwitch(
3899 UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount,
3900 const Vector<Ref<Label>, 8>& labels, ExpressionNode** nodes, int32_t min, int32_t max,
3901 int32_t (*keyGetter)(ExpressionNode*, int32_t min, int32_t max))
3902{
3903 jumpTable.min = min;
3904 jumpTable.branchOffsets.resize(max - min + 1);
3905 jumpTable.branchOffsets.fill(0);
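    // Informal example of the resulting shape: for clauses 3, 5 and 9, min is 3 and the table
    // spans indices 0..6; the loop below fills slots 0, 2 and 6, and a slot left at 0 is
    // treated as "no case here" and resolves to the switch's default target.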
3906 for (uint32_t i = 0; i < clauseCount; ++i) {
3907 // We're emitting this after the clause labels should have been fixed, so
3908 // the labels should not be "forward" references
3909 ASSERT(!labels[i]->isForward());
3910 jumpTable.add(keyGetter(nodes[i], min, max), labels[i]->bind(switchAddress));
3911 }
3912}
3913
3914static void prepareJumpTableForStringSwitch(UnlinkedStringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, const Vector<Ref<Label>, 8>& labels, ExpressionNode** nodes)
3915{
3916 for (uint32_t i = 0; i < clauseCount; ++i) {
3917 // We're emitting this after the clause labels should have been fixed, so
3918 // the labels should not be "forward" references
3919 ASSERT(!labels[i]->isForward());
3920
3921 ASSERT(nodes[i]->isString());
3922 StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
3923 jumpTable.offsetTable.add(clause, UnlinkedStringJumpTable::OffsetLocation { labels[i]->bind(switchAddress) });
3924 }
3925}
3926
3927void BytecodeGenerator::endSwitch(uint32_t clauseCount, const Vector<Ref<Label>, 8>& labels, ExpressionNode** nodes, Label& defaultLabel, int32_t min, int32_t max)
3928{
3929 SwitchInfo switchInfo = m_switchContextStack.last();
3930 m_switchContextStack.removeLast();
3931
3932 BoundLabel defaultTarget = defaultLabel.bind(switchInfo.bytecodeOffset);
3933 auto handleSwitch = [&](auto* op, auto bytecode) {
3934 op->setDefaultOffset(defaultTarget, [&]() {
3935 m_codeBlock->addOutOfLineJumpTarget(switchInfo.bytecodeOffset, defaultTarget);
3936 return BoundLabel();
3937 });
3938
3939 UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->switchJumpTable(bytecode.m_tableIndex);
3940 prepareJumpTableForSwitch(
3941 jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max,
3942 switchInfo.switchType == SwitchInfo::SwitchImmediate
3943 ? keyForImmediateSwitch
3944 : keyForCharacterSwitch);
3945 };
3946
3947 auto ref = m_writer.ref(switchInfo.bytecodeOffset);
3948 switch (switchInfo.switchType) {
3949 case SwitchInfo::SwitchImmediate: {
3950 handleSwitch(ref->cast<OpSwitchImm>(), ref->as<OpSwitchImm>());
3951 break;
3952 }
3953 case SwitchInfo::SwitchCharacter: {
3954 handleSwitch(ref->cast<OpSwitchChar>(), ref->as<OpSwitchChar>());
3955 break;
3956 }
3957
3958 case SwitchInfo::SwitchString: {
3959 ref->cast<OpSwitchString>()->setDefaultOffset(defaultTarget, [&]() {
3960 m_codeBlock->addOutOfLineJumpTarget(switchInfo.bytecodeOffset, defaultTarget);
3961 return BoundLabel();
3962 });
3963
3964 UnlinkedStringJumpTable& jumpTable = m_codeBlock->stringSwitchJumpTable(ref->as<OpSwitchString>().m_tableIndex);
3965 prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
3966 break;
3967 }
3968
3969 default:
3970 RELEASE_ASSERT_NOT_REACHED();
3971 break;
3972 }
3973}
3974
3975RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
3976{
3977 // It would be nice to do an even better job of identifying exactly where the expression is.
3978 // And we could make the caller pass the node pointer in, if there was some way of getting
3979 // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
3980 // is still good enough to get us an accurate line number.
3981 m_expressionTooDeep = true;
3982 return newTemporary();
3983}
3984
3985bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
3986{
3987 RegisterID* registerID = variable(ident).local();
3988 if (!registerID)
3989 return false;
3990 return registerID->index() == CallFrame::argumentOffset(argumentNumber);
3991}
3992
3993bool BytecodeGenerator::emitReadOnlyExceptionIfNeeded(const Variable& variable)
3994{
3995 // If we're in strict mode, we always throw.
3996 // If we're not in strict mode, we throw for "const" variables but not the function callee.
3997 if (isStrictMode() || variable.isConst()) {
3998 emitThrowTypeError(Identifier::fromString(m_vm, ReadonlyPropertyWriteError));
3999 return true;
4000 }
4001 return false;
4002}
4003
4004void BytecodeGenerator::emitEnumeration(ThrowableExpressionData* node, ExpressionNode* subjectNode, const ScopedLambda<void(BytecodeGenerator&, RegisterID*)>& callBack, ForOfNode* forLoopNode, RegisterID* forLoopSymbolTable)
4005{
4006 bool isForAwait = forLoopNode ? forLoopNode->isForAwait() : false;
4007 ASSERT(!isForAwait || (isForAwait && isAsyncFunctionParseMode(parseMode())));
4008
4009 RefPtr<RegisterID> subject = newTemporary();
4010 emitNode(subject.get(), subjectNode);
4011 RefPtr<RegisterID> iterator = isForAwait ? emitGetAsyncIterator(subject.get(), node) : emitGetIterator(subject.get(), node);
4012 RefPtr<RegisterID> nextMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().next);
4013
4014 Ref<Label> loopDone = newLabel();
4015 Ref<Label> tryStartLabel = newLabel();
4016 Ref<Label> finallyViaThrowLabel = newLabel();
4017 Ref<Label> finallyLabel = newLabel();
4018 Ref<Label> catchLabel = newLabel();
4019 Ref<Label> endCatchLabel = newLabel();
4020
    // RefPtr<RegisterID> 'iterator' must outlive the IteratorClose sequences emitted below.
4022 FinallyContext finallyContext(*this, finallyLabel.get());
4023 pushFinallyControlFlowScope(finallyContext);
4024
4025 {
4026 Ref<LabelScope> scope = newLabelScope(LabelScope::Loop);
4027 RefPtr<RegisterID> value = newTemporary();
4028 emitLoad(value.get(), jsUndefined());
4029
4030 emitJump(*scope->continueTarget());
4031
4032 Ref<Label> loopStart = newLabel();
4033 emitLabel(loopStart.get());
4034 emitLoopHint();
4035
4036 emitLabel(tryStartLabel.get());
4037 TryData* tryData = pushTry(tryStartLabel.get(), finallyViaThrowLabel.get(), HandlerType::SynthesizedFinally);
4038 callBack(*this, value.get());
4039 emitJump(*scope->continueTarget());
4040
4041 // IteratorClose sequence for abrupt completions.
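        // Informal illustration of when this runs: in
        //     for (const x of iterable) { if (bad(x)) throw new Error(); }
        // the throw (or a 'return' out of the loop body) is an abrupt completion, so the
        // iterator's return() method must be called before the completion propagates; a plain
        // 'break' is instead handled at the bottom of emitEnumeration.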
4042 {
4043 // Finally block for the enumeration.
4044 emitLabel(finallyViaThrowLabel.get());
4045 popTry(tryData, finallyViaThrowLabel.get());
4046
4047 Ref<Label> finallyBodyLabel = newLabel();
4048 RefPtr<RegisterID> finallyExceptionRegister = newTemporary();
4049
4050 emitOutOfLineFinallyHandler(finallyContext.completionValueRegister(), finallyContext.completionTypeRegister(), tryData);
4051 move(finallyExceptionRegister.get(), finallyContext.completionValueRegister());
4052 emitJump(finallyBodyLabel.get());
4053
4054 emitLabel(finallyLabel.get());
4055 moveEmptyValue(finallyExceptionRegister.get());
4056
4057 // Finally fall through case.
4058 emitLabel(finallyBodyLabel.get());
4059 restoreScopeRegister();
4060
4061 Ref<Label> finallyDone = newLabel();
4062
4063 RefPtr<RegisterID> returnMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().returnKeyword);
4064 emitJumpIfTrue(emitIsUndefined(newTemporary(), returnMethod.get()), finallyDone.get());
4065
4066 Ref<Label> returnCallTryStart = newLabel();
4067 emitLabel(returnCallTryStart.get());
4068 TryData* returnCallTryData = pushTry(returnCallTryStart.get(), catchLabel.get(), HandlerType::SynthesizedCatch);
4069
4070 CallArguments returnArguments(*this, nullptr);
4071 move(returnArguments.thisRegister(), iterator.get());
4072 emitCall(value.get(), returnMethod.get(), NoExpectedFunction, returnArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);
4073
4074 if (isForAwait)
4075 emitAwait(value.get());
4076
4077 emitJumpIfTrue(emitIsObject(newTemporary(), value.get()), finallyDone.get());
4078 emitThrowTypeError("Iterator result interface is not an object."_s);
4079
4080 emitLabel(finallyDone.get());
4081 emitFinallyCompletion(finallyContext, endCatchLabel.get());
4082
4083 popTry(returnCallTryData, finallyDone.get());
4084
4085 // Catch block for exceptions that may be thrown while calling the return
            // handler in the enumeration finally block. The only reason we need this
            // catch block is that, if we entered the above finally block due to a thrown
            // exception, we want to re-throw the original exception when exiting
            // the finally block. Otherwise, we'll let any new exception pass through.
4090 {
4091 emitLabel(catchLabel.get());
4092
4093 RefPtr<RegisterID> exceptionRegister = newTemporary();
4094 emitOutOfLineFinallyHandler(exceptionRegister.get(), finallyContext.completionTypeRegister(), returnCallTryData);
4095 // Since this is a synthesized catch block and we're guaranteed to never need
4096 // to resolve any symbols from the scope, we can skip restoring the scope
4097 // register here.
4098
4099 Ref<Label> throwLabel = newLabel();
4100 emitJumpIfTrue(emitIsEmpty(newTemporary(), finallyExceptionRegister.get()), throwLabel.get());
4101 move(exceptionRegister.get(), finallyExceptionRegister.get());
4102
4103 emitLabel(throwLabel.get());
4104 emitThrow(exceptionRegister.get());
4105
4106 emitLabel(endCatchLabel.get());
4107 }
4108 }
4109
4110 emitLabel(*scope->continueTarget());
4111 if (forLoopNode) {
4112 RELEASE_ASSERT(forLoopNode->isForOfNode());
4113 prepareLexicalScopeForNextForLoopIteration(forLoopNode, forLoopSymbolTable);
4114 emitDebugHook(forLoopNode->lexpr());
4115 }
4116
4117 {
4118 emitIteratorNext(value.get(), nextMethod.get(), iterator.get(), node, isForAwait ? EmitAwait::Yes : EmitAwait::No);
4119
4120 emitJumpIfTrue(emitGetById(newTemporary(), value.get(), propertyNames().done), loopDone.get());
4121 emitGetById(value.get(), value.get(), propertyNames().value);
4122 emitJump(loopStart.get());
4123 }
4124
4125 bool breakLabelIsBound = scope->breakTargetMayBeBound();
4126 if (breakLabelIsBound)
4127 emitLabel(scope->breakTarget());
4128 popFinallyControlFlowScope();
4129 if (breakLabelIsBound) {
4130 // IteratorClose sequence for control flow that breaks out of the loop.
4131 emitIteratorClose(iterator.get(), node, isForAwait ? EmitAwait::Yes : EmitAwait::No);
4132 }
4133 }
4134 emitLabel(loopDone.get());
4135}
4136
4137RegisterID* BytecodeGenerator::emitGetTemplateObject(RegisterID* dst, TaggedTemplateNode* taggedTemplate)
4138{
4139 TemplateObjectDescriptor::StringVector rawStrings;
4140 TemplateObjectDescriptor::OptionalStringVector cookedStrings;
4141
4142 TemplateStringListNode* templateString = taggedTemplate->templateLiteral()->templateStrings();
4143 for (; templateString; templateString = templateString->next()) {
4144 auto* string = templateString->value();
4145 ASSERT(string->raw());
4146 rawStrings.append(string->raw()->impl());
4147 if (!string->cooked())
4148 cookedStrings.append(WTF::nullopt);
4149 else
4150 cookedStrings.append(string->cooked()->impl());
4151 }
4152 RefPtr<RegisterID> constant = addTemplateObjectConstant(TemplateObjectDescriptor::create(WTFMove(rawStrings), WTFMove(cookedStrings)), taggedTemplate->endOffset());
4153 if (!dst)
4154 return constant.get();
4155 return move(dst, constant.get());
4156}
4157
4158RegisterID* BytecodeGenerator::emitGetGlobalPrivate(RegisterID* dst, const Identifier& property)
4159{
4160 dst = tempDestination(dst);
4161 Variable var = variable(property);
4162 if (RegisterID* local = var.local())
4163 return move(dst, local);
4164
4165 RefPtr<RegisterID> scope = newTemporary();
4166 move(scope.get(), emitResolveScope(scope.get(), var));
4167 return emitGetFromScope(dst, scope.get(), var, ThrowIfNotFound);
4168}
4169
4170RegisterID* BytecodeGenerator::emitGetEnumerableLength(RegisterID* dst, RegisterID* base)
4171{
4172 OpGetEnumerableLength::emit(this, dst, base);
4173 return dst;
4174}
4175
4176RegisterID* BytecodeGenerator::emitHasGenericProperty(RegisterID* dst, RegisterID* base, RegisterID* propertyName)
4177{
4178 OpHasGenericProperty::emit(this, dst, base, propertyName);
4179 return dst;
4180}
4181
4182RegisterID* BytecodeGenerator::emitHasIndexedProperty(RegisterID* dst, RegisterID* base, RegisterID* propertyName)
4183{
4184 OpHasIndexedProperty::emit(this, dst, base, propertyName);
4185 return dst;
4186}
4187
4188RegisterID* BytecodeGenerator::emitHasStructureProperty(RegisterID* dst, RegisterID* base, RegisterID* propertyName, RegisterID* enumerator)
4189{
4190 OpHasStructureProperty::emit(this, dst, base, propertyName, enumerator);
4191 return dst;
4192}
4193
4194RegisterID* BytecodeGenerator::emitGetPropertyEnumerator(RegisterID* dst, RegisterID* base)
4195{
4196 OpGetPropertyEnumerator::emit(this, dst, base);
4197 return dst;
4198}
4199
4200RegisterID* BytecodeGenerator::emitEnumeratorStructurePropertyName(RegisterID* dst, RegisterID* enumerator, RegisterID* index)
4201{
4202 OpEnumeratorStructurePname::emit(this, dst, enumerator, index);
4203 return dst;
4204}
4205
4206RegisterID* BytecodeGenerator::emitEnumeratorGenericPropertyName(RegisterID* dst, RegisterID* enumerator, RegisterID* index)
4207{
4208 OpEnumeratorGenericPname::emit(this, dst, enumerator, index);
4209 return dst;
4210}
4211
4212RegisterID* BytecodeGenerator::emitToIndexString(RegisterID* dst, RegisterID* index)
4213{
4214 OpToIndexString::emit(this, dst, index);
4215 return dst;
4216}
4217
4218RegisterID* BytecodeGenerator::emitIsCellWithType(RegisterID* dst, RegisterID* src, JSType type)
4219{
4220 OpIsCellWithType::emit(this, dst, src, type);
4221 return dst;
4222}
4223
4224RegisterID* BytecodeGenerator::emitIsObject(RegisterID* dst, RegisterID* src)
4225{
4226 OpIsObject::emit(this, dst, src);
4227 return dst;
4228}
4229
4230RegisterID* BytecodeGenerator::emitIsNumber(RegisterID* dst, RegisterID* src)
4231{
4232 OpIsNumber::emit(this, dst, src);
4233 return dst;
4234}
4235
4236RegisterID* BytecodeGenerator::emitIsUndefined(RegisterID* dst, RegisterID* src)
4237{
4238 OpIsUndefined::emit(this, dst, src);
4239 return dst;
4240}
4241
4242RegisterID* BytecodeGenerator::emitIsUndefinedOrNull(RegisterID* dst, RegisterID* src)
4243{
4244 OpIsUndefinedOrNull::emit(this, dst, src);
4245 return dst;
4246}
4247
4248RegisterID* BytecodeGenerator::emitIsEmpty(RegisterID* dst, RegisterID* src)
4249{
4250 OpIsEmpty::emit(this, dst, src);
4251 return dst;
4252}
4253
4254RegisterID* BytecodeGenerator::emitIteratorNext(RegisterID* dst, RegisterID* nextMethod, RegisterID* iterator, const ThrowableExpressionData* node, EmitAwait doEmitAwait)
4255{
4256 {
4257 CallArguments nextArguments(*this, nullptr);
4258 move(nextArguments.thisRegister(), iterator);
4259 emitCall(dst, nextMethod, NoExpectedFunction, nextArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);
4260
4261 if (doEmitAwait == EmitAwait::Yes)
4262 emitAwait(dst);
4263 }
4264 {
4265 Ref<Label> typeIsObject = newLabel();
4266 emitJumpIfTrue(emitIsObject(newTemporary(), dst), typeIsObject.get());
4267 emitThrowTypeError("Iterator result interface is not an object."_s);
4268 emitLabel(typeIsObject.get());
4269 }
4270 return dst;
4271}
4272
4273RegisterID* BytecodeGenerator::emitIteratorNextWithValue(RegisterID* dst, RegisterID* nextMethod, RegisterID* iterator, RegisterID* value, const ThrowableExpressionData* node)
4274{
4275 {
4276 CallArguments nextArguments(*this, nullptr, 1);
4277 move(nextArguments.thisRegister(), iterator);
4278 move(nextArguments.argumentRegister(0), value);
4279 emitCall(dst, nextMethod, NoExpectedFunction, nextArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);
4280 }
4281
4282 return dst;
4283}
4284
4285void BytecodeGenerator::emitIteratorClose(RegisterID* iterator, const ThrowableExpressionData* node, EmitAwait doEmitAwait)
4286{
4287 Ref<Label> done = newLabel();
4288 RefPtr<RegisterID> returnMethod = emitGetById(newTemporary(), iterator, propertyNames().returnKeyword);
4289 emitJumpIfTrue(emitIsUndefined(newTemporary(), returnMethod.get()), done.get());
4290
4291 RefPtr<RegisterID> value = newTemporary();
4292 CallArguments returnArguments(*this, nullptr);
4293 move(returnArguments.thisRegister(), iterator);
4294 emitCall(value.get(), returnMethod.get(), NoExpectedFunction, returnArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);
4295
4296 if (doEmitAwait == EmitAwait::Yes)
4297 emitAwait(value.get());
4298
4299 emitJumpIfTrue(emitIsObject(newTemporary(), value.get()), done.get());
4300 emitThrowTypeError("Iterator result interface is not an object."_s);
4301 emitLabel(done.get());
4302}
4303
4304void BytecodeGenerator::pushIndexedForInScope(RegisterID* localRegister, RegisterID* indexRegister)
4305{
4306 if (!localRegister)
4307 return;
4308 unsigned bodyBytecodeStartOffset = instructions().size();
4309 m_forInContextStack.append(adoptRef(*new IndexedForInContext(localRegister, indexRegister, bodyBytecodeStartOffset)));
4310}
4311
4312void BytecodeGenerator::popIndexedForInScope(RegisterID* localRegister)
4313{
4314 if (!localRegister)
4315 return;
4316 unsigned bodyBytecodeEndOffset = instructions().size();
4317 m_forInContextStack.last()->asIndexedForInContext().finalize(*this, m_codeBlock.get(), bodyBytecodeEndOffset);
4318 m_forInContextStack.removeLast();
4319}
4320
4321RegisterID* BytecodeGenerator::emitLoadArrowFunctionLexicalEnvironment(const Identifier& identifier)
4322{
4323 ASSERT(m_codeBlock->isArrowFunction() || m_codeBlock->isArrowFunctionContext() || constructorKind() == ConstructorKind::Extends || m_codeType == EvalCode);
4324
4325 return emitResolveScope(nullptr, variable(identifier, ThisResolutionType::Scoped));
4326}
4327
4328void BytecodeGenerator::emitLoadThisFromArrowFunctionLexicalEnvironment()
4329{
4330 emitGetFromScope(thisRegister(), emitLoadArrowFunctionLexicalEnvironment(propertyNames().thisIdentifier), variable(propertyNames().thisIdentifier, ThisResolutionType::Scoped), DoNotThrowIfNotFound);
4331}
4332
4333RegisterID* BytecodeGenerator::emitLoadNewTargetFromArrowFunctionLexicalEnvironment()
4334{
4335 Variable newTargetVar = variable(propertyNames().builtinNames().newTargetLocalPrivateName());
4336
4337 return emitGetFromScope(m_newTargetRegister, emitLoadArrowFunctionLexicalEnvironment(propertyNames().builtinNames().newTargetLocalPrivateName()), newTargetVar, ThrowIfNotFound);
4338
4339}
4340
4341RegisterID* BytecodeGenerator::emitLoadDerivedConstructorFromArrowFunctionLexicalEnvironment()
4342{
4343 Variable protoScopeVar = variable(propertyNames().builtinNames().derivedConstructorPrivateName());
4344 return emitGetFromScope(newTemporary(), emitLoadArrowFunctionLexicalEnvironment(propertyNames().builtinNames().derivedConstructorPrivateName()), protoScopeVar, ThrowIfNotFound);
4345}
4346
4347RegisterID* BytecodeGenerator::ensureThis()
4348{
4349 if (constructorKind() == ConstructorKind::Extends || isDerivedConstructorContext()) {
4350 if ((needsToUpdateArrowFunctionContext() && isSuperCallUsedInInnerArrowFunction()) || m_codeBlock->parseMode() == SourceParseMode::AsyncArrowFunctionBodyMode)
4351 emitLoadThisFromArrowFunctionLexicalEnvironment();
4352
4353 emitTDZCheck(thisRegister());
4354 }
4355
4356 return thisRegister();
4357}
4358
4359bool BytecodeGenerator::isThisUsedInInnerArrowFunction()
4360{
4361 return m_scopeNode->doAnyInnerArrowFunctionsUseThis() || m_scopeNode->doAnyInnerArrowFunctionsUseSuperProperty() || m_scopeNode->doAnyInnerArrowFunctionsUseSuperCall() || m_scopeNode->doAnyInnerArrowFunctionsUseEval() || m_codeBlock->usesEval();
4362}
4363
4364bool BytecodeGenerator::isArgumentsUsedInInnerArrowFunction()
4365{
4366 return m_scopeNode->doAnyInnerArrowFunctionsUseArguments() || m_scopeNode->doAnyInnerArrowFunctionsUseEval();
4367}
4368
4369bool BytecodeGenerator::isNewTargetUsedInInnerArrowFunction()
4370{
4371 return m_scopeNode->doAnyInnerArrowFunctionsUseNewTarget() || m_scopeNode->doAnyInnerArrowFunctionsUseSuperCall() || m_scopeNode->doAnyInnerArrowFunctionsUseEval() || m_codeBlock->usesEval();
4372}
4373
4374bool BytecodeGenerator::isSuperUsedInInnerArrowFunction()
4375{
4376 return m_scopeNode->doAnyInnerArrowFunctionsUseSuperCall() || m_scopeNode->doAnyInnerArrowFunctionsUseSuperProperty() || m_scopeNode->doAnyInnerArrowFunctionsUseEval() || m_codeBlock->usesEval();
4377}
4378
4379bool BytecodeGenerator::isSuperCallUsedInInnerArrowFunction()
4380{
4381 return m_scopeNode->doAnyInnerArrowFunctionsUseSuperCall() || m_scopeNode->doAnyInnerArrowFunctionsUseEval() || m_codeBlock->usesEval();
4382}
4383
4384void BytecodeGenerator::emitPutNewTargetToArrowFunctionContextScope()
4385{
4386 if (isNewTargetUsedInInnerArrowFunction()) {
4387 ASSERT(m_arrowFunctionContextLexicalEnvironmentRegister);
4388
4389 Variable newTargetVar = variable(propertyNames().builtinNames().newTargetLocalPrivateName());
4390 emitPutToScope(m_arrowFunctionContextLexicalEnvironmentRegister, newTargetVar, newTarget(), DoNotThrowIfNotFound, InitializationMode::Initialization);
4391 }
4392}
4393
4394void BytecodeGenerator::emitPutDerivedConstructorToArrowFunctionContextScope()
4395{
4396 if (needsDerivedConstructorInArrowFunctionLexicalEnvironment()) {
4397 ASSERT(m_arrowFunctionContextLexicalEnvironmentRegister);
4398
4399 Variable protoScope = variable(propertyNames().builtinNames().derivedConstructorPrivateName());
4400 emitPutToScope(m_arrowFunctionContextLexicalEnvironmentRegister, protoScope, &m_calleeRegister, DoNotThrowIfNotFound, InitializationMode::Initialization);
4401 }
4402}
4403
4404void BytecodeGenerator::emitPutThisToArrowFunctionContextScope()
4405{
4406 if (isThisUsedInInnerArrowFunction() || (m_scopeNode->usesSuperCall() && m_codeType == EvalCode)) {
4407 ASSERT(isDerivedConstructorContext() || m_arrowFunctionContextLexicalEnvironmentRegister != nullptr);
4408
4409 Variable thisVar = variable(propertyNames().thisIdentifier, ThisResolutionType::Scoped);
4410 RegisterID* scope = isDerivedConstructorContext() ? emitLoadArrowFunctionLexicalEnvironment(propertyNames().thisIdentifier) : m_arrowFunctionContextLexicalEnvironmentRegister;
4411
4412 emitPutToScope(scope, thisVar, thisRegister(), ThrowIfNotFound, InitializationMode::NotInitialization);
4413 }
4414}
4415
4416void BytecodeGenerator::pushStructureForInScope(RegisterID* localRegister, RegisterID* indexRegister, RegisterID* propertyRegister, RegisterID* enumeratorRegister)
4417{
4418 if (!localRegister)
4419 return;
4420 unsigned bodyBytecodeStartOffset = instructions().size();
4421 m_forInContextStack.append(adoptRef(*new StructureForInContext(localRegister, indexRegister, propertyRegister, enumeratorRegister, bodyBytecodeStartOffset)));
4422}
4423
4424void BytecodeGenerator::popStructureForInScope(RegisterID* localRegister)
4425{
4426 if (!localRegister)
4427 return;
4428 unsigned bodyBytecodeEndOffset = instructions().size();
4429 m_forInContextStack.last()->asStructureForInContext().finalize(*this, m_codeBlock.get(), bodyBytecodeEndOffset);
4430 m_forInContextStack.removeLast();
4431}
4432
4433RegisterID* BytecodeGenerator::emitRestParameter(RegisterID* result, unsigned numParametersToSkip)
4434{
4435 RefPtr<RegisterID> restArrayLength = newTemporary();
4436 OpGetRestLength::emit(this, restArrayLength.get(), numParametersToSkip);
4437
4438 OpCreateRest::emit(this, result, restArrayLength.get(), numParametersToSkip);
4439
4440 return result;
4441}
4442
4443void BytecodeGenerator::emitRequireObjectCoercible(RegisterID* value, const String& error)
4444{
4445 // FIXME: op_jneq_null treats "undetectable" objects as null/undefined. RequireObjectCoercible
4446 // thus incorrectly throws a TypeError for interfaces like HTMLAllCollection.
4447 Ref<Label> target = newLabel();
4448 OpJneqNull::emit(this, value, target->bind(this));
4449 emitThrowTypeError(error);
4450 emitLabel(target.get());
4451}
4452
4453void BytecodeGenerator::emitYieldPoint(RegisterID* argument, JSAsyncGeneratorFunction::AsyncGeneratorSuspendReason result)
4454{
4455 Ref<Label> mergePoint = newLabel();
4456 unsigned yieldPointIndex = m_yieldPoints++;
4457 emitGeneratorStateChange(yieldPointIndex + 1);
4458
4459 if (parseMode() == SourceParseMode::AsyncGeneratorBodyMode) {
4460 int suspendReason = static_cast<int32_t>(result);
4461 emitPutById(generatorRegister(), propertyNames().builtinNames().asyncGeneratorSuspendReasonPrivateName(), emitLoad(nullptr, jsNumber(suspendReason)));
4462 }
4463
4464 // Split the try range here.
4465 Ref<Label> savePoint = newEmittedLabel();
4466 for (unsigned i = m_tryContextStack.size(); i--;) {
4467 TryContext& context = m_tryContextStack[i];
4468 m_tryRanges.append(TryRange {
4469 context.start.copyRef(),
4470 savePoint.copyRef(),
4471 context.tryData
4472 });
4473 // Try range will be restarted at the merge point.
4474 context.start = mergePoint.get();
4475 }
4476 Vector<TryContext> savedTryContextStack;
4477 m_tryContextStack.swap(savedTryContextStack);
4478
4479
4480#if CPU(NEEDS_ALIGNED_ACCESS)
4481 // Conservatively align for the bytecode rewriter: it will delete this yield and
4482 // append a fragment, so we make sure that the start of the fragment is aligned.
4483 while (m_writer.position() % OpcodeSize::Wide)
4484 OpNop::emit<OpcodeSize::Narrow>(this);
4485#endif
4486 OpYield::emit(this, generatorFrameRegister(), yieldPointIndex, argument);
4487
4488 // Restore the try contexts, whose start offsets have been updated to the merge point.
4489 m_tryContextStack.swap(savedTryContextStack);
4490 emitLabel(mergePoint.get());
4491}
4492
4493RegisterID* BytecodeGenerator::emitYield(RegisterID* argument, JSAsyncGeneratorFunction::AsyncGeneratorSuspendReason result)
4494{
4495 emitYieldPoint(argument, result);
4496
4497 Ref<Label> normalLabel = newLabel();
4498 RefPtr<RegisterID> condition = newTemporary();
4499 emitEqualityOp<OpStricteq>(condition.get(), generatorResumeModeRegister(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSGeneratorFunction::GeneratorResumeMode::NormalMode))));
4500 emitJumpIfTrue(condition.get(), normalLabel.get());
4501
4502 Ref<Label> throwLabel = newLabel();
4503 emitEqualityOp<OpStricteq>(condition.get(), generatorResumeModeRegister(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSGeneratorFunction::GeneratorResumeMode::ThrowMode))));
4504 emitJumpIfTrue(condition.get(), throwLabel.get());
4505 // Return.
4506 {
4507 RefPtr<RegisterID> returnRegister = generatorValueRegister();
4508 bool hasFinally = emitReturnViaFinallyIfNeeded(returnRegister.get());
4509 if (!hasFinally)
4510 emitReturn(returnRegister.get());
4511 }
4512
4513 // Throw.
4514 emitLabel(throwLabel.get());
4515 emitThrow(generatorValueRegister());
4516
4517 // Normal.
4518 emitLabel(normalLabel.get());
4519 return generatorValueRegister();
4520}
4521
4522RegisterID* BytecodeGenerator::emitCallIterator(RegisterID* iterator, RegisterID* argument, ThrowableExpressionData* node)
4523{
4524 CallArguments args(*this, nullptr);
4525 move(args.thisRegister(), argument);
4526 emitCall(iterator, iterator, NoExpectedFunction, args, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);
4527
4528 return iterator;
4529}
4530
4531void BytecodeGenerator::emitAwait(RegisterID* value)
4532{
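    // An await is lowered as a yield with the Await suspend reason; the value the generator is
    // resumed with (left in the generator's value register) becomes the result of the await.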
4533 emitYield(value, JSAsyncGeneratorFunction::AsyncGeneratorSuspendReason::Await);
4534 move(value, generatorValueRegister());
4535}
4536
4537RegisterID* BytecodeGenerator::emitGetIterator(RegisterID* argument, ThrowableExpressionData* node)
4538{
4539 RefPtr<RegisterID> iterator = emitGetById(newTemporary(), argument, propertyNames().iteratorSymbol);
4540 emitCallIterator(iterator.get(), argument, node);
4541
4542 return iterator.get();
4543}
4544
4545RegisterID* BytecodeGenerator::emitGetAsyncIterator(RegisterID* argument, ThrowableExpressionData* node)
4546{
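    // Rough shape of the spec's GetIterator(obj, async) operation: if obj[Symbol.asyncIterator] is
    // undefined or null, fall back to obj[Symbol.iterator] and wrap the resulting sync iterator with
    // the @createAsyncFromSyncIterator builtin; otherwise call the async iterator method directly.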
4547 RefPtr<RegisterID> iterator = emitGetById(newTemporary(), argument, propertyNames().asyncIteratorSymbol);
4548 Ref<Label> asyncIteratorNotFound = newLabel();
4549 Ref<Label> asyncIteratorFound = newLabel();
4550 Ref<Label> iteratorReceived = newLabel();
4551
4552 emitJumpIfTrue(emitUnaryOp<OpEqNull>(newTemporary(), iterator.get()), asyncIteratorNotFound.get());
4553
4554 emitJump(asyncIteratorFound.get());
4555 emitLabel(asyncIteratorNotFound.get());
4556
4557 RefPtr<RegisterID> commonIterator = emitGetIterator(argument, node);
4558 move(iterator.get(), commonIterator.get());
4559
4560 RefPtr<RegisterID> nextMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().next);
4561
4562 auto varCreateAsyncFromSyncIterator = variable(propertyNames().builtinNames().createAsyncFromSyncIteratorPrivateName());
4563 RefPtr<RegisterID> scope = newTemporary();
4564 move(scope.get(), emitResolveScope(scope.get(), varCreateAsyncFromSyncIterator));
4565 RefPtr<RegisterID> createAsyncFromSyncIterator = emitGetFromScope(newTemporary(), scope.get(), varCreateAsyncFromSyncIterator, ThrowIfNotFound);
4566
4567 CallArguments args(*this, nullptr, 2);
4568 emitLoad(args.thisRegister(), jsUndefined());
4569
4570 move(args.argumentRegister(0), iterator.get());
4571 move(args.argumentRegister(1), nextMethod.get());
4572
4573 JSTextPosition divot(m_scopeNode->firstLine(), m_scopeNode->startOffset(), m_scopeNode->lineStartOffset());
4574 emitCall(iterator.get(), createAsyncFromSyncIterator.get(), NoExpectedFunction, args, divot, divot, divot, DebuggableCall::No);
4575
4576 emitJump(iteratorReceived.get());
4577
4578 emitLabel(asyncIteratorFound.get());
4579 emitCallIterator(iterator.get(), argument, node);
4580 emitLabel(iteratorReceived.get());
4581
4582 return iterator.get();
4583}
4584
4585RegisterID* BytecodeGenerator::emitDelegateYield(RegisterID* argument, ThrowableExpressionData* node)
4586{
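    // Lowers `yield*` delegation: depending on the resume mode we are woken up with, forward the
    // resumption value to the delegated iterator's next(), throw(), or return() method, re-yield the
    // value of each non-done result, and once the delegated iterator reports done, evaluate to its
    // final value.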
4587 RefPtr<RegisterID> value = newTemporary();
4588 {
4589 RefPtr<RegisterID> iterator = parseMode() == SourceParseMode::AsyncGeneratorBodyMode ? emitGetAsyncIterator(argument, node) : emitGetIterator(argument, node);
4590 RefPtr<RegisterID> nextMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().next);
4591
4592 Ref<Label> loopDone = newLabel();
4593 {
4594 Ref<Label> nextElement = newLabel();
4595 emitLoad(value.get(), jsUndefined());
4596
4597 emitJump(nextElement.get());
4598
4599 Ref<Label> loopStart = newLabel();
4600 emitLabel(loopStart.get());
4601 emitLoopHint();
4602
4603 Ref<Label> branchOnResult = newLabel();
4604 {
4605 emitYieldPoint(value.get(), JSAsyncGeneratorFunction::AsyncGeneratorSuspendReason::Yield);
4606
4607 Ref<Label> normalLabel = newLabel();
4608 Ref<Label> returnLabel = newLabel();
4609 {
4610 RefPtr<RegisterID> condition = newTemporary();
4611 emitEqualityOp<OpStricteq>(condition.get(), generatorResumeModeRegister(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSGeneratorFunction::GeneratorResumeMode::NormalMode))));
4612 emitJumpIfTrue(condition.get(), normalLabel.get());
4613
4614 emitEqualityOp<OpStricteq>(condition.get(), generatorResumeModeRegister(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSGeneratorFunction::GeneratorResumeMode::ReturnMode))));
4615 emitJumpIfTrue(condition.get(), returnLabel.get());
4616
4617 // Fallthrough to ThrowMode.
4618 }
4619
4620 // Throw.
4621 {
4622 Ref<Label> throwMethodFound = newLabel();
4623 RefPtr<RegisterID> throwMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().throwKeyword);
4624 emitJumpIfFalse(emitIsUndefined(newTemporary(), throwMethod.get()), throwMethodFound.get());
4625
4626 EmitAwait emitAwaitInIteratorClose = parseMode() == SourceParseMode::AsyncGeneratorBodyMode ? EmitAwait::Yes : EmitAwait::No;
4627 emitIteratorClose(iterator.get(), node, emitAwaitInIteratorClose);
4628
4629 emitThrowTypeError("Delegated generator does not have a 'throw' method."_s);
4630
4631 emitLabel(throwMethodFound.get());
4632 CallArguments throwArguments(*this, nullptr, 1);
4633 move(throwArguments.thisRegister(), iterator.get());
4634 move(throwArguments.argumentRegister(0), generatorValueRegister());
4635 emitCall(value.get(), throwMethod.get(), NoExpectedFunction, throwArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);
4636
4637 emitJump(branchOnResult.get());
4638 }
4639
4640 // Return.
4641 emitLabel(returnLabel.get());
4642 {
4643 Ref<Label> returnMethodFound = newLabel();
4644 RefPtr<RegisterID> returnMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().returnKeyword);
4645 emitJumpIfFalse(emitIsUndefined(newTemporary(), returnMethod.get()), returnMethodFound.get());
4646
4647 move(value.get(), generatorValueRegister());
4648
4649 Ref<Label> returnSequence = newLabel();
4650 emitJump(returnSequence.get());
4651
4652 emitLabel(returnMethodFound.get());
4653 CallArguments returnArguments(*this, nullptr, 1);
4654 move(returnArguments.thisRegister(), iterator.get());
4655 move(returnArguments.argumentRegister(0), generatorValueRegister());
4656 emitCall(value.get(), returnMethod.get(), NoExpectedFunction, returnArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);
4657
4658 if (parseMode() == SourceParseMode::AsyncGeneratorBodyMode)
4659 emitAwait(value.get());
4660
4661 Ref<Label> returnIteratorResultIsObject = newLabel();
4662 emitJumpIfTrue(emitIsObject(newTemporary(), value.get()), returnIteratorResultIsObject.get());
4663 emitThrowTypeError("Iterator result interface is not an object."_s);
4664
4665 emitLabel(returnIteratorResultIsObject.get());
4666
4667 Ref<Label> returnFromGenerator = newLabel();
4668 emitJumpIfTrue(emitGetById(newTemporary(), value.get(), propertyNames().done), returnFromGenerator.get());
4669
4670 emitGetById(value.get(), value.get(), propertyNames().value);
4671 emitJump(loopStart.get());
4672
4673 emitLabel(returnFromGenerator.get());
4674 emitGetById(value.get(), value.get(), propertyNames().value);
4675
4676 emitLabel(returnSequence.get());
4677 bool hasFinally = emitReturnViaFinallyIfNeeded(value.get());
4678 if (!hasFinally)
4679 emitReturn(value.get());
4680 }
4681
4682 // Normal.
4683 emitLabel(normalLabel.get());
4684 move(value.get(), generatorValueRegister());
4685 }
4686
4687 emitLabel(nextElement.get());
4688 emitIteratorNextWithValue(value.get(), nextMethod.get(), iterator.get(), value.get(), node);
4689
4690 emitLabel(branchOnResult.get());
4691
4692 if (parseMode() == SourceParseMode::AsyncGeneratorBodyMode)
4693 emitAwait(value.get());
4694
4695 Ref<Label> iteratorValueIsObject = newLabel();
4696 emitJumpIfTrue(emitIsObject(newTemporary(), value.get()), iteratorValueIsObject.get());
4697 emitThrowTypeError("Iterator result interface is not an object."_s);
4698 emitLabel(iteratorValueIsObject.get());
4699
4700 emitJumpIfTrue(emitGetById(newTemporary(), value.get(), propertyNames().done), loopDone.get());
4701 emitGetById(value.get(), value.get(), propertyNames().value);
4702
4703 emitJump(loopStart.get());
4704 }
4705 emitLabel(loopDone.get());
4706 }
4707
4708 emitGetById(value.get(), value.get(), propertyNames().value);
4709 return value.get();
4710}
4711
4712
4713void BytecodeGenerator::emitGeneratorStateChange(int32_t state)
4714{
4715 RegisterID* completedState = emitLoad(nullptr, jsNumber(state));
4716 emitPutById(generatorRegister(), propertyNames().builtinNames().generatorStatePrivateName(), completedState);
4717}
4718
4719bool BytecodeGenerator::emitJumpViaFinallyIfNeeded(int targetLabelScopeDepth, Label& jumpTarget)
4720{
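    // A break / continue whose target lies outside one or more finally scopes cannot jump to the
    // target directly: each intervening finally body has to run first. We encode the jump as a jumpID
    // in the innermost finally's completionType register and register the jump target with the
    // outermost finally; emitFinallyCompletion() then threads the jump through each finally in turn.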
4721 ASSERT(labelScopeDepth() - targetLabelScopeDepth >= 0);
4722 size_t numberOfScopesToCheckForFinally = labelScopeDepth() - targetLabelScopeDepth;
4723 ASSERT(numberOfScopesToCheckForFinally <= m_controlFlowScopeStack.size());
4724 if (!numberOfScopesToCheckForFinally)
4725 return false;
4726
4727 FinallyContext* innermostFinallyContext = nullptr;
4728 FinallyContext* outermostFinallyContext = nullptr;
4729 size_t scopeIndex = m_controlFlowScopeStack.size() - 1;
4730 while (numberOfScopesToCheckForFinally--) {
4731 ControlFlowScope* scope = &m_controlFlowScopeStack[scopeIndex--];
4732 if (scope->isFinallyScope()) {
4733 FinallyContext* finallyContext = scope->finallyContext;
4734 if (!innermostFinallyContext)
4735 innermostFinallyContext = finallyContext;
4736 outermostFinallyContext = finallyContext;
4737 finallyContext->incNumberOfBreaksOrContinues();
4738 }
4739 }
4740 if (!outermostFinallyContext)
4741 return false; // No finallys to thread through.
4742
4743 auto jumpID = bytecodeOffsetToJumpID(instructions().size());
4744 int lexicalScopeIndex = labelScopeDepthToLexicalScopeIndex(targetLabelScopeDepth);
4745 outermostFinallyContext->registerJump(jumpID, lexicalScopeIndex, jumpTarget);
4746
4747 emitLoad(innermostFinallyContext->completionTypeRegister(), jumpID);
4748 emitJump(*innermostFinallyContext->finallyLabel());
4749 return true; // We'll be jumping to a finally block.
4750}
4751
4752bool BytecodeGenerator::emitReturnViaFinallyIfNeeded(RegisterID* returnRegister)
4753{
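    // Likewise, a return inside one or more finally scopes must run every enclosing finally body
    // before actually returning. We stash the return value in the innermost finally's completion
    // value register, mark every enclosing finally as handling returns, and jump to the innermost
    // finally label; emitFinallyCompletion() threads the Return completion outward from there.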
4754 size_t numberOfScopesToCheckForFinally = m_controlFlowScopeStack.size();
4755 if (!numberOfScopesToCheckForFinally)
4756 return false;
4757
4758 FinallyContext* innermostFinallyContext = nullptr;
4759 while (numberOfScopesToCheckForFinally) {
4760 size_t scopeIndex = --numberOfScopesToCheckForFinally;
4761 ControlFlowScope* scope = &m_controlFlowScopeStack[scopeIndex];
4762 if (scope->isFinallyScope()) {
4763 FinallyContext* finallyContext = scope->finallyContext;
4764 if (!innermostFinallyContext)
4765 innermostFinallyContext = finallyContext;
4766 finallyContext->setHandlesReturns();
4767 }
4768 }
4769 if (!innermostFinallyContext)
4770 return false; // No finallys to thread through.
4771
4772 emitLoad(innermostFinallyContext->completionTypeRegister(), CompletionType::Return);
4773 move(innermostFinallyContext->completionValueRegister(), returnRegister);
4774 emitJump(*innermostFinallyContext->finallyLabel());
4775 return true; // We'll be jumping to a finally block.
4776}
4777
4778void BytecodeGenerator::emitFinallyCompletion(FinallyContext& context, Label& normalCompletionLabel)
4779{
4780 if (context.numberOfBreaksOrContinues() || context.handlesReturns()) {
4781 emitJumpIf<OpStricteq>(context.completionTypeRegister(), CompletionType::Normal, normalCompletionLabel);
4782
4783 FinallyContext* outerContext = context.outerContext();
4784
4785 size_t numberOfJumps = context.numberOfJumps();
4786 ASSERT(outerContext || numberOfJumps == context.numberOfBreaksOrContinues());
4787
4788 // Handle Break or Continue completions that jump into this FinallyContext.
4789 for (size_t i = 0; i < numberOfJumps; i++) {
4790 Ref<Label> nextLabel = newLabel();
4791 auto& jump = context.jumps(i);
4792 emitJumpIf<OpNstricteq>(context.completionTypeRegister(), jump.jumpID, nextLabel.get());
4793
4794 // This case is for Break / Continue completions from an inner finally context
4795 // with a jump target that is not beyond the next outer finally context:
4796 //
4797 // try {
4798 // for (... stuff ...) {
4799 // try {
4800 // continue; // Sets completionType to jumpID of top of the for loop.
4801 // } finally {
4802 // } // Jump to top of the for loop on completion.
4803 // }
4804 // } finally {
4805 // }
4806 //
4807 // Since the jumpID is targeting a label that is inside the outer finally context,
4808 // we can jump to it directly on completion of this finally context: there are no intermediate
4809 // finally blocks to run. After the Break / Continue, we will continue execution as normal.
4810 // So, we'll set the completionType to Normal (on behalf of the target) before we jump.
4811 // We can also set the completion value to undefined, but it will never be used for normal
4812 // completion anyway. So, we'll skip setting it.
4813
4814 restoreScopeRegister(jump.targetLexicalScopeIndex);
4815 emitLoad(context.completionTypeRegister(), CompletionType::Normal);
4816 emitJump(jump.targetLabel.get());
4817
4818 emitLabel(nextLabel.get());
4819 }
4820
4821 // Handle completions that take us out of this FinallyContext.
4822 if (outerContext) {
4823 if (context.handlesReturns()) {
4824 Ref<Label> isNotReturnLabel = newLabel();
4825 emitJumpIf<OpNstricteq>(context.completionTypeRegister(), CompletionType::Return, isNotReturnLabel.get());
4826
4827 // This case is for Return completion from an inner finally context:
4828 //
4829 // try {
4830 // try {
4831 // return result; // Sets completionType to Return, and completionValue to result.
4832 // } finally {
4833 // } // Jump to outer finally on completion.
4834 // } finally {
4835 // }
4836 //
4837 // Since we know there's at least one outer finally context (beyond the current context),
4838 // we cannot actually return from here. Instead, we pass the completionType and completionValue
4839 // on to the next outer finally, and let it decide what to do next on its completion. The
4840 // outer finally may or may not actually return depending on whether it encounters an abrupt
4841 // completion in its body that overrides this Return completion.
4842
4843 move(outerContext->completionTypeRegister(), context.completionTypeRegister());
4844 move(outerContext->completionValueRegister(), context.completionValueRegister());
4845 emitJump(*outerContext->finallyLabel());
4846
4847 emitLabel(isNotReturnLabel.get());
4848 }
4849
4850 bool hasBreaksOrContinuesThatEscapeCurrentFinally = context.numberOfBreaksOrContinues() > numberOfJumps;
4851 if (hasBreaksOrContinuesThatEscapeCurrentFinally) {
4852 Ref<Label> isThrowOrNormalLabel = newLabel();
4853 emitJumpIf<OpBeloweq>(context.completionTypeRegister(), CompletionType::Throw, isThrowOrNormalLabel.get());
4854
4855 // A completionType above Throw means we have a Break or Continue encoded as a jumpID.
4856 // We already ruled out Return above.
4857 static_assert(CompletionType::Normal < CompletionType::Throw && CompletionType::Throw < CompletionType::Return, "jumpIDs are above CompletionType::Return");
4858
4859 // This case is for Break / Continue completions in an inner finally context:
4860 //
4861 // 10: label:
4862 // 11: try {
4863 // 12: try {
4864 // 13: for (... stuff ...)
4865 // 14: break label; // Sets completionType to jumpID of label.
4866 // 15: } finally {
4867 // 16: } // Jumps to outer finally on completion.
4868 // 17: } finally {
4869 // 18: }
4870 //
4871 // The break (line 14) says to continue execution at the label at line 10. Before we can
4872 // goto line 10, the inner context's finally (line 15) needs to be run, followed by the
4873 // outer context's finally (line 17). 'outerContext' being non-null above tells us that
4874 // there is at least one outer finally context that we need to run after we complete the
4875 // current finally. Note that unless the body of the outer finally abruptly completes in a
4876 // different way, that outer finally also needs to complete with a Break / Continue to
4877 // the same target label. Hence, we need to pass the jumpID in this finally's completionTypeRegister
4878 // to the outer finally. The completion value for Break and Continue according to the spec
4879 // is undefined, but it won't ever be used. So, we'll skip setting it.
4880 //
4881 // Note that all we're doing here is passing the Break / Continue completion to the next
4882 // outer finally context. We don't worry about finally contexts beyond that. It is the
4883 // responsibility of the next outer finally to determine what to do next at its completion,
4884 // and pass on to the next outer context if present and needed.
4885
4886 move(outerContext->completionTypeRegister(), context.completionTypeRegister());
4887 emitJump(*outerContext->finallyLabel());
4888
4889 emitLabel(isThrowOrNormalLabel.get());
4890 }
4891
4892 } else {
4893 // We are the outermost finally.
4894 if (context.handlesReturns()) {
4895 Ref<Label> notReturnLabel = newLabel();
4896 emitJumpIf<OpNstricteq>(context.completionTypeRegister(), CompletionType::Return, notReturnLabel.get());
4897
4898 // This case is for Return completion from the outermost finally context:
4899 //
4900 // try {
4901 // return result; // Sets completionType to Return, and completionValue to result.
4902 // } finally {
4903 // } // Executes the return of the completionValue.
4904 //
4905 // Since we know there's no outer finally context (beyond the current context) to run,
4906 // we can actually execute a return for this Return completion. The value to return
4907 // is whatever is in the completionValueRegister.
4908
4909 emitWillLeaveCallFrameDebugHook();
4910 emitReturn(context.completionValueRegister(), ReturnFrom::Finally);
4911
4912 emitLabel(notReturnLabel.get());
4913 }
4914 }
4915 }
4916
4917 // By now, we've ruled out all Break / Continue / Return completions above. The only remaining
4918 // possibilities are Normal or Throw.
4919
4920 emitJumpIf<OpNstricteq>(context.completionTypeRegister(), CompletionType::Throw, normalCompletionLabel);
4921
4922 // We get here because we entered this finally context with Throw completionType (i.e. we have
4923 // an exception that we need to rethrow), and we didn't encounter a different abrupt completion
4924 // that overrides that incoming completionType. All we have to do here is re-throw the exception
4925 // captured in the completionValue.
4926 //
4927 // Note that unlike for Break / Continue / Return, we don't need to worry about outer finally
4928 // contexts. This is because any outer finally context (if present) will have its own exception
4929 // handler, which will take care of receiving the Throw completion, and re-capturing the exception
4930 // in its completionValue.
4931
4932 emitThrow(context.completionValueRegister());
4933}
4934
4935template<typename CompareOp>
4936void BytecodeGenerator::emitJumpIf(RegisterID* completionTypeRegister, CompletionType type, Label& jumpTarget)
4937{
4938 RefPtr<RegisterID> tempRegister = newTemporary();
4939 RegisterID* valueConstant = addConstantValue(jsNumber(static_cast<int>(type)));
4940 OperandTypes operandTypes = OperandTypes(ResultType::numberTypeIsInt32(), ResultType::unknownType());
4941
4942 auto equivalenceResult = emitBinaryOp<CompareOp>(tempRegister.get(), completionTypeRegister, valueConstant, operandTypes);
4943 emitJumpIfTrue(equivalenceResult, jumpTarget);
4944}
4945
4946void ForInContext::finalize(BytecodeGenerator& generator, UnlinkedCodeBlock* codeBlock, unsigned bodyBytecodeEndOffset)
4947{
4948 // Lexically invalidating ForInContexts is kind of weak sauce, but it only occurs if
4949 // either of the following conditions is true:
4950 //
4951 // (1) The loop iteration variable is re-assigned within the body of the loop.
4952 // (2) The loop iteration variable is captured in the lexical scope of the function.
4953 //
4954 // These two situations occur sufficiently rarely that it's okay to use this style of
4955 // "analysis" to make iteration faster. If we didn't want to do this, we would either have
4956 // to perform some flow-sensitive analysis to see if/when the loop iteration variable was
4957 // reassigned, or we'd have to resort to runtime checks to see if the variable had been
4958 // reassigned from its original value.
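// For example (illustrative JS, not from the original source), case (1) is something like:
//
//     for (var p in o) { p = "someOtherKey"; use(o[p]); }
//
// where o[p] can no longer be answered from the enumerator's fast path once p has been reassigned.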
4959
4960 for (unsigned offset = bodyBytecodeStartOffset(); isValid() && offset < bodyBytecodeEndOffset;) {
4961 auto instruction = generator.instructions().at(offset);
4962 OpcodeID opcodeID = instruction->opcodeID();
4963
4964 ASSERT(opcodeID != op_enter);
4965 computeDefsForBytecodeOffset(codeBlock, opcodeID, instruction.ptr(), [&] (VirtualRegister operand) {
4966 if (local()->virtualRegister() == operand)
4967 invalidate();
4968 });
4969 offset += instruction->size();
4970 }
4971}
4972
4973void StructureForInContext::finalize(BytecodeGenerator& generator, UnlinkedCodeBlock* codeBlock, unsigned bodyBytecodeEndOffset)
4974{
4975 Base::finalize(generator, codeBlock, bodyBytecodeEndOffset);
4976 if (isValid())
4977 return;
4978
4979 OpcodeID lastOpcodeID = generator.m_lastOpcodeID;
4980 InstructionStream::MutableRef lastInstruction = generator.m_lastInstruction;
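    // The context has been invalidated, so the recorded op_get_direct_pname instructions can no
    // longer assume the loop variable still holds the enumerator's current property name. Rewrite
    // each of them in place into a generic op_get_by_val on the original property register, and nop
    // out whatever bytes of the wider instruction are left over.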
4981 for (const auto& instTuple : m_getInsts) {
4982 unsigned instIndex = std::get<0>(instTuple);
4983 int propertyRegIndex = std::get<1>(instTuple);
4984 auto instruction = generator.m_writer.ref(instIndex);
4985 auto end = instIndex + instruction->size();
4986 ASSERT(instruction->isWide());
4987
4988 generator.m_writer.seek(instIndex);
4989
4990 auto bytecode = instruction->as<OpGetDirectPname>();
4991
4992 // Disable peephole optimizations.
4993 generator.m_lastOpcodeID = op_end;
4994
4995 // Change the opcode to get_by_val.
4996 // 1. dst stays the same.
4997 // 2. base stays the same.
4998 // 3. property gets switched to the original property.
4999 OpGetByVal::emit<OpcodeSize::Wide>(&generator, bytecode.m_dst, bytecode.m_base, VirtualRegister(propertyRegIndex));
5000
5001 // 4. Nop out the remaining bytes.
5002 while (generator.m_writer.position() < end)
5003 OpNop::emit<OpcodeSize::Narrow>(&generator);
5004 }
5005 generator.m_writer.seek(generator.m_writer.size());
5006 if (generator.m_lastInstruction.offset() + generator.m_lastInstruction->size() != generator.m_writer.size()) {
5007 generator.m_lastOpcodeID = lastOpcodeID;
5008 generator.m_lastInstruction = lastInstruction;
5009 }
5010}
5011
5012void IndexedForInContext::finalize(BytecodeGenerator& generator, UnlinkedCodeBlock* codeBlock, unsigned bodyBytecodeEndOffset)
5013{
5014 Base::finalize(generator, codeBlock, bodyBytecodeEndOffset);
5015 if (isValid())
5016 return;
5017
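    // The context has been invalidated, so each recorded get_by_val that was indexing with the
    // loop's integer index register is patched to use the original property register instead.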
5018 for (const auto& instPair : m_getInsts) {
5019 unsigned instIndex = instPair.first;
5020 int propertyRegIndex = instPair.second;
5021 // FIXME: we should not have to force this get_by_val to be wide, just guarantee that propertyRegIndex fits
5022 // https://bugs.webkit.org/show_bug.cgi?id=190929
5023 generator.m_writer.ref(instIndex)->cast<OpGetByVal>()->setProperty(VirtualRegister(propertyRegIndex), []() {
5024 ASSERT_NOT_REACHED();
5025 return VirtualRegister();
5026 });
5027 }
5028}
5029
5030void StaticPropertyAnalysis::record()
5031{
5032 auto* instruction = m_instructionRef.ptr();
5033 auto size = m_propertyIndexes.size();
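    // Use the number of properties we recorded as the object's inline capacity. The lambda passed
    // to setInlineCapacity() appears to be the fallback invoked when the value does not fit in the
    // instruction's operand, in which case we clamp to 255 (an assumption based on the setProperty()
    // fallback pattern used elsewhere in this file).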
5034 switch (instruction->opcodeID()) {
5035 case OpNewObject::opcodeID:
5036 instruction->cast<OpNewObject>()->setInlineCapacity(size, []() {
5037 return 255;
5038 });
5039 return;
5040 case OpCreateThis::opcodeID:
5041 instruction->cast<OpCreateThis>()->setInlineCapacity(size, []() {
5042 return 255;
5043 });
5044 return;
5045 default:
5046 ASSERT_NOT_REACHED();
5047 }
5048}
5049
5050void BytecodeGenerator::emitToThis()
5051{
5052 OpToThis::emit(this, kill(&m_thisRegister));
5053 m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
5054}
5055
5056} // namespace JSC
5057
5058namespace WTF {
5059
5060void printInternal(PrintStream& out, JSC::Variable::VariableKind kind)
5061{
5062 switch (kind) {
5063 case JSC::Variable::NormalVariable:
5064 out.print("Normal");
5065 return;
5066 case JSC::Variable::SpecialVariable:
5067 out.print("Special");
5068 return;
5069 }
5070 RELEASE_ASSERT_NOT_REACHED();
5071}
5072
5073} // namespace WTF
5074
5075