1/*
2 * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4 * Copyright (C) 2012 Igalia, S.L.
5 *
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
8 * are met:
9 *
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
15 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
16 * its contributors may be used to endorse or promote products derived
17 * from this software without specific prior written permission.
18 *
19 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 */
30
31#include "config.h"
32#include "BytecodeGenerator.h"
33
34#include "ArithProfile.h"
35#include "BuiltinExecutables.h"
36#include "BuiltinNames.h"
37#include "BytecodeGeneratorification.h"
38#include "BytecodeLivenessAnalysis.h"
39#include "BytecodeStructs.h"
40#include "BytecodeUseDef.h"
41#include "CatchScope.h"
42#include "DefinePropertyAttributes.h"
43#include "Interpreter.h"
44#include "JSAsyncGeneratorFunction.h"
45#include "JSBigInt.h"
46#include "JSCInlines.h"
47#include "JSFixedArray.h"
48#include "JSFunction.h"
49#include "JSGeneratorFunction.h"
50#include "JSImmutableButterfly.h"
51#include "JSLexicalEnvironment.h"
52#include "JSTemplateObjectDescriptor.h"
53#include "LowLevelInterpreter.h"
54#include "Options.h"
55#include "PreciseJumpTargetsInlines.h"
56#include "StackAlignment.h"
57#include "StrongInlines.h"
58#include "SuperSamplerBytecodeScope.h"
59#include "UnlinkedCodeBlock.h"
60#include "UnlinkedEvalCodeBlock.h"
61#include "UnlinkedFunctionCodeBlock.h"
62#include "UnlinkedMetadataTableInlines.h"
63#include "UnlinkedModuleProgramCodeBlock.h"
64#include "UnlinkedProgramCodeBlock.h"
65#include <wtf/BitVector.h>
66#include <wtf/CommaPrinter.h>
67#include <wtf/Optional.h>
68#include <wtf/SmallPtrSet.h>
69#include <wtf/StdLibExtras.h>
70#include <wtf/text/WTFString.h>
71
72namespace JSC {
73
// Metafunction mapping a call opcode to its varargs counterpart:
// OpTailCall -> OpTailCallVarargs; any other call opcode -> OpCallVarargs.
// The second (defaulted) template parameter exists only as the SFINAE hook
// used by the enable_if_t partial specializations below.
template<typename CallOp, typename = std::true_type>
struct VarArgsOp;

// Tail calls must remain tail calls in their varargs form.
template<typename CallOp>
struct VarArgsOp<CallOp, std::enable_if_t<std::is_same<CallOp, OpTailCall>::value, std::true_type>> {
    using type = OpTailCallVarargs;
};


// Every non-tail call lowers to the plain varargs call opcode.
template<typename CallOp>
struct VarArgsOp<CallOp, std::enable_if_t<!std::is_same<CallOp, OpTailCall>::value, std::true_type>> {
    using type = OpCallVarargs;
};
87
88
// Reclaim dead registers from the tail of a segmented vector. Only the
// trailing run of entries with a zero refCount() can be dropped; we stop as
// soon as the last entry is still referenced (or the vector becomes empty).
template<typename T>
static inline void shrinkToFit(T& segmentedVector)
{
    for (;;) {
        if (!segmentedVector.size())
            break;
        if (segmentedVector.last().refCount())
            break;
        segmentedVector.removeLast();
    }
}
95
// Bind this label to bytecode offset |location| and backpatch every jump that
// was emitted before the label's position was known.
void Label::setLocation(BytecodeGenerator& generator, unsigned location)
{
    m_location = location;

    // Each entry in m_unresolvedJumps is the bytecode offset of a jump
    // instruction that targets this label; patch its relative target now.
    for (auto offset : m_unresolvedJumps) {
        auto instruction = generator.m_writer.ref(offset);
        // Relative distance from the jump instruction to the label.
        int target = m_location - offset;

// setTargetLabel() tries to store the new target in the instruction's current
// encoding; if it does not fit, the fallback lambda records the target in the
// code block's out-of-line jump-target table instead and returns an empty
// BoundLabel as the in-instruction placeholder.
#define CASE(__op) \
    case __op::opcodeID: \
        instruction->cast<__op>()->setTargetLabel(BoundLabel(target), [&]() { \
            generator.m_codeBlock->addOutOfLineJumpTarget(instruction.offset(), target); \
            return BoundLabel(); \
        }); \
        break;

        // Every jump-family opcode must be listed here; a non-jump opcode in
        // m_unresolvedJumps indicates a generator bug.
        switch (instruction->opcodeID()) {
        CASE(OpJmp)
        CASE(OpJtrue)
        CASE(OpJfalse)
        CASE(OpJeqNull)
        CASE(OpJneqNull)
        CASE(OpJeq)
        CASE(OpJstricteq)
        CASE(OpJneq)
        CASE(OpJneqPtr)
        CASE(OpJnstricteq)
        CASE(OpJless)
        CASE(OpJlesseq)
        CASE(OpJgreater)
        CASE(OpJgreatereq)
        CASE(OpJnless)
        CASE(OpJnlesseq)
        CASE(OpJngreater)
        CASE(OpJngreatereq)
        CASE(OpJbelow)
        CASE(OpJbeloweq)
        default:
            ASSERT_NOT_REACHED();
        }
#undef CASE
    }
}
139
140int BoundLabel::target()
141{
142 switch (m_type) {
143 case Offset:
144 return m_target;
145 case GeneratorBackward:
146 return m_target - m_generator->m_writer.position();
147 case GeneratorForward:
148 return 0;
149 default:
150 RELEASE_ASSERT_NOT_REACHED();
151 }
152}
153
154int BoundLabel::saveTarget()
155{
156 if (m_type == GeneratorForward) {
157 m_savedTarget = m_generator->m_writer.position();
158 return 0;
159 }
160
161 m_savedTarget = target();
162 return m_savedTarget;
163}
164
165int BoundLabel::commitTarget()
166{
167 if (m_type == GeneratorForward) {
168 m_label->m_unresolvedJumps.append(m_savedTarget);
169 return 0;
170 }
171
172 return m_savedTarget;
173}
174
175void Variable::dump(PrintStream& out) const
176{
177 out.print(
178 "{ident = ", m_ident,
179 ", offset = ", m_offset,
180 ", local = ", RawPointer(m_local),
181 ", attributes = ", m_attributes,
182 ", kind = ", m_kind,
183 ", symbolTableConstantIndex = ", m_symbolTableConstantIndex,
184 ", isLexicallyScoped = ", m_isLexicallyScoped, "}");
185}
186
// Establishes a new innermost finally context, chaining to the previously
// current one via m_outerContext.
FinallyContext::FinallyContext(BytecodeGenerator& generator, Label& finallyLabel)
    : m_outerContext(generator.m_currentFinallyContext)
    , m_finallyLabel(&finallyLabel)
{
    ASSERT(m_jumps.isEmpty());
    // Registers recording how control reached the finally block and any value
    // that travels with it (e.g. a pending return value or thrown exception).
    m_completionRecord.typeRegister = generator.newTemporary();
    m_completionRecord.valueRegister = generator.newTemporary();
    // Initialize to "normal completion, no value".
    generator.emitLoad(completionTypeRegister(), CompletionType::Normal);
    generator.moveEmptyValue(completionValueRegister());
}
197
198ParserError BytecodeGenerator::generate()
199{
200 m_codeBlock->setThisRegister(m_thisRegister.virtualRegister());
201
202 emitLogShadowChickenPrologueIfNecessary();
203
204 // If we have declared a variable named "arguments" and we are using arguments then we should
205 // perform that assignment now.
206 if (m_needToInitializeArguments)
207 initializeVariable(variable(propertyNames().arguments), m_argumentsRegister);
208
209 if (m_restParameter)
210 m_restParameter->emit(*this);
211
212 {
213 RefPtr<RegisterID> temp = newTemporary();
214 RefPtr<RegisterID> tolLevelScope;
215 for (auto functionPair : m_functionsToInitialize) {
216 FunctionMetadataNode* metadata = functionPair.first;
217 FunctionVariableType functionType = functionPair.second;
218 emitNewFunction(temp.get(), metadata);
219 if (functionType == NormalFunctionVariable)
220 initializeVariable(variable(metadata->ident()), temp.get());
221 else if (functionType == TopLevelFunctionVariable) {
222 if (!tolLevelScope) {
223 // We know this will resolve to the top level scope or global object because our parser/global initialization code
224 // doesn't allow let/const/class variables to have the same names as functions.
225 // This is a top level function, and it's an error to ever create a top level function
226 // name that would resolve to a lexical variable. E.g:
227 // ```
228 // function f() {
229 // {
230 // let x;
231 // {
232 // //// error thrown here
233 // eval("function x(){}");
234 // }
235 // }
236 // }
237 // ```
238 // Therefore, we're guaranteed to have this resolve to a top level variable.
239 RefPtr<RegisterID> tolLevelObjectScope = emitResolveScope(nullptr, Variable(metadata->ident()));
240 tolLevelScope = newBlockScopeVariable();
241 move(tolLevelScope.get(), tolLevelObjectScope.get());
242 }
243 emitPutToScope(tolLevelScope.get(), Variable(metadata->ident()), temp.get(), ThrowIfNotFound, InitializationMode::NotInitialization);
244 } else
245 RELEASE_ASSERT_NOT_REACHED();
246 }
247 }
248
249 bool callingClassConstructor = constructorKind() != ConstructorKind::None && !isConstructor();
250 if (!callingClassConstructor)
251 m_scopeNode->emitBytecode(*this);
252 else {
253 // At this point we would have emitted an unconditional throw followed by some nonsense that's
254 // just an artifact of how this generator is structured. That code never runs, but it confuses
255 // bytecode analyses because it constitutes an unterminated basic block. So, we terminate the
256 // basic block the strongest way possible.
257 emitUnreachable();
258 }
259
260 for (auto& handler : m_exceptionHandlersToEmit) {
261 Ref<Label> realCatchTarget = newLabel();
262 TryData* tryData = handler.tryData;
263
264 OpCatch::emit(this, handler.exceptionRegister, handler.thrownValueRegister);
265 realCatchTarget->setLocation(*this, m_lastInstruction.offset());
266 if (handler.completionTypeRegister.isValid()) {
267 RegisterID completionTypeRegister { handler.completionTypeRegister };
268 CompletionType completionType =
269 tryData->handlerType == HandlerType::Finally || tryData->handlerType == HandlerType::SynthesizedFinally
270 ? CompletionType::Throw
271 : CompletionType::Normal;
272 emitLoad(&completionTypeRegister, completionType);
273 }
274 m_codeBlock->addJumpTarget(m_lastInstruction.offset());
275
276
277 emitJump(tryData->target.get());
278 tryData->target = WTFMove(realCatchTarget);
279 }
280
281 m_staticPropertyAnalyzer.kill();
282
283 for (auto& range : m_tryRanges) {
284 int start = range.start->bind();
285 int end = range.end->bind();
286
287 // This will happen for empty try blocks and for some cases of finally blocks:
288 //
289 // try {
290 // try {
291 // } finally {
292 // return 42;
293 // // *HERE*
294 // }
295 // } finally {
296 // print("things");
297 // }
298 //
299 // The return will pop scopes to execute the outer finally block. But this includes
300 // popping the try context for the inner try. The try context is live in the fall-through
301 // part of the finally block not because we will emit a handler that overlaps the finally,
302 // but because we haven't yet had a chance to plant the catch target. Then when we finish
303 // emitting code for the outer finally block, we repush the try contex, this time with a
304 // new start index. But that means that the start index for the try range corresponding
305 // to the inner-finally-following-the-return (marked as "*HERE*" above) will be greater
306 // than the end index of the try block. This is harmless since end < start handlers will
307 // never get matched in our logic, but we do the runtime a favor and choose to not emit
308 // such handlers at all.
309 if (end <= start)
310 continue;
311
312 UnlinkedHandlerInfo info(static_cast<uint32_t>(start), static_cast<uint32_t>(end),
313 static_cast<uint32_t>(range.tryData->target->bind()), range.tryData->handlerType);
314 m_codeBlock->addExceptionHandler(info);
315 }
316
317
318 if (isGeneratorOrAsyncFunctionBodyParseMode(m_codeBlock->parseMode()))
319 performGeneratorification(*this, m_codeBlock.get(), m_writer, m_generatorFrameSymbolTable.get(), m_generatorFrameSymbolTableIndex);
320
321 RELEASE_ASSERT(static_cast<unsigned>(m_codeBlock->numCalleeLocals()) < static_cast<unsigned>(FirstConstantRegisterIndex));
322 m_codeBlock->setInstructions(m_writer.finalize());
323
324 m_codeBlock->shrinkToFit();
325
326 if (m_expressionTooDeep)
327 return ParserError(ParserError::OutOfMemory);
328 return ParserError(ParserError::ErrorNone);
329}
330
// Constructor for global (program) code. Sets up the prologue: callee-save
// space, the "this" parameter slot, the top-level scope, and traps, and
// records hoisted top-level function declarations for generate() to bind.
BytecodeGenerator::BytecodeGenerator(VM& vm, ProgramNode* programNode, UnlinkedProgramCodeBlock* codeBlock, DebuggerMode debuggerMode, const VariableEnvironment* parentScopeTDZVariables)
    : m_shouldEmitDebugHooks(Options::forceDebuggerBytecodeGeneration() || debuggerMode == DebuggerOn)
    , m_scopeNode(programNode)
    , m_codeBlock(vm, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_codeType(GlobalCode)
    , m_vm(&vm)
    , m_needsToUpdateArrowFunctionContext(programNode->usesArrowFunction() || programNode->usesEval())
{
    // Program code is the outermost scope, so no parent-scope TDZ variables
    // can flow in.
    ASSERT_UNUSED(parentScopeTDZVariables, !parentScopeTDZVariables->size());

    for (auto& constantRegister : m_linkTimeConstantRegisters)
        constantRegister = nullptr;

    allocateCalleeSaveSpace();

    m_codeBlock->setNumParameters(1); // Allocate space for "this"

    emitEnter();

    allocateAndEmitScope();

    emitCheckTraps();

    const FunctionStack& functionStack = programNode->functionStack();

    // Top-level function declarations are hoisted; generate() creates and
    // binds them before the program body executes.
    for (auto* function : functionStack)
        m_functionsToInitialize.append(std::make_pair(function, TopLevelFunctionVariable));

    if (Options::validateBytecode()) {
        for (auto& entry : programNode->varDeclarations())
            RELEASE_ASSERT(entry.value.isVar());
    }
    codeBlock->setVariableDeclarations(programNode->varDeclarations());
    codeBlock->setLexicalDeclarations(programNode->lexicalVariables());
    // Even though this program may have lexical variables that go under TDZ, when linking the get_from_scope/put_to_scope
    // operations we emit we will have ResolveTypes that implicitly do TDZ checks. Therefore, we don't need
    // additional TDZ checks on top of those. This is why we can omit pushing programNode->lexicalVariables()
    // to the TDZ stack.

    if (needsToUpdateArrowFunctionContext()) {
        initializeArrowFunctionContextScopeIfNeeded();
        emitPutThisToArrowFunctionContextScope();
    }
}
376
377BytecodeGenerator::BytecodeGenerator(VM& vm, FunctionNode* functionNode, UnlinkedFunctionCodeBlock* codeBlock, DebuggerMode debuggerMode, const VariableEnvironment* parentScopeTDZVariables)
378 : m_shouldEmitDebugHooks(Options::forceDebuggerBytecodeGeneration() || debuggerMode == DebuggerOn)
379 , m_scopeNode(functionNode)
380 , m_codeBlock(vm, codeBlock)
381 , m_codeType(FunctionCode)
382 , m_vm(&vm)
383 , m_isBuiltinFunction(codeBlock->isBuiltinFunction())
384 , m_usesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode())
385 // FIXME: We should be able to have tail call elimination with the profiler
386 // enabled. This is currently not possible because the profiler expects
387 // op_will_call / op_did_call pairs before and after a call, which are not
388 // compatible with tail calls (we have no way of emitting op_did_call).
389 // https://bugs.webkit.org/show_bug.cgi?id=148819
390 , m_inTailPosition(Options::useTailCalls() && !isConstructor() && constructorKind() == ConstructorKind::None && isStrictMode())
391 , m_needsToUpdateArrowFunctionContext(functionNode->usesArrowFunction() || functionNode->usesEval())
392 , m_derivedContextType(codeBlock->derivedContextType())
393{
394 for (auto& constantRegister : m_linkTimeConstantRegisters)
395 constantRegister = nullptr;
396
397 if (m_isBuiltinFunction)
398 m_shouldEmitDebugHooks = false;
399
400 allocateCalleeSaveSpace();
401
402 SymbolTable* functionSymbolTable = SymbolTable::create(*m_vm);
403 functionSymbolTable->setUsesNonStrictEval(m_usesNonStrictEval);
404 int symbolTableConstantIndex = 0;
405
406 FunctionParameters& parameters = *functionNode->parameters();
407 // http://www.ecma-international.org/ecma-262/6.0/index.html#sec-functiondeclarationinstantiation
408 // This implements IsSimpleParameterList in the Ecma 2015 spec.
409 // If IsSimpleParameterList is false, we will create a strict-mode like arguments object.
410 // IsSimpleParameterList is false if the argument list contains any default parameter values,
411 // a rest parameter, or any destructuring patterns.
412 // If we do have default parameters, destructuring parameters, or a rest parameter, our parameters will be allocated in a different scope.
413 bool isSimpleParameterList = parameters.isSimpleParameterList();
414
415 SourceParseMode parseMode = codeBlock->parseMode();
416
417 bool containsArrowOrEvalButNotInArrowBlock = ((functionNode->usesArrowFunction() && functionNode->doAnyInnerArrowFunctionsUseAnyFeature()) || functionNode->usesEval()) && !m_codeBlock->isArrowFunction();
418 bool shouldCaptureSomeOfTheThings = m_shouldEmitDebugHooks || functionNode->needsActivation() || containsArrowOrEvalButNotInArrowBlock;
419
420 bool shouldCaptureAllOfTheThings = m_shouldEmitDebugHooks || codeBlock->usesEval();
421 bool needsArguments = ((functionNode->usesArguments() && !codeBlock->isArrowFunction()) || codeBlock->usesEval() || (functionNode->usesArrowFunction() && !codeBlock->isArrowFunction() && isArgumentsUsedInInnerArrowFunction()));
422
423 if (isGeneratorOrAsyncFunctionBodyParseMode(parseMode)) {
424 // Generator and AsyncFunction never provides "arguments". "arguments" reference will be resolved in an upper generator function scope.
425 needsArguments = false;
426 }
427
428 if (isGeneratorOrAsyncFunctionWrapperParseMode(parseMode) && needsArguments) {
429 // Generator does not provide "arguments". Instead, wrapping GeneratorFunction provides "arguments".
430 // This is because arguments of a generator should be evaluated before starting it.
431 // To workaround it, we evaluate these arguments as arguments of a wrapping generator function, and reference it from a generator.
432 //
433 // function *gen(a, b = hello())
434 // {
435 // return {
436 // @generatorNext: function (@generator, @generatorState, @generatorValue, @generatorResumeMode, @generatorFrame)
437 // {
438 // arguments; // This `arguments` should reference to the gen's arguments.
439 // ...
440 // }
441 // }
442 // }
443 shouldCaptureSomeOfTheThings = true;
444 }
445
446 if (shouldCaptureAllOfTheThings)
447 functionNode->varDeclarations().markAllVariablesAsCaptured();
448
449 auto captures = scopedLambda<bool (UniquedStringImpl*)>([&] (UniquedStringImpl* uid) -> bool {
450 if (!shouldCaptureSomeOfTheThings)
451 return false;
452 if (needsArguments && uid == propertyNames().arguments.impl()) {
453 // Actually, we only need to capture the arguments object when we "need full activation"
454 // because of name scopes. But historically we did it this way, so for now we just preserve
455 // the old behavior.
456 // FIXME: https://bugs.webkit.org/show_bug.cgi?id=143072
457 return true;
458 }
459 return functionNode->captures(uid);
460 });
461 auto varKind = [&] (UniquedStringImpl* uid) -> VarKind {
462 return captures(uid) ? VarKind::Scope : VarKind::Stack;
463 };
464
465 m_calleeRegister.setIndex(CallFrameSlot::callee);
466
467 initializeParameters(parameters);
468 ASSERT(!(isSimpleParameterList && m_restParameter));
469
470 emitEnter();
471
472 if (isGeneratorOrAsyncFunctionBodyParseMode(parseMode))
473 m_generatorRegister = &m_parameters[1];
474
475 allocateAndEmitScope();
476
477 emitCheckTraps();
478
479 if (functionNameIsInScope(functionNode->ident(), functionNode->functionMode())) {
480 ASSERT(parseMode != SourceParseMode::GeneratorBodyMode);
481 ASSERT(!isAsyncFunctionBodyParseMode(parseMode));
482 bool isDynamicScope = functionNameScopeIsDynamic(codeBlock->usesEval(), codeBlock->isStrictMode());
483 bool isFunctionNameCaptured = captures(functionNode->ident().impl());
484 bool markAsCaptured = isDynamicScope || isFunctionNameCaptured;
485 emitPushFunctionNameScope(functionNode->ident(), &m_calleeRegister, markAsCaptured);
486 }
487
488 if (shouldCaptureSomeOfTheThings)
489 m_lexicalEnvironmentRegister = addVar();
490
491 if (isGeneratorOrAsyncFunctionBodyParseMode(parseMode) || shouldCaptureSomeOfTheThings || vm.typeProfiler())
492 symbolTableConstantIndex = addConstantValue(functionSymbolTable)->index();
493
494 // We can allocate the "var" environment if we don't have default parameter expressions. If we have
495 // default parameter expressions, we have to hold off on allocating the "var" environment because
496 // the parent scope of the "var" environment is the parameter environment.
497 if (isSimpleParameterList)
498 initializeVarLexicalEnvironment(symbolTableConstantIndex, functionSymbolTable, shouldCaptureSomeOfTheThings);
499
500 // Figure out some interesting facts about our arguments.
501 bool capturesAnyArgumentByName = false;
502 if (functionNode->hasCapturedVariables()) {
503 FunctionParameters& parameters = *functionNode->parameters();
504 for (size_t i = 0; i < parameters.size(); ++i) {
505 auto pattern = parameters.at(i).first;
506 if (!pattern->isBindingNode())
507 continue;
508 const Identifier& ident = static_cast<const BindingNode*>(pattern)->boundProperty();
509 capturesAnyArgumentByName |= captures(ident.impl());
510 }
511 }
512
513 if (capturesAnyArgumentByName)
514 ASSERT(m_lexicalEnvironmentRegister);
515
516 // Need to know what our functions are called. Parameters have some goofy behaviors when it
517 // comes to functions of the same name.
518 for (FunctionMetadataNode* function : functionNode->functionStack())
519 m_functions.add(function->ident().impl());
520
521 if (needsArguments) {
522 // Create the arguments object now. We may put the arguments object into the activation if
523 // it is captured. Either way, we create two arguments object variables: one is our
524 // private variable that is immutable, and another that is the user-visible variable. The
525 // immutable one is only used here, or during formal parameter resolutions if we opt for
526 // DirectArguments.
527
528 m_argumentsRegister = addVar();
529 m_argumentsRegister->ref();
530 }
531
532 if (needsArguments && !codeBlock->isStrictMode() && isSimpleParameterList) {
533 // If we captured any formal parameter by name, then we use ScopedArguments. Otherwise we
534 // use DirectArguments. With ScopedArguments, we lift all of our arguments into the
535 // activation.
536
537 if (capturesAnyArgumentByName) {
538 functionSymbolTable->setArgumentsLength(vm, parameters.size());
539
540 // For each parameter, we have two possibilities:
541 // Either it's a binding node with no function overlap, in which case it gets a name
542 // in the symbol table - or it just gets space reserved in the symbol table. Either
543 // way we lift the value into the scope.
544 for (unsigned i = 0; i < parameters.size(); ++i) {
545 ScopeOffset offset = functionSymbolTable->takeNextScopeOffset(NoLockingNecessary);
546 functionSymbolTable->setArgumentOffset(vm, i, offset);
547 if (UniquedStringImpl* name = visibleNameForParameter(parameters.at(i).first)) {
548 VarOffset varOffset(offset);
549 SymbolTableEntry entry(varOffset);
550 // Stores to these variables via the ScopedArguments object will not do
551 // notifyWrite(), since that would be cumbersome. Also, watching formal
552 // parameters when "arguments" is in play is unlikely to be super profitable.
553 // So, we just disable it.
554 entry.disableWatching(*m_vm);
555 functionSymbolTable->set(NoLockingNecessary, name, entry);
556 }
557 OpPutToScope::emit(this, m_lexicalEnvironmentRegister, UINT_MAX, virtualRegisterForArgument(1 + i), GetPutInfo(ThrowIfNotFound, LocalClosureVar, InitializationMode::NotInitialization), symbolTableConstantIndex, offset.offset());
558 }
559
560 // This creates a scoped arguments object and copies the overflow arguments into the
561 // scope. It's the equivalent of calling ScopedArguments::createByCopying().
562 OpCreateScopedArguments::emit(this, m_argumentsRegister, m_lexicalEnvironmentRegister);
563 } else {
564 // We're going to put all parameters into the DirectArguments object. First ensure
565 // that the symbol table knows that this is happening.
566 for (unsigned i = 0; i < parameters.size(); ++i) {
567 if (UniquedStringImpl* name = visibleNameForParameter(parameters.at(i).first))
568 functionSymbolTable->set(NoLockingNecessary, name, SymbolTableEntry(VarOffset(DirectArgumentsOffset(i))));
569 }
570
571 OpCreateDirectArguments::emit(this, m_argumentsRegister);
572 }
573 } else if (isSimpleParameterList) {
574 // Create the formal parameters the normal way. Any of them could be captured, or not. If
575 // captured, lift them into the scope. We cannot do this if we have default parameter expressions
576 // because when default parameter expressions exist, they belong in their own lexical environment
577 // separate from the "var" lexical environment.
578 for (unsigned i = 0; i < parameters.size(); ++i) {
579 UniquedStringImpl* name = visibleNameForParameter(parameters.at(i).first);
580 if (!name)
581 continue;
582
583 if (!captures(name)) {
584 // This is the easy case - just tell the symbol table about the argument. It will
585 // be accessed directly.
586 functionSymbolTable->set(NoLockingNecessary, name, SymbolTableEntry(VarOffset(virtualRegisterForArgument(1 + i))));
587 continue;
588 }
589
590 ScopeOffset offset = functionSymbolTable->takeNextScopeOffset(NoLockingNecessary);
591 const Identifier& ident =
592 static_cast<const BindingNode*>(parameters.at(i).first)->boundProperty();
593 functionSymbolTable->set(NoLockingNecessary, name, SymbolTableEntry(VarOffset(offset)));
594
595 OpPutToScope::emit(this, m_lexicalEnvironmentRegister, addConstant(ident), virtualRegisterForArgument(1 + i), GetPutInfo(ThrowIfNotFound, LocalClosureVar, InitializationMode::NotInitialization), symbolTableConstantIndex, offset.offset());
596 }
597 }
598
599 if (needsArguments && (codeBlock->isStrictMode() || !isSimpleParameterList)) {
600 // Allocate a cloned arguments object.
601 OpCreateClonedArguments::emit(this, m_argumentsRegister);
602 }
603
604 // There are some variables that need to be preinitialized to something other than Undefined:
605 //
606 // - "arguments": unless it's used as a function or parameter, this should refer to the
607 // arguments object.
608 //
609 // - functions: these always override everything else.
610 //
611 // The most logical way to do all of this is to initialize none of the variables until now,
612 // and then initialize them in BytecodeGenerator::generate() in such an order that the rules
613 // for how these things override each other end up holding. We would initialize "arguments" first,
614 // then all arguments, then the functions.
615 //
616 // But some arguments are already initialized by default, since if they aren't captured and we
617 // don't have "arguments" then we just point the symbol table at the stack slot of those
618 // arguments. We end up initializing the rest of the arguments that have an uncomplicated
619 // binding (i.e. don't involve destructuring) above when figuring out how to lay them out,
620 // because that's just the simplest thing. This means that when we initialize them, we have to
621 // watch out for the things that override arguments (namely, functions).
622
623 // This is our final act of weirdness. "arguments" is overridden by everything except the
624 // callee. We add it to the symbol table if it's not already there and it's not an argument.
625 bool shouldCreateArgumentsVariableInParameterScope = false;
626 if (needsArguments) {
627 // If "arguments" is overridden by a function or destructuring parameter name, then it's
628 // OK for us to call createVariable() because it won't change anything. It's also OK for
629 // us to them tell BytecodeGenerator::generate() to write to it because it will do so
630 // before it initializes functions and destructuring parameters. But if "arguments" is
631 // overridden by a "simple" function parameter, then we have to bail: createVariable()
632 // would assert and BytecodeGenerator::generate() would write the "arguments" after the
633 // argument value had already been properly initialized.
634
635 bool haveParameterNamedArguments = false;
636 for (unsigned i = 0; i < parameters.size(); ++i) {
637 UniquedStringImpl* name = visibleNameForParameter(parameters.at(i).first);
638 if (name == propertyNames().arguments.impl()) {
639 haveParameterNamedArguments = true;
640 break;
641 }
642 }
643
644 bool shouldCreateArgumensVariable = !haveParameterNamedArguments
645 && !SourceParseModeSet(SourceParseMode::ArrowFunctionMode, SourceParseMode::AsyncArrowFunctionMode).contains(m_codeBlock->parseMode());
646 shouldCreateArgumentsVariableInParameterScope = shouldCreateArgumensVariable && !isSimpleParameterList;
647 // Do not create arguments variable in case of Arrow function. Value will be loaded from parent scope
648 if (shouldCreateArgumensVariable && !shouldCreateArgumentsVariableInParameterScope) {
649 createVariable(
650 propertyNames().arguments, varKind(propertyNames().arguments.impl()), functionSymbolTable);
651
652 m_needToInitializeArguments = true;
653 }
654 }
655
656 for (FunctionMetadataNode* function : functionNode->functionStack()) {
657 const Identifier& ident = function->ident();
658 createVariable(ident, varKind(ident.impl()), functionSymbolTable);
659 m_functionsToInitialize.append(std::make_pair(function, NormalFunctionVariable));
660 }
661 for (auto& entry : functionNode->varDeclarations()) {
662 ASSERT(!entry.value.isLet() && !entry.value.isConst());
663 if (!entry.value.isVar()) // This is either a parameter or callee.
664 continue;
665 if (shouldCreateArgumentsVariableInParameterScope && entry.key.get() == propertyNames().arguments.impl())
666 continue;
667 createVariable(Identifier::fromUid(m_vm, entry.key.get()), varKind(entry.key.get()), functionSymbolTable, IgnoreExisting);
668 }
669
670
671 m_newTargetRegister = addVar();
672 switch (parseMode) {
673 case SourceParseMode::GeneratorWrapperFunctionMode:
674 case SourceParseMode::GeneratorWrapperMethodMode:
675 case SourceParseMode::AsyncGeneratorWrapperMethodMode:
676 case SourceParseMode::AsyncGeneratorWrapperFunctionMode: {
677 m_generatorRegister = addVar();
678
679 // FIXME: Emit to_this only when Generator uses it.
680 // https://bugs.webkit.org/show_bug.cgi?id=151586
681 emitToThis();
682
683 move(m_generatorRegister, &m_calleeRegister);
684 emitCreateThis(m_generatorRegister);
685 break;
686 }
687
688 case SourceParseMode::AsyncArrowFunctionMode:
689 case SourceParseMode::AsyncMethodMode:
690 case SourceParseMode::AsyncFunctionMode: {
691 ASSERT(!isConstructor());
692 ASSERT(constructorKind() == ConstructorKind::None);
693 m_generatorRegister = addVar();
694 m_promiseCapabilityRegister = addVar();
695
696 if (parseMode != SourceParseMode::AsyncArrowFunctionMode) {
697 // FIXME: Emit to_this only when AsyncFunctionBody uses it.
698 // https://bugs.webkit.org/show_bug.cgi?id=151586
699 emitToThis();
700 }
701
702 emitNewObject(m_generatorRegister);
703
704 // let promiseCapability be @newPromiseCapability(@Promise)
705 auto varNewPromiseCapability = variable(propertyNames().builtinNames().newPromiseCapabilityPrivateName());
706 RefPtr<RegisterID> scope = newTemporary();
707 move(scope.get(), emitResolveScope(scope.get(), varNewPromiseCapability));
708 RefPtr<RegisterID> newPromiseCapability = emitGetFromScope(newTemporary(), scope.get(), varNewPromiseCapability, ThrowIfNotFound);
709
710 CallArguments args(*this, nullptr, 1);
711 emitLoad(args.thisRegister(), jsUndefined());
712
713 auto& builtinNames = propertyNames().builtinNames();
714 auto varPromiseConstructor = variable(m_isBuiltinFunction ? builtinNames.InternalPromisePrivateName() : builtinNames.PromisePrivateName());
715 move(scope.get(), emitResolveScope(scope.get(), varPromiseConstructor));
716 emitGetFromScope(args.argumentRegister(0), scope.get(), varPromiseConstructor, ThrowIfNotFound);
717
718 // JSTextPosition(int _line, int _offset, int _lineStartOffset)
719 JSTextPosition divot(m_scopeNode->firstLine(), m_scopeNode->startOffset(), m_scopeNode->lineStartOffset());
720 emitCall(promiseCapabilityRegister(), newPromiseCapability.get(), NoExpectedFunction, args, divot, divot, divot, DebuggableCall::No);
721 break;
722 }
723
724 case SourceParseMode::AsyncGeneratorBodyMode:
725 case SourceParseMode::AsyncFunctionBodyMode:
726 case SourceParseMode::AsyncArrowFunctionBodyMode:
727 case SourceParseMode::GeneratorBodyMode: {
728 // |this| is already filled correctly before here.
729 emitLoad(m_newTargetRegister, jsUndefined());
730 break;
731 }
732
733 default: {
734 if (SourceParseMode::ArrowFunctionMode != parseMode) {
735 if (isConstructor()) {
736 move(m_newTargetRegister, &m_thisRegister);
737 if (constructorKind() == ConstructorKind::Extends) {
738 moveEmptyValue(&m_thisRegister);
739 } else
740 emitCreateThis(&m_thisRegister);
741 } else if (constructorKind() != ConstructorKind::None)
742 emitThrowTypeError("Cannot call a class constructor without |new|");
743 else {
744 bool shouldEmitToThis = false;
745 if (functionNode->usesThis() || codeBlock->usesEval() || m_scopeNode->doAnyInnerArrowFunctionsUseThis() || m_scopeNode->doAnyInnerArrowFunctionsUseEval())
746 shouldEmitToThis = true;
747 else if ((functionNode->usesSuperProperty() || m_scopeNode->doAnyInnerArrowFunctionsUseSuperProperty()) && !codeBlock->isStrictMode()) {
748 // We must emit to_this when we're not in strict mode because we
749 // will convert |this| to an object, and that object may be passed
750 // to a strict function as |this|. This is observable because that
751 // strict function's to_this will just return the object.
752 //
753 // We don't need to emit this for strict-mode code because
754 // strict-mode code may call another strict function, which will
755 // to_this if it directly uses this; this is OK, because we defer
756 // to_this until |this| is used directly. Strict-mode code might
757 // also call a sloppy mode function, and that will to_this, which
758 // will defer the conversion, again, until necessary.
759 shouldEmitToThis = true;
760 }
761
762 if (shouldEmitToThis)
763 emitToThis();
764 }
765 }
766 break;
767 }
768 }
769
770 // We need load |super| & |this| for arrow function before initializeDefaultParameterValuesAndSetupFunctionScopeStack
771 // if we have default parameter expression. Because |super| & |this| values can be used there
772 if ((SourceParseModeSet(SourceParseMode::ArrowFunctionMode, SourceParseMode::AsyncArrowFunctionMode).contains(parseMode) && !isSimpleParameterList) || parseMode == SourceParseMode::AsyncArrowFunctionBodyMode) {
773 if (functionNode->usesThis() || functionNode->usesSuperProperty())
774 emitLoadThisFromArrowFunctionLexicalEnvironment();
775
776 if (m_scopeNode->usesNewTarget() || m_scopeNode->usesSuperCall())
777 emitLoadNewTargetFromArrowFunctionLexicalEnvironment();
778 }
779
780 if (needsToUpdateArrowFunctionContext() && !codeBlock->isArrowFunction()) {
781 bool canReuseLexicalEnvironment = isSimpleParameterList;
782 initializeArrowFunctionContextScopeIfNeeded(functionSymbolTable, canReuseLexicalEnvironment);
783 emitPutThisToArrowFunctionContextScope();
784 emitPutNewTargetToArrowFunctionContextScope();
785 emitPutDerivedConstructorToArrowFunctionContextScope();
786 }
787
788 // All "addVar()"s needs to happen before "initializeDefaultParameterValuesAndSetupFunctionScopeStack()" is called
789 // because a function's default parameter ExpressionNodes will use temporary registers.
790 pushTDZVariables(*parentScopeTDZVariables, TDZCheckOptimization::DoNotOptimize, TDZRequirement::UnderTDZ);
791
792 Ref<Label> catchLabel = newLabel();
793 TryData* tryFormalParametersData = nullptr;
794 bool needTryCatch = isAsyncFunctionWrapperParseMode(parseMode) && !isSimpleParameterList;
795 if (needTryCatch) {
796 Ref<Label> tryFormalParametersStart = newEmittedLabel();
797 tryFormalParametersData = pushTry(tryFormalParametersStart.get(), catchLabel.get(), HandlerType::SynthesizedCatch);
798 }
799
800 initializeDefaultParameterValuesAndSetupFunctionScopeStack(parameters, isSimpleParameterList, functionNode, functionSymbolTable, symbolTableConstantIndex, captures, shouldCreateArgumentsVariableInParameterScope);
801
802 if (needTryCatch) {
803 Ref<Label> didNotThrow = newLabel();
804 emitJump(didNotThrow.get());
805 emitLabel(catchLabel.get());
806 popTry(tryFormalParametersData, catchLabel.get());
807
808 RefPtr<RegisterID> thrownValue = newTemporary();
809 emitOutOfLineCatchHandler(thrownValue.get(), nullptr, tryFormalParametersData);
810
811 // return promiseCapability.@reject(thrownValue)
812 RefPtr<RegisterID> reject = emitGetById(newTemporary(), promiseCapabilityRegister(), m_vm->propertyNames->builtinNames().rejectPrivateName());
813
814 CallArguments args(*this, nullptr, 1);
815 emitLoad(args.thisRegister(), jsUndefined());
816 move(args.argumentRegister(0), thrownValue.get());
817
818 JSTextPosition divot(functionNode->firstLine(), functionNode->startOffset(), functionNode->lineStartOffset());
819
820 RefPtr<RegisterID> result = emitCall(newTemporary(), reject.get(), NoExpectedFunction, args, divot, divot, divot, DebuggableCall::No);
821 emitReturn(emitGetById(newTemporary(), promiseCapabilityRegister(), m_vm->propertyNames->builtinNames().promisePrivateName()));
822
823 emitLabel(didNotThrow.get());
824 }
825
826 // If we don't have default parameter expression, then loading |this| inside an arrow function must be done
827 // after initializeDefaultParameterValuesAndSetupFunctionScopeStack() because that function sets up the
828 // SymbolTable stack and emitLoadThisFromArrowFunctionLexicalEnvironment() consults the SymbolTable stack
829 if (SourceParseModeSet(SourceParseMode::ArrowFunctionMode, SourceParseMode::AsyncArrowFunctionMode).contains(parseMode) && isSimpleParameterList) {
830 if (functionNode->usesThis() || functionNode->usesSuperProperty())
831 emitLoadThisFromArrowFunctionLexicalEnvironment();
832
833 if (m_scopeNode->usesNewTarget() || m_scopeNode->usesSuperCall())
834 emitLoadNewTargetFromArrowFunctionLexicalEnvironment();
835 }
836
837 // Set up the lexical environment scope as the generator frame. We store the saved and resumed generator registers into this scope with the symbol keys.
838 // Since they are symbol keyed, these variables cannot be reached from the usual code.
839 if (isGeneratorOrAsyncFunctionBodyParseMode(parseMode)) {
840 m_generatorFrameSymbolTable.set(*m_vm, functionSymbolTable);
841 m_generatorFrameSymbolTableIndex = symbolTableConstantIndex;
842 if (m_lexicalEnvironmentRegister)
843 move(generatorFrameRegister(), m_lexicalEnvironmentRegister);
844 else {
845 // It would be possible that generator does not need to suspend and resume any registers.
846 // In this case, we would like to avoid creating a lexical environment as much as possible.
847 // op_create_generator_frame_environment is a marker, which is similar to op_yield.
848 // Generatorification inserts lexical environment creation if necessary. Otherwise, we convert it to op_mov frame, `undefined`.
849 OpCreateGeneratorFrameEnvironment::emit(this, generatorFrameRegister(), scopeRegister(), VirtualRegister { symbolTableConstantIndex }, addConstantValue(jsUndefined()));
850 }
851 emitPutById(generatorRegister(), propertyNames().builtinNames().generatorFramePrivateName(), generatorFrameRegister());
852 }
853
854 bool shouldInitializeBlockScopedFunctions = false; // We generate top-level function declarations in ::generate().
855 pushLexicalScope(m_scopeNode, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, shouldInitializeBlockScopedFunctions);
856}
857
// Generates bytecode for program text evaluated via `eval`. Eval code has no
// formal parameters of its own (only the implicit |this|); its "var"
// declarations are recorded on the unlinked code block (adoptVariables /
// adoptFunctionHoistingCandidates) instead of being allocated as locals here.
BytecodeGenerator::BytecodeGenerator(VM& vm, EvalNode* evalNode, UnlinkedEvalCodeBlock* codeBlock, DebuggerMode debuggerMode, const VariableEnvironment* parentScopeTDZVariables)
    : m_shouldEmitDebugHooks(Options::forceDebuggerBytecodeGeneration() || debuggerMode == DebuggerOn)
    , m_scopeNode(evalNode)
    , m_codeBlock(vm, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_codeType(EvalCode)
    , m_vm(&vm)
    , m_usesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode())
    , m_needsToUpdateArrowFunctionContext(evalNode->usesArrowFunction() || evalNode->usesEval())
    , m_derivedContextType(codeBlock->derivedContextType())
{
    for (auto& constantRegister : m_linkTimeConstantRegisters)
        constantRegister = nullptr;

    allocateCalleeSaveSpace();

    // |this| is the only parameter.
    m_codeBlock->setNumParameters(1);

    // Eval code inherits the TDZ (temporal dead zone) variables of the scope
    // it is evaluated in.
    pushTDZVariables(*parentScopeTDZVariables, TDZCheckOptimization::DoNotOptimize, TDZRequirement::UnderTDZ);

    emitEnter();

    allocateAndEmitScope();

    emitCheckTraps();

    // Record top-level function declarations; their variables are initialized
    // later, in ::generate().
    for (FunctionMetadataNode* function : evalNode->functionStack()) {
        m_codeBlock->addFunctionDecl(makeFunction(function));
        m_functionsToInitialize.append(std::make_pair(function, TopLevelFunctionVariable));
    }

    // Partition "var" declarations: sloppy-mode function-hoisting candidates
    // are adopted separately from ordinary variables.
    const VariableEnvironment& varDeclarations = evalNode->varDeclarations();
    Vector<Identifier, 0, UnsafeVectorOverflow> variables;
    Vector<Identifier, 0, UnsafeVectorOverflow> hoistedFunctions;
    for (auto& entry : varDeclarations) {
        ASSERT(entry.value.isVar());
        ASSERT(entry.key->isAtomic() || entry.key->isSymbol());
        if (entry.value.isSloppyModeHoistingCandidate())
            hoistedFunctions.append(Identifier::fromUid(m_vm, entry.key.get()));
        else
            variables.append(Identifier::fromUid(m_vm, entry.key.get()));
    }
    codeBlock->adoptVariables(variables);
    codeBlock->adoptFunctionHoistingCandidates(WTFMove(hoistedFunctions));

    if (evalNode->usesSuperCall() || evalNode->usesNewTarget())
        m_newTargetRegister = addVar();

    // Inside an arrow function context, |this| and new.target live in the
    // arrow function's lexical environment; load them from there if used.
    if (codeBlock->isArrowFunctionContext() && (evalNode->usesThis() || evalNode->usesSuperProperty()))
        emitLoadThisFromArrowFunctionLexicalEnvironment();

    if (evalNode->usesSuperCall() || evalNode->usesNewTarget())
        emitLoadNewTargetFromArrowFunctionLexicalEnvironment();

    if (needsToUpdateArrowFunctionContext() && !codeBlock->isArrowFunctionContext() && !isDerivedConstructorContext()) {
        initializeArrowFunctionContextScopeIfNeeded();
        emitPutThisToArrowFunctionContextScope();
    }

    bool shouldInitializeBlockScopedFunctions = false; // We generate top-level function declarations in ::generate().
    pushLexicalScope(m_scopeNode, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, shouldInitializeBlockScopedFunctions);
}
920
// Generates bytecode for a module program. Module code is treated as strict
// here (m_usesNonStrictEval is false) and expects no TDZ variables from an
// enclosing scope (asserted below). Exported, imported-namespace, and
// captured variables are allocated in the module environment; everything else
// stays on the stack.
BytecodeGenerator::BytecodeGenerator(VM& vm, ModuleProgramNode* moduleProgramNode, UnlinkedModuleProgramCodeBlock* codeBlock, DebuggerMode debuggerMode, const VariableEnvironment* parentScopeTDZVariables)
    : m_shouldEmitDebugHooks(Options::forceDebuggerBytecodeGeneration() || debuggerMode == DebuggerOn)
    , m_scopeNode(moduleProgramNode)
    , m_codeBlock(vm, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_codeType(ModuleCode)
    , m_vm(&vm)
    , m_usesNonStrictEval(false)
    , m_needsToUpdateArrowFunctionContext(moduleProgramNode->usesArrowFunction() || moduleProgramNode->usesEval())
{
    ASSERT_UNUSED(parentScopeTDZVariables, !parentScopeTDZVariables->size());

    for (auto& constantRegister : m_linkTimeConstantRegisters)
        constantRegister = nullptr;

    if (m_isBuiltinFunction)
        m_shouldEmitDebugHooks = false;

    allocateCalleeSaveSpace();

    // The module environment gets its own symbol table; it is stored in the
    // constant pool (see constantSymbolTable below) so it can be retrieved at
    // link time.
    SymbolTable* moduleEnvironmentSymbolTable = SymbolTable::create(*m_vm);
    moduleEnvironmentSymbolTable->setUsesNonStrictEval(m_usesNonStrictEval);
    moduleEnvironmentSymbolTable->setScopeType(SymbolTable::ScopeType::LexicalScope);

    // With debug hooks or eval, every var must be reachable, so mark them all
    // as captured — lookUpVarKind then allocates them in the environment.
    bool shouldCaptureAllOfTheThings = m_shouldEmitDebugHooks || codeBlock->usesEval();
    if (shouldCaptureAllOfTheThings)
        moduleProgramNode->varDeclarations().markAllVariablesAsCaptured();

    auto captures = [&] (UniquedStringImpl* uid) -> bool {
        return moduleProgramNode->captures(uid);
    };
    auto lookUpVarKind = [&] (UniquedStringImpl* uid, const VariableEnvironmentEntry& entry) -> VarKind {
        // Allocate the exported variables in the module environment.
        if (entry.isExported())
            return VarKind::Scope;

        // Allocate the namespace variables in the module environment to instantiate
        // it from the outside of the module code.
        if (entry.isImportedNamespace())
            return VarKind::Scope;

        if (entry.isCaptured())
            return VarKind::Scope;
        return captures(uid) ? VarKind::Scope : VarKind::Stack;
    };

    emitEnter();

    allocateAndEmitScope();

    emitCheckTraps();

    m_calleeRegister.setIndex(CallFrameSlot::callee);

    m_codeBlock->setNumParameters(1); // Allocate space for "this"

    // Now declare all variables.

    createVariable(m_vm->propertyNames->builtinNames().metaPrivateName(), VarKind::Scope, moduleEnvironmentSymbolTable, VerifyExisting);

    for (auto& entry : moduleProgramNode->varDeclarations()) {
        ASSERT(!entry.value.isLet() && !entry.value.isConst());
        if (!entry.value.isVar()) // This is either a parameter or callee.
            continue;
        // Imported bindings are not allocated in the module environment as usual variables' way.
        // These references remain the "Dynamic" in the unlinked code block. Later, when linking
        // the code block, we resolve the reference to the "ModuleVar".
        if (entry.value.isImported() && !entry.value.isImportedNamespace())
            continue;
        createVariable(Identifier::fromUid(m_vm, entry.key.get()), lookUpVarKind(entry.key.get(), entry.value), moduleEnvironmentSymbolTable, IgnoreExisting);
    }

    VariableEnvironment& lexicalVariables = moduleProgramNode->lexicalVariables();
    instantiateLexicalVariables(lexicalVariables, moduleEnvironmentSymbolTable, ScopeRegisterType::Block, lookUpVarKind);

    // We keep the symbol table in the constant pool.
    RegisterID* constantSymbolTable = nullptr;
    if (vm.typeProfiler())
        constantSymbolTable = addConstantValue(moduleEnvironmentSymbolTable);
    else
        constantSymbolTable = addConstantValue(moduleEnvironmentSymbolTable->cloneScopePart(*m_vm));

    pushTDZVariables(lexicalVariables, TDZCheckOptimization::Optimize, TDZRequirement::UnderTDZ);
    bool isWithScope = false;
    m_lexicalScopeStack.append({ moduleEnvironmentSymbolTable, m_topMostScope, isWithScope, constantSymbolTable->index() });
    emitPrefillStackTDZVariables(lexicalVariables, moduleEnvironmentSymbolTable);

    // makeFunction assumes that there's correct TDZ stack entries.
    // So it should be called after putting our lexical environment to the TDZ stack correctly.

    for (FunctionMetadataNode* function : moduleProgramNode->functionStack()) {
        const auto& iterator = moduleProgramNode->varDeclarations().find(function->ident().impl());
        RELEASE_ASSERT(iterator != moduleProgramNode->varDeclarations().end());
        RELEASE_ASSERT(!iterator->value.isImported());

        VarKind varKind = lookUpVarKind(iterator->key.get(), iterator->value);
        if (varKind == VarKind::Scope) {
            // http://www.ecma-international.org/ecma-262/6.0/#sec-moduledeclarationinstantiation
            // Section 15.2.1.16.4, step 16-a-iv-1.
            // All heap allocated function declarations should be instantiated when the module environment
            // is created. They include the exported function declarations and not-exported-but-heap-allocated
            // function declarations. This is required because exported function should be instantiated before
            // executing the any module in the dependency graph. This enables the modules to link the imported
            // bindings before executing the any module code.
            //
            // And since function declarations are instantiated before executing the module body code, the spec
            // allows the functions inside the module to be executed before its module body is executed under
            // the circular dependencies. The following is the example.
            //
            // Module A (executed first):
            //    import { b } from "B";
            //    // Here, the module "B" is not executed yet, but the function declaration is already instantiated.
            //    // So we can call the function exported from "B".
            //    b();
            //
            //    export function a() {
            //    }
            //
            // Module B (executed second):
            //    import { a } from "A";
            //
            //    export function b() {
            //        c();
            //    }
            //
            //    // c is not exported, but since it is referenced from the b, we should instantiate it before
            //    // executing the "B" module code.
            //    function c() {
            //        a();
            //    }
            //
            // Module EntryPoint (executed last):
            //    import "B";
            //    import "A";
            //
            m_codeBlock->addFunctionDecl(makeFunction(function));
        } else {
            // Stack allocated functions can be allocated when executing the module's body.
            m_functionsToInitialize.append(std::make_pair(function, NormalFunctionVariable));
        }
    }

    // Remember the constant register offset to the top-most symbol table. This symbol table will be
    // cloned in the code block linking. After that, to create the module environment, we retrieve
    // the cloned symbol table from the linked code block by using this offset.
    codeBlock->setModuleEnvironmentSymbolTableConstantRegisterOffset(constantSymbolTable->index());
}
1068
1069BytecodeGenerator::~BytecodeGenerator()
1070{
1071}
1072
// Implements the non-simple-parameter-list parts of
// FunctionDeclarationInstantiation (ES6 section 9.2.12): creates a separate
// "let"-like scope for the parameters, evaluates default parameter
// expressions, binds each parameter's destructuring pattern, and finally seeds
// same-named "var" bindings with the parameter values. When
// isSimpleParameterList is true this function does nothing.
//
// parameters / functionNode: the function's parameter list and body AST.
// functionSymbolTable / symbolTableConstantIndex: symbol table (and its
//     constant-pool index) backing the function's var scope.
// captures: predicate saying whether a given name is captured by a closure.
// shouldCreateArgumentsVariableInParameterScope: when true, |arguments| is
//     declared inside the parameter scope (never together with a simple list,
//     as asserted below).
void BytecodeGenerator::initializeDefaultParameterValuesAndSetupFunctionScopeStack(
    FunctionParameters& parameters, bool isSimpleParameterList, FunctionNode* functionNode, SymbolTable* functionSymbolTable, 
    int symbolTableConstantIndex, const ScopedLambda<bool (UniquedStringImpl*)>& captures, bool shouldCreateArgumentsVariableInParameterScope)
{
    Vector<std::pair<Identifier, RefPtr<RegisterID>>> valuesToMoveIntoVars;
    ASSERT(!(isSimpleParameterList && shouldCreateArgumentsVariableInParameterScope));
    if (!isSimpleParameterList) {
        // Refer to the ES6 spec section 9.2.12: http://www.ecma-international.org/ecma-262/6.0/index.html#sec-functiondeclarationinstantiation
        // This implements step 21.
        VariableEnvironment environment;
        Vector<Identifier> allParameterNames; 
        for (unsigned i = 0; i < parameters.size(); i++)
            parameters.at(i).first->collectBoundIdentifiers(allParameterNames);
        if (shouldCreateArgumentsVariableInParameterScope)
            allParameterNames.append(propertyNames().arguments);
        IdentifierSet parameterSet;
        for (auto& ident : allParameterNames) {
            parameterSet.add(ident.impl());
            auto addResult = environment.add(ident);
            addResult.iterator->value.setIsLet(); // When we have default parameter expressions, parameters act like "let" variables.
            if (captures(ident.impl()))
                addResult.iterator->value.setIsCaptured();
        }
        // This implements step 25 of section 9.2.12.
        pushLexicalScopeInternal(environment, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, TDZRequirement::UnderTDZ, ScopeType::LetConstScope, ScopeRegisterType::Block);

        if (shouldCreateArgumentsVariableInParameterScope) {
            Variable argumentsVariable = variable(propertyNames().arguments); 
            initializeVariable(argumentsVariable, m_argumentsRegister);
            // |arguments| is initialized right away, so its TDZ check can go.
            liftTDZCheckIfPossible(argumentsVariable);
        }

        // Evaluate defaults and bind each parameter's pattern, left to right.
        RefPtr<RegisterID> temp = newTemporary();
        for (unsigned i = 0; i < parameters.size(); i++) {
            std::pair<DestructuringPatternNode*, ExpressionNode*> parameter = parameters.at(i);
            if (parameter.first->isRestParameter())
                continue;
            // Parameters beyond the machine parameter count are fetched with
            // emitGetArgument instead of reading a parameter register.
            if ((i + 1) < m_parameters.size())
                move(temp.get(), &m_parameters[i + 1]);
            else
                emitGetArgument(temp.get(), i);
            if (parameter.second) {
                // A default expression applies only when the argument is undefined.
                RefPtr<RegisterID> condition = emitIsUndefined(newTemporary(), temp.get());
                Ref<Label> skipDefaultParameterBecauseNotUndefined = newLabel();
                emitJumpIfFalse(condition.get(), skipDefaultParameterBecauseNotUndefined.get());
                emitNode(temp.get(), parameter.second);
                emitLabel(skipDefaultParameterBecauseNotUndefined.get());
            }

            parameter.first->bindValue(*this, temp.get());
        }

        // Final act of weirdness for default parameters. If a "var" also
        // has the same name as a parameter, it should start out as the
        // value of that parameter. Note, though, that they will be distinct
        // bindings.
        // This is step 28 of section 9.2.12. 
        for (auto& entry : functionNode->varDeclarations()) {
            if (!entry.value.isVar()) // This is either a parameter or callee.
                continue;

            if (parameterSet.contains(entry.key)) {
                Identifier ident = Identifier::fromUid(m_vm, entry.key.get());
                Variable var = variable(ident);
                RegisterID* scope = emitResolveScope(nullptr, var);
                RefPtr<RegisterID> value = emitGetFromScope(newTemporary(), scope, var, DoNotThrowIfNotFound);
                valuesToMoveIntoVars.append(std::make_pair(ident, value));
            }
        }

        // Functions with default parameter expressions must have a separate environment
        // record for parameters and "var"s. The "var" environment record must have the
        // parameter environment record as its parent.
        // See step 28 of section 9.2.12.
        bool hasCapturedVariables = !!m_lexicalEnvironmentRegister; 
        initializeVarLexicalEnvironment(symbolTableConstantIndex, functionSymbolTable, hasCapturedVariables);
    }

    // This completes step 28 of section 9.2.12.
    for (unsigned i = 0; i < valuesToMoveIntoVars.size(); i++) {
        ASSERT(!isSimpleParameterList);
        Variable var = variable(valuesToMoveIntoVars[i].first);
        RegisterID* scope = emitResolveScope(nullptr, var);
        emitPutToScope(scope, var, valuesToMoveIntoVars[i].second.get(), DoNotThrowIfNotFound, InitializationMode::NotInitialization);
    }
}
1159
1160bool BytecodeGenerator::needsDerivedConstructorInArrowFunctionLexicalEnvironment()
1161{
1162 ASSERT(m_codeBlock->isClassContext() || !(isConstructor() && constructorKind() == ConstructorKind::Extends));
1163 return m_codeBlock->isClassContext() && isSuperUsedInInnerArrowFunction();
1164}
1165
1166void BytecodeGenerator::initializeArrowFunctionContextScopeIfNeeded(SymbolTable* functionSymbolTable, bool canReuseLexicalEnvironment)
1167{
1168 ASSERT(!m_arrowFunctionContextLexicalEnvironmentRegister);
1169
1170 if (canReuseLexicalEnvironment && m_lexicalEnvironmentRegister) {
1171 RELEASE_ASSERT(!m_codeBlock->isArrowFunction());
1172 RELEASE_ASSERT(functionSymbolTable);
1173
1174 m_arrowFunctionContextLexicalEnvironmentRegister = m_lexicalEnvironmentRegister;
1175
1176 ScopeOffset offset;
1177
1178 if (isThisUsedInInnerArrowFunction()) {
1179 offset = functionSymbolTable->takeNextScopeOffset(NoLockingNecessary);
1180 functionSymbolTable->set(NoLockingNecessary, propertyNames().thisIdentifier.impl(), SymbolTableEntry(VarOffset(offset)));
1181 }
1182
1183 if (m_codeType == FunctionCode && isNewTargetUsedInInnerArrowFunction()) {
1184 offset = functionSymbolTable->takeNextScopeOffset();
1185 functionSymbolTable->set(NoLockingNecessary, propertyNames().builtinNames().newTargetLocalPrivateName().impl(), SymbolTableEntry(VarOffset(offset)));
1186 }
1187
1188 if (needsDerivedConstructorInArrowFunctionLexicalEnvironment()) {
1189 offset = functionSymbolTable->takeNextScopeOffset(NoLockingNecessary);
1190 functionSymbolTable->set(NoLockingNecessary, propertyNames().builtinNames().derivedConstructorPrivateName().impl(), SymbolTableEntry(VarOffset(offset)));
1191 }
1192
1193 return;
1194 }
1195
1196 VariableEnvironment environment;
1197
1198 if (isThisUsedInInnerArrowFunction()) {
1199 auto addResult = environment.add(propertyNames().thisIdentifier);
1200 addResult.iterator->value.setIsCaptured();
1201 addResult.iterator->value.setIsLet();
1202 }
1203
1204 if (m_codeType == FunctionCode && isNewTargetUsedInInnerArrowFunction()) {
1205 auto addTarget = environment.add(propertyNames().builtinNames().newTargetLocalPrivateName());
1206 addTarget.iterator->value.setIsCaptured();
1207 addTarget.iterator->value.setIsLet();
1208 }
1209
1210 if (needsDerivedConstructorInArrowFunctionLexicalEnvironment()) {
1211 auto derivedConstructor = environment.add(propertyNames().builtinNames().derivedConstructorPrivateName());
1212 derivedConstructor.iterator->value.setIsCaptured();
1213 derivedConstructor.iterator->value.setIsLet();
1214 }
1215
1216 if (environment.size() > 0) {
1217 size_t size = m_lexicalScopeStack.size();
1218 pushLexicalScopeInternal(environment, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, TDZRequirement::UnderTDZ, ScopeType::LetConstScope, ScopeRegisterType::Block);
1219
1220 ASSERT_UNUSED(size, m_lexicalScopeStack.size() == size + 1);
1221
1222 m_arrowFunctionContextLexicalEnvironmentRegister = m_lexicalScopeStack.last().m_scope;
1223 }
1224}
1225
1226RegisterID* BytecodeGenerator::initializeNextParameter()
1227{
1228 VirtualRegister reg = virtualRegisterForArgument(m_codeBlock->numParameters());
1229 m_parameters.grow(m_parameters.size() + 1);
1230 auto& parameter = registerFor(reg);
1231 parameter.setIndex(reg.offset());
1232 m_codeBlock->addParameter();
1233 return &parameter;
1234}
1235
1236void BytecodeGenerator::initializeParameters(FunctionParameters& parameters)
1237{
1238 // Make sure the code block knows about all of our parameters, and make sure that parameters
1239 // needing destructuring are noted.
1240 m_thisRegister.setIndex(initializeNextParameter()->index()); // this
1241
1242 bool nonSimpleArguments = false;
1243 for (unsigned i = 0; i < parameters.size(); ++i) {
1244 auto parameter = parameters.at(i);
1245 auto pattern = parameter.first;
1246 if (pattern->isRestParameter()) {
1247 RELEASE_ASSERT(!m_restParameter);
1248 m_restParameter = static_cast<RestParameterNode*>(pattern);
1249 nonSimpleArguments = true;
1250 continue;
1251 }
1252 if (parameter.second) {
1253 nonSimpleArguments = true;
1254 continue;
1255 }
1256 if (!nonSimpleArguments)
1257 initializeNextParameter();
1258 }
1259}
1260
// Establishes the function's "var" scope on the lexical scope stack. When any
// var is captured, a lexical environment object is created (backed by the
// symbol table at symbolTableConstantIndex in the constant pool) and installed
// as the current scope; otherwise only the bookkeeping entry is pushed.
void BytecodeGenerator::initializeVarLexicalEnvironment(int symbolTableConstantIndex, SymbolTable* functionSymbolTable, bool hasCapturedVariables)
{
    if (hasCapturedVariables) {
        RELEASE_ASSERT(m_lexicalEnvironmentRegister);
        OpCreateLexicalEnvironment::emit(this, m_lexicalEnvironmentRegister, scopeRegister(), VirtualRegister { symbolTableConstantIndex }, addConstantValue(jsUndefined()));

        // Make the freshly created environment the current scope.
        OpMov::emit(this, scopeRegister(), m_lexicalEnvironmentRegister);

        pushLocalControlFlowScope();
    }
    bool isWithScope = false;
    m_lexicalScopeStack.append({ functionSymbolTable, m_lexicalEnvironmentRegister, isWithScope, symbolTableConstantIndex });
    // Record where the var scope sits on the stack so later code can find it.
    m_varScopeLexicalScopeStackIndex = m_lexicalScopeStack.size() - 1;
}
1275
1276UniquedStringImpl* BytecodeGenerator::visibleNameForParameter(DestructuringPatternNode* pattern)
1277{
1278 if (pattern->isBindingNode()) {
1279 const Identifier& ident = static_cast<const BindingNode*>(pattern)->boundProperty();
1280 if (!m_functions.contains(ident.impl()))
1281 return ident.impl();
1282 }
1283 return nullptr;
1284}
1285
1286RegisterID* BytecodeGenerator::newRegister()
1287{
1288 m_calleeLocals.append(virtualRegisterForLocal(m_calleeLocals.size()));
1289 int numCalleeLocals = std::max<int>(m_codeBlock->m_numCalleeLocals, m_calleeLocals.size());
1290 numCalleeLocals = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), numCalleeLocals);
1291 m_codeBlock->m_numCalleeLocals = numCalleeLocals;
1292 return &m_calleeLocals.last();
1293}
1294
// Trims the tail of the callee-locals pool (via shrinkToFit) so those slots
// can be handed out again by newRegister().
void BytecodeGenerator::reclaimFreeRegisters()
{
    shrinkToFit(m_calleeLocals);
}
1299
1300RegisterID* BytecodeGenerator::newBlockScopeVariable()
1301{
1302 reclaimFreeRegisters();
1303
1304 return newRegister();
1305}
1306
1307RegisterID* BytecodeGenerator::newTemporary()
1308{
1309 reclaimFreeRegisters();
1310
1311 RegisterID* result = newRegister();
1312 result->setTemporary();
1313 return result;
1314}
1315
// Opens a new labeled-statement scope with a fresh break target. Loop scopes
// additionally receive a continue target.
Ref<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
{
    shrinkToFit(m_labelScopes);

    // Allocate new label scope.
    m_labelScopes.append(type, name, labelScopeDepth(), newLabel(), type == LabelScope::Loop ? RefPtr<Label>(newLabel()) : RefPtr<Label>()); // Only loops have continue targets.
    return m_labelScopes.last();
}
1324
// Allocates a fresh, unbound label; it is bound to an instruction offset later
// by emitLabel().
Ref<Label> BytecodeGenerator::newLabel()
{
    shrinkToFit(m_labels);

    // Allocate new label ID.
    m_labels.append();
    return m_labels.last();
}
1333
1334Ref<Label> BytecodeGenerator::newEmittedLabel()
1335{
1336 Ref<Label> label = newLabel();
1337 emitLabel(label.get());
1338 return label;
1339}
1340
// Remembers the most recently emitted instruction so peephole optimizations
// (e.g. fuseCompareAndJump) can inspect it and rewind() it.
void BytecodeGenerator::recordOpcode(OpcodeID opcodeID)
{
    // Either nothing is recorded yet (op_end) or the recorded instruction must
    // still be the last thing in the instruction stream.
    ASSERT(m_lastOpcodeID == op_end || (m_lastOpcodeID == m_lastInstruction->opcodeID() && m_writer.position() == m_lastInstruction.offset() + m_lastInstruction->size()));
    m_lastInstruction = m_writer.ref();
    m_lastOpcodeID = opcodeID;
}
1347
// On targets requiring aligned memory access, pads the instruction stream with
// narrow nops so that a following wide instruction's operands (which begin one
// byte past the opcode — hence the +1) land on a Wide-size boundary.
void BytecodeGenerator::alignWideOpcode()
{
#if CPU(NEEDS_ALIGNED_ACCESS)
    while ((m_writer.position() + 1) % OpcodeSize::Wide)
        OpNop::emit<OpcodeSize::Narrow>(this);
#endif
}
1355
// Binds |l0| to the current instruction offset and records that offset as a
// jump target on the code block. Binding a label disables peephole
// optimizations across it, since the preceding instruction may now be jumped
// to and can no longer be rewound.
void BytecodeGenerator::emitLabel(Label& l0)
{
    unsigned newLabelIndex = instructions().size();
    l0.setLocation(*this, newLabelIndex);

    if (m_codeBlock->numberOfJumpTargets()) {
        unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
        ASSERT(lastLabelIndex <= newLabelIndex);
        if (newLabelIndex == lastLabelIndex) {
            // Peephole optimizations have already been disabled by emitting the last label
            return;
        }
    }

    m_codeBlock->addJumpTarget(newLabelIndex);

    // This disables peephole optimizations when an instruction is a jump target
    m_lastOpcodeID = op_end;
}
1375
// Emits the function prologue opcode. With recursive tail-call optimization
// enabled, the point just after op_enter is also registered as a jump target
// (which, as elsewhere, disables peepholes across it).
void BytecodeGenerator::emitEnter()
{
    OpEnter::emit(this);

    if (LIKELY(Options::optimizeRecursiveTailCalls())) {
        // We must add the end of op_enter as a potential jump target, because the bytecode parser may decide to split its basic block
        // to have somewhere to jump to if there is a recursive tail-call that points to this function.
        m_codeBlock->addJumpTarget(instructions().size());
        // This disables peephole optimizations when an instruction is a jump target
        m_lastOpcodeID = op_end;
    }
}
1388
// Emits the per-iteration loop preamble: an op_loop_hint immediately followed
// by a traps check (see emitCheckTraps).
void BytecodeGenerator::emitLoopHint()
{
    OpLoopHint::emit(this);
    emitCheckTraps();
}
1394
// Emits an unconditional jump to |target|.
void BytecodeGenerator::emitJump(Label& target)
{
    OpJmp::emit(this, target.bind(this));
}
1399
// Emits op_check_traps. Used at function entry (see the constructors) and at
// loop heads (emitLoopHint).
void BytecodeGenerator::emitCheckTraps()
{
    OpCheckTraps::emit(this);
}
1404
// Un-emits the most recently recorded instruction. Used by the jump-fusion
// peepholes to replace a compare followed by a jump with a single fused
// compare-and-jump opcode.
void ALWAYS_INLINE BytecodeGenerator::rewind()
{
    ASSERT(m_lastInstruction.isValid());
    // Clear the record so nothing peepholes against the removed instruction.
    m_lastOpcodeID = op_end;
    m_writer.rewind(m_lastInstruction);
}
1411
// Peephole: if the branch condition |cond| is exactly the result of the
// immediately preceding binary comparison BinOp, rewind that comparison and
// emit the fused compare-and-jump opcode JmpOp instead. Callers pass
// swapOperands = true when they need the operands mirrored before fusing.
// Returns true when the fusion was performed.
template<typename BinOp, typename JmpOp>
bool BytecodeGenerator::fuseCompareAndJump(RegisterID* cond, Label& target, bool swapOperands)
{
    auto binop = m_lastInstruction->as<BinOp>();
    // Fusing is safe only if the comparison's destination is a temporary with
    // no remaining references — nothing else can observe the compare result.
    if (cond->index() == binop.m_dst.offset() && cond->isTemporary() && !cond->refCount()) {
        rewind();

        if (swapOperands)
            std::swap(binop.m_lhs, binop.m_rhs);

        JmpOp::emit(this, binop.m_lhs, binop.m_rhs, target.bind(this));
        return true;
    }
    return false;
}
1427
// Peephole: like fuseCompareAndJump, but for a unary test (e.g. eq_null)
// followed by a conditional jump. Rewinds the test and emits the fused
// test-and-jump opcode JmpOp. Returns true when the fusion was performed.
template<typename UnaryOp, typename JmpOp>
bool BytecodeGenerator::fuseTestAndJmp(RegisterID* cond, Label& target)
{
    auto unop = m_lastInstruction->as<UnaryOp>();
    // Same safety condition as fuseCompareAndJump: the test's result must be
    // an otherwise-unobserved temporary.
    if (cond->index() == unop.m_dst.offset() && cond->isTemporary() && !cond->refCount()) {
        rewind();

        JmpOp::emit(this, unop.m_operand, target.bind(this));
        return true;
    }
    return false;
}
1440
1441void BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label& target)
1442{
1443
1444 if (m_lastOpcodeID == op_less) {
1445 if (fuseCompareAndJump<OpLess, OpJless>(cond, target))
1446 return;
1447 } else if (m_lastOpcodeID == op_lesseq) {
1448 if (fuseCompareAndJump<OpLesseq, OpJlesseq>(cond, target))
1449 return;
1450 } else if (m_lastOpcodeID == op_greater) {
1451 if (fuseCompareAndJump<OpGreater, OpJgreater>(cond, target))
1452 return;
1453 } else if (m_lastOpcodeID == op_greatereq) {
1454 if (fuseCompareAndJump<OpGreatereq, OpJgreatereq>(cond, target))
1455 return;
1456 } else if (m_lastOpcodeID == op_eq) {
1457 if (fuseCompareAndJump<OpEq, OpJeq>(cond, target))
1458 return;
1459 } else if (m_lastOpcodeID == op_stricteq) {
1460 if (fuseCompareAndJump<OpStricteq, OpJstricteq>(cond, target))
1461 return;
1462 } else if (m_lastOpcodeID == op_neq) {
1463 if (fuseCompareAndJump<OpNeq, OpJneq>(cond, target))
1464 return;
1465 } else if (m_lastOpcodeID == op_nstricteq) {
1466 if (fuseCompareAndJump<OpNstricteq, OpJnstricteq>(cond, target))
1467 return;
1468 } else if (m_lastOpcodeID == op_below) {
1469 if (fuseCompareAndJump<OpBelow, OpJbelow>(cond, target))
1470 return;
1471 } else if (m_lastOpcodeID == op_beloweq) {
1472 if (fuseCompareAndJump<OpBeloweq, OpJbeloweq>(cond, target))
1473 return;
1474 } else if (m_lastOpcodeID == op_eq_null && target.isForward()) {
1475 if (fuseTestAndJmp<OpEqNull, OpJeqNull>(cond, target))
1476 return;
1477 } else if (m_lastOpcodeID == op_neq_null && target.isForward()) {
1478 if (fuseTestAndJmp<OpNeqNull, OpJneqNull>(cond, target))
1479 return;
1480 }
1481
1482 OpJtrue::emit(this, cond, target.bind(this));
1483}
1484
1485void BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label& target)
1486{
1487 if (m_lastOpcodeID == op_less && target.isForward()) {
1488 if (fuseCompareAndJump<OpLess, OpJnless>(cond, target))
1489 return;
1490 } else if (m_lastOpcodeID == op_lesseq && target.isForward()) {
1491 if (fuseCompareAndJump<OpLesseq, OpJnlesseq>(cond, target))
1492 return;
1493 } else if (m_lastOpcodeID == op_greater && target.isForward()) {
1494 if (fuseCompareAndJump<OpGreater, OpJngreater>(cond, target))
1495 return;
1496 } else if (m_lastOpcodeID == op_greatereq && target.isForward()) {
1497 if (fuseCompareAndJump<OpGreatereq, OpJngreatereq>(cond, target))
1498 return;
1499 } else if (m_lastOpcodeID == op_eq && target.isForward()) {
1500 if (fuseCompareAndJump<OpEq, OpJneq>(cond, target))
1501 return;
1502 } else if (m_lastOpcodeID == op_stricteq && target.isForward()) {
1503 if (fuseCompareAndJump<OpStricteq, OpJnstricteq>(cond, target))
1504 return;
1505 } else if (m_lastOpcodeID == op_neq && target.isForward()) {
1506 if (fuseCompareAndJump<OpNeq, OpJeq>(cond, target))
1507 return;
1508 } else if (m_lastOpcodeID == op_nstricteq && target.isForward()) {
1509 if (fuseCompareAndJump<OpNstricteq, OpJstricteq>(cond, target))
1510 return;
1511 } else if (m_lastOpcodeID == op_below && target.isForward()) {
1512 if (fuseCompareAndJump<OpBelow, OpJbeloweq>(cond, target, true))
1513 return;
1514 } else if (m_lastOpcodeID == op_beloweq && target.isForward()) {
1515 if (fuseCompareAndJump<OpBeloweq, OpJbelow>(cond, target, true))
1516 return;
1517 } else if (m_lastOpcodeID == op_not) {
1518 if (fuseTestAndJmp<OpNot, OpJtrue>(cond, target))
1519 return;
1520 } else if (m_lastOpcodeID == op_eq_null && target.isForward()) {
1521 if (fuseTestAndJmp<OpEqNull, OpJneqNull>(cond, target))
1522 return;
1523 } else if (m_lastOpcodeID == op_neq_null && target.isForward()) {
1524 if (fuseTestAndJmp<OpNeqNull, OpJeqNull>(cond, target))
1525 return;
1526 }
1527
1528 OpJfalse::emit(this, cond, target.bind(this));
1529}
1530
// Jumps to `target` unless `cond` holds the VM's special CallFunction pointer.
// NOTE(review): presumably used to guard the fast path for f.call(...)
// patterns -- confirm against callers.
void BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label& target)
{
    OpJneqPtr::emit(this, cond, Special::CallFunction, target.bind(this));
}
1535
// Jumps to `target` unless `cond` holds the VM's special ApplyFunction
// pointer. Counterpart of emitJumpIfNotFunctionCall() for f.apply(...).
void BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label& target)
{
    OpJneqPtr::emit(this, cond, Special::ApplyFunction, target.bind(this));
}
1540
1541bool BytecodeGenerator::hasConstant(const Identifier& ident) const
1542{
1543 UniquedStringImpl* rep = ident.impl();
1544 return m_identifierMap.contains(rep);
1545}
1546
1547unsigned BytecodeGenerator::addConstant(const Identifier& ident)
1548{
1549 UniquedStringImpl* rep = ident.impl();
1550 IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
1551 if (result.isNewEntry)
1552 m_codeBlock->addIdentifier(ident);
1553
1554 return result.iterator->value;
1555}
1556
// We can't hash JSValue(), so we use a dedicated data member to cache it
// instead of going through m_jsValueMap like other constants.
RegisterID* BytecodeGenerator::addConstantEmptyValue()
{
    if (!m_emptyValueRegister) {
        // Lazily allocate a constant-pool slot holding the empty value.
        int index = addConstantIndex();
        m_codeBlock->addConstant(JSValue());
        m_emptyValueRegister = &m_constantPoolRegisters[index];
    }

    return m_emptyValueRegister;
}
1568
// Returns a constant-pool register holding `v`, deduplicating identical
// (value, representation) pairs. The representation is part of the map key so
// that e.g. an int32-valued double literal is kept distinct from the int32.
RegisterID* BytecodeGenerator::addConstantValue(JSValue v, SourceCodeRepresentation sourceCodeRepresentation)
{
    // The empty JSValue() cannot be hashed; it has a dedicated cache.
    if (!v)
        return addConstantEmptyValue();

    // Speculatively record where a *new* constant would land; overwritten
    // below if the value is already in the pool.
    int index = m_nextConstantOffset;

    // Normalize a double-represented int32 to an actual double before hashing
    // so it unifies with other double constants of the same value.
    if (sourceCodeRepresentation == SourceCodeRepresentation::Double && v.isInt32())
        v = jsDoubleNumber(v.asNumber());
    EncodedJSValueWithRepresentation valueMapKey { JSValue::encode(v), sourceCodeRepresentation };
    JSValueMap::AddResult result = m_jsValueMap.add(valueMapKey, m_nextConstantOffset);
    if (result.isNewEntry) {
        addConstantIndex();
        m_codeBlock->addConstant(v, sourceCodeRepresentation);
    } else
        index = result.iterator->value;
    return &m_constantPoolRegisters[index];
}
1587
// Returns (or moves into `dst`) the constant-pool register for the given
// link-time constant. One pool slot is lazily allocated per constant kind;
// the pool stores the LinkTimeConstant tag, to be resolved at link time.
RegisterID* BytecodeGenerator::moveLinkTimeConstant(RegisterID* dst, LinkTimeConstant type)
{
    unsigned constantIndex = static_cast<unsigned>(type);
    if (!m_linkTimeConstantRegisters[constantIndex]) {
        int index = addConstantIndex();
        m_codeBlock->addConstant(type);
        m_linkTimeConstantRegisters[constantIndex] = &m_constantPoolRegisters[index];
    }

    // With no explicit destination, hand back the pool register directly.
    if (!dst)
        return m_linkTimeConstantRegisters[constantIndex];

    OpMov::emit(this, dst, m_linkTimeConstantRegisters[constantIndex]);

    return dst;
}
1604
1605RegisterID* BytecodeGenerator::moveEmptyValue(RegisterID* dst)
1606{
1607 RefPtr<RegisterID> emptyValue = addConstantEmptyValue();
1608
1609 OpMov::emit(this, dst, emptyValue.get());
1610
1611 return dst;
1612}
1613
// Emits a register-to-register move. The empty value must go through
// moveEmptyValue() instead, hence the assertion.
RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
{
    ASSERT(src != m_emptyValueRegister);

    // Tell the static property analyzer that dst now aliases src.
    m_staticPropertyAnalyzer.mov(dst, src);
    OpMov::emit(this, dst, src);

    return dst;
}
1623
// Dispatches a generic unary opcode to its typed emitter. op_negate is
// emitted directly here because it additionally carries OperandTypes.
// Returns dst.
RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src, OperandTypes types)
{
    switch (opcodeID) {
    case op_not:
        emitUnaryOp<OpNot>(dst, src);
        break;
    case op_negate:
        OpNegate::emit(this, dst, src, types);
        break;
    case op_bitnot:
        emitUnaryOp<OpBitnot>(dst, src);
        break;
    case op_to_number:
        emitUnaryOp<OpToNumber>(dst, src);
        break;
    default:
        ASSERT_NOT_REACHED();
    }
    return dst;
}
1644
// Dispatches a generic binary opcode to the corresponding typed emitter,
// forwarding the OperandTypes hint. Returns dst (nullptr for unknown opcodes,
// which are a programming error).
RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
{
    switch (opcodeID) {
    case op_eq:
        return emitBinaryOp<OpEq>(dst, src1, src2, types);
    case op_neq:
        return emitBinaryOp<OpNeq>(dst, src1, src2, types);
    case op_stricteq:
        return emitBinaryOp<OpStricteq>(dst, src1, src2, types);
    case op_nstricteq:
        return emitBinaryOp<OpNstricteq>(dst, src1, src2, types);
    case op_less:
        return emitBinaryOp<OpLess>(dst, src1, src2, types);
    case op_lesseq:
        return emitBinaryOp<OpLesseq>(dst, src1, src2, types);
    case op_greater:
        return emitBinaryOp<OpGreater>(dst, src1, src2, types);
    case op_greatereq:
        return emitBinaryOp<OpGreatereq>(dst, src1, src2, types);
    case op_below:
        return emitBinaryOp<OpBelow>(dst, src1, src2, types);
    case op_beloweq:
        return emitBinaryOp<OpBeloweq>(dst, src1, src2, types);
    case op_mod:
        return emitBinaryOp<OpMod>(dst, src1, src2, types);
    case op_pow:
        return emitBinaryOp<OpPow>(dst, src1, src2, types);
    case op_lshift:
        return emitBinaryOp<OpLshift>(dst, src1, src2, types);
    case op_rshift:
        return emitBinaryOp<OpRshift>(dst, src1, src2, types);
    case op_urshift:
        return emitBinaryOp<OpUrshift>(dst, src1, src2, types);
    case op_add:
        return emitBinaryOp<OpAdd>(dst, src1, src2, types);
    case op_mul:
        return emitBinaryOp<OpMul>(dst, src1, src2, types);
    case op_div:
        return emitBinaryOp<OpDiv>(dst, src1, src2, types);
    case op_sub:
        return emitBinaryOp<OpSub>(dst, src1, src2, types);
    case op_bitand:
        return emitBinaryOp<OpBitand>(dst, src1, src2, types);
    case op_bitxor:
        return emitBinaryOp<OpBitxor>(dst, src1, src2, types);
    case op_bitor:
        return emitBinaryOp<OpBitor>(dst, src1, src2, types);
    default:
        ASSERT_NOT_REACHED();
        return nullptr;
    }
}
1697
// Emits op_to_object converting src into dst. `message` is interned and
// attached to the instruction (presumably the error text used when the
// conversion fails -- confirm against the op_to_object implementation).
RegisterID* BytecodeGenerator::emitToObject(RegisterID* dst, RegisterID* src, const Identifier& message)
{
    OpToObject::emit(this, dst, src, addConstant(message));
    return dst;
}
1703
// Emits op_to_number converting src into dst; returns dst.
RegisterID* BytecodeGenerator::emitToNumber(RegisterID* dst, RegisterID* src)
{
    return emitUnaryOp<OpToNumber>(dst, src);
}
1708
// Emits op_to_string converting src into dst; returns dst.
RegisterID* BytecodeGenerator::emitToString(RegisterID* dst, RegisterID* src)
{
    return emitUnaryOp<OpToString>(dst, src);
}
1713
// Emits op_typeof storing the typeof-string of src into dst; returns dst.
// Note: emitEqualityOpImpl() peephole-fuses this with a following string
// comparison when possible.
RegisterID* BytecodeGenerator::emitTypeOf(RegisterID* dst, RegisterID* src)
{
    return emitUnaryOp<OpTypeof>(dst, src);
}
1718
// Emits an in-place increment: srcDst is both operand and result.
RegisterID* BytecodeGenerator::emitInc(RegisterID* srcDst)
{
    OpInc::emit(this, srcDst);
    return srcDst;
}
1724
// Emits an in-place decrement: srcDst is both operand and result.
RegisterID* BytecodeGenerator::emitDec(RegisterID* srcDst)
{
    OpDec::emit(this, srcDst);
    return srcDst;
}
1730
1731bool BytecodeGenerator::emitEqualityOpImpl(RegisterID* dst, RegisterID* src1, RegisterID* src2)
1732{
1733 if (m_lastInstruction->is<OpTypeof>()) {
1734 auto op = m_lastInstruction->as<OpTypeof>();
1735 if (src1->index() == op.m_dst.offset()
1736 && src1->isTemporary()
1737 && m_codeBlock->isConstantRegisterIndex(src2->index())
1738 && m_codeBlock->constantRegister(src2->index()).get().isString()) {
1739 const String& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
1740 if (value == "undefined") {
1741 rewind();
1742 OpIsUndefined::emit(this, dst, op.m_value);
1743 return true;
1744 }
1745 if (value == "boolean") {
1746 rewind();
1747 OpIsBoolean::emit(this, dst, op.m_value);
1748 return true;
1749 }
1750 if (value == "number") {
1751 rewind();
1752 OpIsNumber::emit(this, dst, op.m_value);
1753 return true;
1754 }
1755 if (value == "string") {
1756 rewind();
1757 OpIsCellWithType::emit(this, dst, op.m_value, StringType);
1758 return true;
1759 }
1760 if (value == "symbol") {
1761 rewind();
1762 OpIsCellWithType::emit(this, dst, op.m_value, SymbolType);
1763 return true;
1764 }
1765 if (Options::useBigInt() && value == "bigint") {
1766 rewind();
1767 OpIsCellWithType::emit(this, dst, op.m_value, BigIntType);
1768 return true;
1769 }
1770 if (value == "object") {
1771 rewind();
1772 OpIsObjectOrNull::emit(this, dst, op.m_value);
1773 return true;
1774 }
1775 if (value == "function") {
1776 rewind();
1777 OpIsFunction::emit(this, dst, op.m_value);
1778 return true;
1779 }
1780 }
1781 }
1782
1783 return false;
1784}
1785
// Associates the most recently emitted instruction with a source text range
// so the type profiler can attribute observed types back to program text.
// Only valid while the type profiler is enabled.
void BytecodeGenerator::emitTypeProfilerExpressionInfo(const JSTextPosition& startDivot, const JSTextPosition& endDivot)
{
    ASSERT(vm()->typeProfiler());

    unsigned start = startDivot.offset; // Ranges are inclusive of their endpoints, AND 0 indexed.
    unsigned end = endDivot.offset - 1; // End Ranges already go one past the inclusive range, so subtract 1.
    unsigned instructionOffset = instructions().size() - 1;
    m_codeBlock->addTypeProfilerExpressionInfo(instructionOffset, start, end);
}
1795
// Emits op_profile_type for `registerToProfile` with the given flag and no
// associated identifier or scope depth. No-op when the type profiler is off
// or there is nothing to profile.
void BytecodeGenerator::emitProfileType(RegisterID* registerToProfile, ProfileTypeBytecodeFlag flag)
{
    if (!vm()->typeProfiler())
        return;

    if (!registerToProfile)
        return;

    OpProfileType::emit(this, registerToProfile, 0, flag, { }, resolveType());

    // Don't emit expression info for this version of profile type. This generally means
    // we're profiling information for something that isn't in the actual text of a JavaScript
    // program. For example, implicit return undefined from a function call.
}
1810
// Convenience overload: profiles with the "does not have global ID" flag and
// attaches the given source range.
void BytecodeGenerator::emitProfileType(RegisterID* registerToProfile, const JSTextPosition& startDivot, const JSTextPosition& endDivot)
{
    emitProfileType(registerToProfile, ProfileTypeBytecodeDoesNotHaveGlobalID, startDivot, endDivot);
}
1815
// Emits op_profile_type with an explicit flag and attaches the source range
// to the emitted instruction. No-op when the type profiler is off.
void BytecodeGenerator::emitProfileType(RegisterID* registerToProfile, ProfileTypeBytecodeFlag flag, const JSTextPosition& startDivot, const JSTextPosition& endDivot)
{
    if (!vm()->typeProfiler())
        return;

    if (!registerToProfile)
        return;

    OpProfileType::emit(this, registerToProfile, 0, flag, { }, resolveType());
    emitTypeProfilerExpressionInfo(startDivot, endDivot);
}
1827
// Emits op_profile_type for a named variable. Locally/scope-resolved
// variables carry their symbol table's constant index; closure variables
// carry the current local scope depth instead, so the runtime can locate the
// right scope dynamically.
void BytecodeGenerator::emitProfileType(RegisterID* registerToProfile, const Variable& var, const JSTextPosition& startDivot, const JSTextPosition& endDivot)
{
    if (!vm()->typeProfiler())
        return;

    if (!registerToProfile)
        return;

    ProfileTypeBytecodeFlag flag;
    int symbolTableOrScopeDepth;
    if (var.local() || var.offset().isScope()) {
        flag = ProfileTypeBytecodeLocallyResolved;
        ASSERT(var.symbolTableConstantIndex());
        symbolTableOrScopeDepth = var.symbolTableConstantIndex();
    } else {
        flag = ProfileTypeBytecodeClosureVar;
        symbolTableOrScopeDepth = localScopeDepth();
    }

    OpProfileType::emit(this, registerToProfile, symbolTableOrScopeDepth, flag, addConstant(var.ident()), resolveType());
    emitTypeProfilerExpressionInfo(startDivot, endDivot);
}
1850
1851void BytecodeGenerator::emitProfileControlFlow(int textOffset)
1852{
1853 if (vm()->controlFlowProfiler()) {
1854 RELEASE_ASSERT(textOffset >= 0);
1855
1856 OpProfileControlFlow::emit(this, textOffset);
1857 m_codeBlock->addOpProfileControlFlowBytecodeOffset(m_lastInstruction.offset());
1858 }
1859}
1860
// Reserves the next constant-pool register and returns its pool index.
// Constant registers are numbered from FirstConstantRegisterIndex upward.
unsigned BytecodeGenerator::addConstantIndex()
{
    unsigned index = m_nextConstantOffset;
    m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
    ++m_nextConstantOffset;
    return index;
}
1868
// Loads a boolean constant; forwards to the JSValue overload.
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
{
    return emitLoad(dst, jsBoolean(b));
}
1873
// Loads the string value of `identifier` as a constant. One JSString is
// cached per identifier so repeated loads of the same literal share a single
// constant.
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
{
    // Symbols are not strings and must not go through the string cache.
    ASSERT(!identifier.isSymbol());
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
    if (!stringInMap)
        stringInMap = jsOwnedString(vm(), identifier.string());

    return emitLoad(dst, JSValue(stringInMap));
}
1883
1884RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v, SourceCodeRepresentation sourceCodeRepresentation)
1885{
1886 RegisterID* constantID = addConstantValue(v, sourceCodeRepresentation);
1887 if (dst)
1888 return move(dst, constantID);
1889 return constantID;
1890}
1891
1892RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, IdentifierSet& set)
1893{
1894 if (m_codeBlock->numberOfConstantIdentifierSets()) {
1895 for (const auto& entry : m_codeBlock->constantIdentifierSets()) {
1896 if (entry.first != set)
1897 continue;
1898
1899 return &m_constantPoolRegisters[entry.second];
1900 }
1901 }
1902
1903 unsigned index = addConstantIndex();
1904 m_codeBlock->addSetConstant(set);
1905 RegisterID* m_setRegister = &m_constantPoolRegisters[index];
1906
1907 if (dst)
1908 return move(dst, m_setRegister);
1909
1910 return m_setRegister;
1911}
1912
// Allocates storage (stack register or scope slot) for every variable in
// `lexicalVariables` and records it in `symbolTable`. `lookUpVarKind` decides
// per variable whether it is captured (VarKind::Scope) or can live on the
// stack (VarKind::Stack). Returns true if any variable was scope-allocated,
// i.e. an activation environment will be needed.
template<typename LookUpVarKindFunctor>
bool BytecodeGenerator::instantiateLexicalVariables(const VariableEnvironment& lexicalVariables, SymbolTable* symbolTable, ScopeRegisterType scopeRegisterType, LookUpVarKindFunctor lookUpVarKind)
{
    bool hasCapturedVariables = false;
    {
        for (auto& entry : lexicalVariables) {
            // Only lexical (let/const/function) bindings belong here.
            ASSERT(entry.value.isLet() || entry.value.isConst() || entry.value.isFunction());
            ASSERT(!entry.value.isVar());
            SymbolTableEntry symbolTableEntry = symbolTable->get(NoLockingNecessary, entry.key.get());
            ASSERT(symbolTableEntry.isNull());

            // Imported bindings which are not the namespace bindings are not allocated
            // in the module environment as usual variables' way.
            // And since these types of variables are only seen in the module environment,
            // other lexical environments need not take care of them.
            if (entry.value.isImported() && !entry.value.isImportedNamespace())
                continue;

            VarKind varKind = lookUpVarKind(entry.key.get(), entry.value);
            VarOffset varOffset;
            if (varKind == VarKind::Scope) {
                varOffset = VarOffset(symbolTable->takeNextScopeOffset(NoLockingNecessary));
                hasCapturedVariables = true;
            } else {
                ASSERT(varKind == VarKind::Stack);
                RegisterID* local;
                if (scopeRegisterType == ScopeRegisterType::Block) {
                    // Block-scope locals are ref'd here and deref'd by
                    // popLexicalScopeInternal() when the scope is popped.
                    local = newBlockScopeVariable();
                    local->ref();
                } else
                    local = addVar();
                varOffset = VarOffset(local->virtualRegister());
            }

            // const bindings are marked ReadOnly so stores can be rejected.
            SymbolTableEntry newEntry(varOffset, static_cast<unsigned>(entry.value.isConst() ? PropertyAttribute::ReadOnly : PropertyAttribute::None));
            symbolTable->add(NoLockingNecessary, entry.key.get(), newEntry);
        }
    }
    return hasCapturedVariables;
}
1953
1954void BytecodeGenerator::emitPrefillStackTDZVariables(const VariableEnvironment& lexicalVariables, SymbolTable* symbolTable)
1955{
1956 // Prefill stack variables with the TDZ empty value.
1957 // Scope variables will be initialized to the TDZ empty value when JSLexicalEnvironment is allocated.
1958 for (auto& entry : lexicalVariables) {
1959 // Imported bindings which are not the namespace bindings are not allocated
1960 // in the module environment as usual variables' way.
1961 // And since these types of the variables only seen in the module environment,
1962 // other lexical environment need not to take care this.
1963 if (entry.value.isImported() && !entry.value.isImportedNamespace())
1964 continue;
1965
1966 if (entry.value.isFunction())
1967 continue;
1968
1969 SymbolTableEntry symbolTableEntry = symbolTable->get(NoLockingNecessary, entry.key.get());
1970 ASSERT(!symbolTableEntry.isNull());
1971 VarOffset offset = symbolTableEntry.varOffset();
1972 if (offset.isScope())
1973 continue;
1974
1975 ASSERT(offset.isStack());
1976 moveEmptyValue(&registerFor(offset.stackOffset()));
1977 }
1978}
1979
// Pushes an ordinary let/const block scope for `node`, optionally hoisting
// and initializing the block's function declarations. Wrapper around
// pushLexicalScopeInternal() with UnderTDZ / LetConstScope / Block defaults.
void BytecodeGenerator::pushLexicalScope(VariableEnvironmentNode* node, TDZCheckOptimization tdzCheckOptimization, NestedScopeType nestedScopeType, RegisterID** constantSymbolTableResult, bool shouldInitializeBlockScopedFunctions)
{
    VariableEnvironment& environment = node->lexicalVariables();
    RegisterID* constantSymbolTableResultTemp = nullptr;
    pushLexicalScopeInternal(environment, tdzCheckOptimization, nestedScopeType, &constantSymbolTableResultTemp, TDZRequirement::UnderTDZ, ScopeType::LetConstScope, ScopeRegisterType::Block);

    if (shouldInitializeBlockScopedFunctions)
        initializeBlockScopedFunctions(environment, node->functionStack(), constantSymbolTableResultTemp);

    // Only report a symbol-table constant if one was actually created
    // (pushLexicalScopeInternal() returns early for empty environments).
    if (constantSymbolTableResult && constantSymbolTableResultTemp)
        *constantSymbolTableResult = constantSymbolTableResultTemp;
}
1992
// Core implementation of pushing a lexical scope: allocates storage for the
// environment's variables, creates a JSLexicalEnvironment when any variable
// is captured, pushes the TDZ bookkeeping, and (under TDZ) prefills stack
// variables with the empty sentinel. `constantSymbolTableResult`, if
// non-null, receives the constant-pool register holding the symbol table.
void BytecodeGenerator::pushLexicalScopeInternal(VariableEnvironment& environment, TDZCheckOptimization tdzCheckOptimization, NestedScopeType nestedScopeType,
    RegisterID** constantSymbolTableResult, TDZRequirement tdzRequirement, ScopeType scopeType, ScopeRegisterType scopeRegisterType)
{
    if (!environment.size())
        return;

    // The debugger needs to see every lexical variable, so force them all
    // into the scope object.
    if (m_shouldEmitDebugHooks)
        environment.markAllVariablesAsCaptured();

    SymbolTable* symbolTable = SymbolTable::create(*m_vm);
    switch (scopeType) {
    case ScopeType::CatchScope:
        symbolTable->setScopeType(SymbolTable::ScopeType::CatchScope);
        break;
    case ScopeType::LetConstScope:
        symbolTable->setScopeType(SymbolTable::ScopeType::LexicalScope);
        break;
    case ScopeType::FunctionNameScope:
        symbolTable->setScopeType(SymbolTable::ScopeType::FunctionNameScope);
        break;
    }

    if (nestedScopeType == NestedScopeType::IsNested)
        symbolTable->markIsNestedLexicalScope();

    // Captured variables go in the scope; everything else stays on the stack.
    auto lookUpVarKind = [] (UniquedStringImpl*, const VariableEnvironmentEntry& entry) -> VarKind {
        return entry.isCaptured() ? VarKind::Scope : VarKind::Stack;
    };

    bool hasCapturedVariables = instantiateLexicalVariables(environment, symbolTable, scopeRegisterType, lookUpVarKind);

    RegisterID* newScope = nullptr;
    RegisterID* constantSymbolTable = nullptr;
    int symbolTableConstantIndex = 0;
    // The type profiler always needs the symbol table constant, even when no
    // activation is created.
    if (vm()->typeProfiler()) {
        constantSymbolTable = addConstantValue(symbolTable);
        symbolTableConstantIndex = constantSymbolTable->index();
    }
    if (hasCapturedVariables) {
        if (scopeRegisterType == ScopeRegisterType::Block) {
            newScope = newBlockScopeVariable();
            newScope->ref();
        } else
            newScope = addVar();
        if (!constantSymbolTable) {
            ASSERT(!vm()->typeProfiler());
            // Without the type profiler only the scope-allocated part of the
            // symbol table is needed at runtime.
            constantSymbolTable = addConstantValue(symbolTable->cloneScopePart(*m_vm));
            symbolTableConstantIndex = constantSymbolTable->index();
        }
        if (constantSymbolTableResult)
            *constantSymbolTableResult = constantSymbolTable;

        // Under TDZ, scope slots start out holding the TDZ sentinel;
        // otherwise they start as undefined.
        OpCreateLexicalEnvironment::emit(this, newScope, scopeRegister(), VirtualRegister { symbolTableConstantIndex }, addConstantValue(tdzRequirement == TDZRequirement::UnderTDZ ? jsTDZValue() : jsUndefined()));

        move(scopeRegister(), newScope);

        pushLocalControlFlowScope();
    }

    bool isWithScope = false;
    m_lexicalScopeStack.append({ symbolTable, newScope, isWithScope, symbolTableConstantIndex });
    pushTDZVariables(environment, tdzCheckOptimization, tdzRequirement);

    if (tdzRequirement == TDZRequirement::UnderTDZ)
        emitPrefillStackTDZVariables(environment, symbolTable);
}
2059
// Hoist-initializes the block's function declarations into the current
// lexical scope, as if they were `let` bindings initialized at the top of the
// block (but without TDZ checks). See the transformation sketch below.
void BytecodeGenerator::initializeBlockScopedFunctions(VariableEnvironment& environment, FunctionStack& functionStack, RegisterID* constantSymbolTable)
{
    /*
     * We must transform block scoped function declarations in strict mode like so:
     *
     * function foo() {
     *     if (c) {
     *           function foo() { ... }
     *           if (bar) { ... }
     *           else { ... }
     *           function baz() { ... }
     *     }
     * }
     *
     * to:
     *
     * function foo() {
     *     if (c) {
     *         let foo = function foo() { ... }
     *         let baz = function baz() { ... }
     *         if (bar) { ... }
     *         else { ... }
     *     }
     * }
     *
     * But without the TDZ checks.
     */

    // An empty environment implies no block-scoped functions either.
    if (!environment.size()) {
        RELEASE_ASSERT(!functionStack.size());
        return;
    }

    if (!functionStack.size())
        return;

    SymbolTable* symbolTable = m_lexicalScopeStack.last().m_symbolTable;
    RegisterID* scope = m_lexicalScopeStack.last().m_scope;
    RefPtr<RegisterID> temp = newTemporary();
    int symbolTableIndex = constantSymbolTable ? constantSymbolTable->index() : 0;
    for (FunctionMetadataNode* function : functionStack) {
        const Identifier& name = function->ident();
        auto iter = environment.find(name.impl());
        RELEASE_ASSERT(iter != environment.end());
        RELEASE_ASSERT(iter->value.isFunction());
        // We purposefully don't hold the symbol table lock around this loop because emitNewFunctionExpressionCommon may GC.
        SymbolTableEntry entry = symbolTable->get(NoLockingNecessary, name.impl());
        RELEASE_ASSERT(!entry.isNull());
        emitNewFunctionExpressionCommon(temp.get(), function);
        bool isLexicallyScoped = true;
        emitPutToScope(scope, variableForLocalEntry(name, entry, symbolTableIndex, isLexicallyScoped), temp.get(), DoNotThrowIfNotFound, InitializationMode::Initialization);
    }
}
2113
// Implements sloppy-mode block-scoped function hoisting (Annex B-style web
// compatibility semantics -- confirm against the parser's
// hasSloppyModeHoistedFunction flag): after a block-scoped function is
// initialized, its current value is also written into the enclosing var
// scope's binding of the same name. Eval code cannot know its var scope
// statically, so it resolves the destination scope dynamically.
void BytecodeGenerator::hoistSloppyModeFunctionIfNecessary(const Identifier& functionName)
{
    if (m_scopeNode->hasSloppyModeHoistedFunction(functionName.impl())) {
        if (codeType() != EvalCode) {
            // Read the function's current value (from its local register or
            // by resolving its scope).
            Variable currentFunctionVariable = variable(functionName);
            RefPtr<RegisterID> currentValue;
            if (RegisterID* local = currentFunctionVariable.local())
                currentValue = local;
            else {
                RefPtr<RegisterID> scope = emitResolveScope(nullptr, currentFunctionVariable);
                currentValue = emitGetFromScope(newTemporary(), scope.get(), currentFunctionVariable, DoNotThrowIfNotFound);
            }

            ASSERT(m_varScopeLexicalScopeStackIndex);
            ASSERT(*m_varScopeLexicalScopeStackIndex < m_lexicalScopeStack.size());
            LexicalScopeStackEntry varScope = m_lexicalScopeStack[*m_varScopeLexicalScopeStackIndex];
            SymbolTable* varSymbolTable = varScope.m_symbolTable;
            ASSERT(varSymbolTable->scopeType() == SymbolTable::ScopeType::VarScope);
            SymbolTableEntry entry = varSymbolTable->get(NoLockingNecessary, functionName.impl());
            if (functionName == propertyNames().arguments && entry.isNull()) {
                // "arguments" might be put in the parameter scope when we have a non-simple
                // parameter list since "arguments" is visible to expressions inside the
                // parameter evaluation list.
                // e.g:
                // function foo(x = arguments) { { function arguments() { } } }
                RELEASE_ASSERT(*m_varScopeLexicalScopeStackIndex > 0);
                varScope = m_lexicalScopeStack[*m_varScopeLexicalScopeStackIndex - 1];
                SymbolTable* parameterSymbolTable = varScope.m_symbolTable;
                entry = parameterSymbolTable->get(NoLockingNecessary, functionName.impl());
            }
            RELEASE_ASSERT(!entry.isNull());
            bool isLexicallyScoped = false;
            emitPutToScope(varScope.m_scope, variableForLocalEntry(functionName, entry, varScope.m_symbolTableConstantIndex, isLexicallyScoped), currentValue.get(), DoNotThrowIfNotFound, InitializationMode::NotInitialization);
        } else {
            // Eval path: the variable scope must be found at runtime.
            Variable currentFunctionVariable = variable(functionName);
            RefPtr<RegisterID> currentValue;
            if (RegisterID* local = currentFunctionVariable.local())
                currentValue = local;
            else {
                RefPtr<RegisterID> scope = emitResolveScope(nullptr, currentFunctionVariable);
                currentValue = emitGetFromScope(newTemporary(), scope.get(), currentFunctionVariable, DoNotThrowIfNotFound);
            }

            RefPtr<RegisterID> scopeId = emitResolveScopeForHoistingFuncDeclInEval(nullptr, functionName);
            RefPtr<RegisterID> checkResult = emitIsUndefined(newTemporary(), scopeId.get());

            // An undefined result means there is no scope to hoist into;
            // skip the store in that case.
            Ref<Label> isNotVarScopeLabel = newLabel();
            emitJumpIfTrue(checkResult.get(), isNotVarScopeLabel.get());

            // Put to outer scope
            emitPutToScope(scopeId.get(), functionName, currentValue.get(), DoNotThrowIfNotFound, InitializationMode::NotInitialization);
            emitLabel(isNotVarScopeLabel.get());

        }
    }
}
2170
// Emits the dynamic scope resolution used by sloppy-mode function hoisting in
// eval code: finds, at runtime, the scope that the hoisted function named
// `property` should be stored into, starting from the top-most scope.
RegisterID* BytecodeGenerator::emitResolveScopeForHoistingFuncDeclInEval(RegisterID* dst, const Identifier& property)
{
    // Only eval code needs this dynamic lookup.
    ASSERT(m_codeType == EvalCode);

    dst = finalDestination(dst);
    // kill(dst): presumably marks dst's previous contents dead before the
    // unconditional write -- confirm against kill()'s definition.
    OpResolveScopeForHoistingFuncDeclInEval::emit(this, kill(dst), m_topMostScope, addConstant(property));
    return dst;
}
2179
2180void BytecodeGenerator::popLexicalScope(VariableEnvironmentNode* node)
2181{
2182 VariableEnvironment& environment = node->lexicalVariables();
2183 popLexicalScopeInternal(environment);
2184}
2185
// Tears down the most recently pushed lexical scope: derefs the stack
// registers of uncaptured variables, pops the activation (restoring the
// parent scope register) when any variable was captured, and pops the TDZ
// bookkeeping.
void BytecodeGenerator::popLexicalScopeInternal(VariableEnvironment& environment)
{
    // NOTE: This function only makes sense for scopes that aren't ScopeRegisterType::Var (only function name scope right now is ScopeRegisterType::Var).
    // This doesn't make sense for ScopeRegisterType::Var because we deref RegisterIDs here.
    if (!environment.size())
        return;

    // Must mirror the push side, which also marks everything captured when
    // debug hooks are on, so the captured/stack classification matches.
    if (m_shouldEmitDebugHooks)
        environment.markAllVariablesAsCaptured();

    auto stackEntry = m_lexicalScopeStack.takeLast();
    SymbolTable* symbolTable = stackEntry.m_symbolTable;
    bool hasCapturedVariables = false;
    for (auto& entry : environment) {
        if (entry.value.isCaptured()) {
            hasCapturedVariables = true;
            continue;
        }
        SymbolTableEntry symbolTableEntry = symbolTable->get(NoLockingNecessary, entry.key.get());
        ASSERT(!symbolTableEntry.isNull());
        VarOffset offset = symbolTableEntry.varOffset();
        ASSERT(offset.isStack());
        // Balances the ref() taken in instantiateLexicalVariables().
        RegisterID* local = &registerFor(offset.stackOffset());
        local->deref();
    }

    if (hasCapturedVariables) {
        RELEASE_ASSERT(stackEntry.m_scope);
        emitPopScope(scopeRegister(), stackEntry.m_scope);
        popLocalControlFlowScope();
        stackEntry.m_scope->deref();
    }

    m_TDZStack.removeLast();
    // Invalidate the cached TDZ set; it referenced the popped scope.
    m_cachedVariablesUnderTDZ = { };
}
2222
// Gives a for-loop with captured loop-header variables a fresh activation per
// iteration: reads the captured values out of the current activation, creates
// a new lexical environment in the same scope register, and writes the values
// into it.
void BytecodeGenerator::prepareLexicalScopeForNextForLoopIteration(VariableEnvironmentNode* node, RegisterID* loopSymbolTable)
{
    VariableEnvironment& environment = node->lexicalVariables();
    if (!environment.size())
        return;
    if (m_shouldEmitDebugHooks)
        environment.markAllVariablesAsCaptured();
    if (!environment.hasCapturedVariables())
        return;

    RELEASE_ASSERT(loopSymbolTable);

    // This function needs to do setup for a for loop's activation if any of
    // the for loop's lexically declared variables are captured (that is, variables
    // declared in the loop header, not the loop body). This function needs to
    // make a copy of the current activation and copy the values from the previous
    // activation into the new activation because each iteration of a for loop
    // gets a new activation.

    auto stackEntry = m_lexicalScopeStack.last();
    SymbolTable* symbolTable = stackEntry.m_symbolTable;
    RegisterID* loopScope = stackEntry.m_scope;
    ASSERT(symbolTable->scopeSize());
    ASSERT(loopScope);
    Vector<std::pair<RegisterID*, Identifier>> activationValuesToCopyOver;

    {
        activationValuesToCopyOver.reserveInitialCapacity(symbolTable->scopeSize());

        // Read every scope-allocated variable out of the current (old)
        // activation into a fresh temporary.
        for (auto end = symbolTable->end(NoLockingNecessary), ptr = symbolTable->begin(NoLockingNecessary); ptr != end; ++ptr) {
            if (!ptr->value.varOffset().isScope())
                continue;

            RefPtr<UniquedStringImpl> ident = ptr->key;
            Identifier identifier = Identifier::fromUid(m_vm, ident.get());

            RegisterID* transitionValue = newBlockScopeVariable();
            transitionValue->ref();
            emitGetFromScope(transitionValue, loopScope, variableForLocalEntry(identifier, ptr->value, loopSymbolTable->index(), true), DoNotThrowIfNotFound);
            activationValuesToCopyOver.uncheckedAppend(std::make_pair(transitionValue, identifier));
        }
    }

    // We need this dynamic behavior of the executing code to ensure
    // each loop iteration has a new activation object. (It's pretty ugly).
    // Also, this new activation needs to be assigned to the same register
    // as the previous scope because the loop body is compiled under
    // the assumption that the scope's register index is constant even
    // though the value in that register will change on each loop iteration.
    RefPtr<RegisterID> parentScope = emitGetParentScope(newTemporary(), loopScope);
    move(scopeRegister(), parentScope.get());

    OpCreateLexicalEnvironment::emit(this, loopScope, scopeRegister(), loopSymbolTable, addConstantValue(jsTDZValue()));

    move(scopeRegister(), loopScope);

    {
        // Write the saved values into the new activation, releasing the
        // temporaries as we go.
        for (auto pair : activationValuesToCopyOver) {
            const Identifier& identifier = pair.second;
            SymbolTableEntry entry = symbolTable->get(NoLockingNecessary, identifier.impl());
            RELEASE_ASSERT(!entry.isNull());
            RegisterID* transitionValue = pair.first;
            emitPutToScope(loopScope, variableForLocalEntry(identifier, entry, loopSymbolTable->index(), true), transitionValue, DoNotThrowIfNotFound, InitializationMode::NotInitialization);
            transitionValue->deref();
        }
    }
}
2290
// Resolves an identifier against the lexical scope stack at compile time.
// Returns a Variable carrying either a static resolution (local/scoped entry)
// or an "unresolved" Variable(property), which forces dynamic resolution.
Variable BytecodeGenerator::variable(const Identifier& property, ThisResolutionType thisResolutionType)
{
    // "this", when resolvable locally, is a special read-only variable backed by the this register.
    if (property == propertyNames().thisIdentifier && thisResolutionType == ThisResolutionType::Local)
        return Variable(property, VarOffset(thisRegister()->virtualRegister()), thisRegister(), static_cast<unsigned>(PropertyAttribute::ReadOnly), Variable::SpecialVariable, 0, false);

    // We can optimize lookups if the lexical variable is found before a "with" or "catch"
    // scope because we're guaranteed static resolution. If we have to pass through
    // a "with" or "catch" scope we lose this guarantee.
    // We can't optimize cases like this:
    // {
    //     let x = ...;
    //     with (o) {
    //         doSomethingWith(x);
    //     }
    // }
    // Because we can't guarantee static resolution on x.
    // But, in this case, we are guaranteed static resolution:
    // {
    //     let x = ...;
    //     with (o) {
    //         let x = ...;
    //         doSomethingWith(x);
    //     }
    // }
    for (unsigned i = m_lexicalScopeStack.size(); i--; ) {
        auto& stackEntry = m_lexicalScopeStack[i];
        if (stackEntry.m_isWithScope)
            return Variable(property);
        SymbolTable* symbolTable = stackEntry.m_symbolTable;
        SymbolTableEntry symbolTableEntry = symbolTable->get(NoLockingNecessary, property.impl());
        if (symbolTableEntry.isNull())
            continue;
        bool resultIsCallee = false;
        if (symbolTable->scopeType() == SymbolTable::ScopeType::FunctionNameScope) {
            if (m_usesNonStrictEval) {
                // We don't know if an eval has introduced a "var" named the same thing as the function name scope variable name.
                // We resort to dynamic lookup to answer this question.
                Variable result = Variable(property);
                return result;
            }
            resultIsCallee = true;
        }
        Variable result = variableForLocalEntry(property, symbolTableEntry, stackEntry.m_symbolTableConstantIndex, symbolTable->scopeType() == SymbolTable::ScopeType::LexicalScope);
        if (resultIsCallee)
            result.setIsReadOnly();
        return result;
    }

    // Not found in any visible lexical scope: resolution happens dynamically (e.g. global).
    return Variable(property);
}
2341
2342Variable BytecodeGenerator::variableForLocalEntry(
2343 const Identifier& property, const SymbolTableEntry& entry, int symbolTableConstantIndex, bool isLexicallyScoped)
2344{
2345 VarOffset offset = entry.varOffset();
2346
2347 RegisterID* local;
2348 if (offset.isStack())
2349 local = &registerFor(offset.stackOffset());
2350 else
2351 local = nullptr;
2352
2353 return Variable(property, offset, local, entry.getAttributes(), Variable::NormalVariable, symbolTableConstantIndex, isLexicallyScoped);
2354}
2355
// Adds a variable named `property` to `symbolTable`, allocating either a scope
// offset (captured) or a stack register (uncaptured). If the variable already
// exists, validates that the requested kind matches the existing one.
void BytecodeGenerator::createVariable(
    const Identifier& property, VarKind varKind, SymbolTable* symbolTable, ExistingVariableMode existingVariableMode)
{
    ASSERT(property != propertyNames().thisIdentifier);
    SymbolTableEntry entry = symbolTable->get(NoLockingNecessary, property.impl());

    if (!entry.isNull()) {
        if (existingVariableMode == IgnoreExisting)
            return;

        // Do some checks to ensure that the variable we're being asked to create is sufficiently
        // compatible with the one we have already created.

        VarOffset offset = entry.varOffset();

        // We can't change our minds about whether it's captured.
        if (offset.kind() != varKind) {
            dataLog(
                "Trying to add variable called ", property, " as ", varKind,
                " but it was already added as ", offset, ".\n");
            RELEASE_ASSERT_NOT_REACHED();
        }

        return;
    }

    // Fresh variable: pick storage based on capture kind.
    VarOffset varOffset;
    if (varKind == VarKind::Scope)
        varOffset = VarOffset(symbolTable->takeNextScopeOffset(NoLockingNecessary));
    else {
        ASSERT(varKind == VarKind::Stack);
        varOffset = VarOffset(virtualRegisterForLocal(m_calleeLocals.size()));
    }
    SymbolTableEntry newEntry(varOffset, 0);
    symbolTable->add(NoLockingNecessary, property.impl(), newEntry);

    if (varKind == VarKind::Stack) {
        // The freshly allocated local must land exactly where the VarOffset says it does.
        RegisterID* local = addVar();
        RELEASE_ASSERT(local->index() == varOffset.stackOffset().offset());
    }
}
2397
// Emits op_overrides_has_instance into dst (used when lowering instanceof).
RegisterID* BytecodeGenerator::emitOverridesHasInstance(RegisterID* dst, RegisterID* constructor, RegisterID* hasInstanceValue)
{
    OpOverridesHasInstance::emit(this, dst, constructor, hasInstanceValue);
    return dst;
}
2403
// Indicates the least upper bound of resolve type based on local scope. The bytecode linker
// will start with this ResolveType and compute the least upper bound including intercepting scopes.
ResolveType BytecodeGenerator::resolveType()
{
    for (unsigned i = m_lexicalScopeStack.size(); i--; ) {
        // Any "with" scope on the stack defeats static resolution entirely.
        if (m_lexicalScopeStack[i].m_isWithScope)
            return Dynamic;
        if (m_usesNonStrictEval && m_lexicalScopeStack[i].m_symbolTable->scopeType() == SymbolTable::ScopeType::FunctionNameScope) {
            // We never want to assign to a FunctionNameScope. Returning Dynamic here achieves this goal.
            // If we aren't in non-strict eval mode, then NodesCodeGen needs to take care not to emit
            // a put_to_scope with the destination being the function name scope variable.
            return Dynamic;
        }
    }

    // Non-strict eval can inject vars, so global lookups need var-injection checks.
    if (m_usesNonStrictEval)
        return GlobalPropertyWithVarInjectionChecks;
    return GlobalProperty;
}
2423
// Produces the scope register that holds `variable`, or emits op_resolve_scope
// for non-local (VarKind::Invalid) resolution. Returns nullptr for stack
// variables, which need no scope at all.
RegisterID* BytecodeGenerator::emitResolveScope(RegisterID* dst, const Variable& variable)
{
    switch (variable.offset().kind()) {
    case VarKind::Stack:
        return nullptr;

    case VarKind::DirectArgument:
        return argumentsRegister();

    case VarKind::Scope: {
        // This always refers to the activation that *we* allocated, and not the current scope that code
        // lives in. Note that this will change once we have proper support for block scoping. Once that
        // changes, it will be correct for this code to return scopeRegister(). The only reason why we
        // don't do that already is that m_lexicalEnvironment is required by ConstDeclNode. ConstDeclNode
        // requires weird things because it is a shameful pile of nonsense, but block scoping would make
        // that code sensible and obviate the need for us to do bad things.
        for (unsigned i = m_lexicalScopeStack.size(); i--; ) {
            auto& stackEntry = m_lexicalScopeStack[i];
            // We should not resolve a variable to VarKind::Scope if a "with" scope lies in between the current
            // scope and the resolved scope.
            RELEASE_ASSERT(!stackEntry.m_isWithScope);

            if (stackEntry.m_symbolTable->get(NoLockingNecessary, variable.ident().impl()).isNull())
                continue;

            RegisterID* scope = stackEntry.m_scope;
            RELEASE_ASSERT(scope);
            return scope;
        }

        // A VarKind::Scope variable must be found in some entry on the stack.
        RELEASE_ASSERT_NOT_REACHED();
        return nullptr;

    }
    case VarKind::Invalid:
        // Indicates non-local resolution.

        dst = tempDestination(dst);
        OpResolveScope::emit(this, kill(dst), scopeRegister(), addConstant(variable.ident()), resolveType(), localScopeDepth());
        m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
        return dst;
    }

    RELEASE_ASSERT_NOT_REACHED();
    return nullptr;
}
2470
// Loads `variable` into dst: a simple move for stack variables, op_get_from_arguments
// for captured arguments, and op_get_from_scope otherwise.
RegisterID* BytecodeGenerator::emitGetFromScope(RegisterID* dst, RegisterID* scope, const Variable& variable, ResolveMode resolveMode)
{
    switch (variable.offset().kind()) {
    case VarKind::Stack:
        return move(dst, variable.local());

    case VarKind::DirectArgument: {
        OpGetFromArguments::emit(this, kill(dst), scope, variable.offset().capturedArgumentsOffset().offset());
        return dst;
    }

    case VarKind::Scope:
    case VarKind::Invalid: {
        // Scoped variables use LocalClosureVar and their known scope offset;
        // unresolved ones fall back to resolveType() with offset 0.
        OpGetFromScope::emit(
            this,
            kill(dst),
            scope,
            addConstant(variable.ident()),
            GetPutInfo(resolveMode, variable.offset().isScope() ? LocalClosureVar : resolveType(), InitializationMode::NotInitialization),
            localScopeDepth(),
            variable.offset().isScope() ? variable.offset().scopeOffset().offset() : 0);
        m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
        return dst;
    } }

    RELEASE_ASSERT_NOT_REACHED();
}
2498
// Stores `value` into `variable`: a move for stack variables, op_put_to_arguments
// for captured arguments, and op_put_to_scope otherwise. Returns value.
RegisterID* BytecodeGenerator::emitPutToScope(RegisterID* scope, const Variable& variable, RegisterID* value, ResolveMode resolveMode, InitializationMode initializationMode)
{
    switch (variable.offset().kind()) {
    case VarKind::Stack:
        move(variable.local(), value);
        return value;

    case VarKind::DirectArgument:
        OpPutToArguments::emit(this, scope, variable.offset().capturedArgumentsOffset().offset(), value);
        return value;

    case VarKind::Scope:
    case VarKind::Invalid: {
        GetPutInfo getPutInfo(0);
        int scopeDepth;
        ScopeOffset offset;
        if (variable.offset().isScope()) {
            // Statically resolved: use the known scope offset; the "depth" slot
            // carries the symbol table constant index instead.
            offset = variable.offset().scopeOffset();
            getPutInfo = GetPutInfo(resolveMode, LocalClosureVar, initializationMode);
            scopeDepth = variable.symbolTableConstantIndex();
        } else {
            ASSERT(resolveType() != LocalClosureVar);
            getPutInfo = GetPutInfo(resolveMode, resolveType(), initializationMode);
            scopeDepth = localScopeDepth();
        }
        OpPutToScope::emit(this, scope, addConstant(variable.ident()), value, getPutInfo, scopeDepth, !!offset ? offset.offset() : 0);
        m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
        return value;
    } }

    RELEASE_ASSERT_NOT_REACHED();
}
2531
// Convenience: resolve the scope holding a statically-known variable, then store into it.
RegisterID* BytecodeGenerator::initializeVariable(const Variable& variable, RegisterID* value)
{
    RELEASE_ASSERT(variable.offset().kind() != VarKind::Invalid);
    RegisterID* scope = emitResolveScope(nullptr, variable);
    return emitPutToScope(scope, variable, value, ThrowIfNotFound, InitializationMode::NotInitialization);
}
2538
// Emits op_instanceof into dst.
RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* basePrototype)
{
    OpInstanceof::emit(this, dst, value, basePrototype);
    return dst;
}
2544
// Emits op_instanceof_custom (instanceof with a non-default hasInstance value).
RegisterID* BytecodeGenerator::emitInstanceOfCustom(RegisterID* dst, RegisterID* value, RegisterID* constructor, RegisterID* hasInstanceValue)
{
    OpInstanceofCustom::emit(this, dst, value, constructor, hasInstanceValue);
    return dst;
}
2550
// Emits op_in_by_val. Note the operand order: the opcode takes (base, property).
RegisterID* BytecodeGenerator::emitInByVal(RegisterID* dst, RegisterID* property, RegisterID* base)
{
    OpInByVal::emit(this, dst, base, property);
    return dst;
}
2556
// Emits op_in_by_id for a constant property name.
RegisterID* BytecodeGenerator::emitInById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    OpInById::emit(this, dst, base, addConstant(property));
    return dst;
}
2562
// Emits op_try_get_by_id (non-throwing property read used by builtins).
RegisterID* BytecodeGenerator::emitTryGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties are not supported with tryGetById.");

    OpTryGetById::emit(this, kill(dst), base, addConstant(property));
    return dst;
}
2570
// Emits op_get_by_id and records it for property-access profiling.
RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with get_by_val.");

    OpGetById::emit(this, kill(dst), base, addConstant(property));
    m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
    return dst;
}
2579
// Emits op_get_by_id_with_this (property read with an explicit |this|, e.g. super.foo).
RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, RegisterID* thisVal, const Identifier& property)
{
    ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with get_by_val.");

    OpGetByIdWithThis::emit(this, kill(dst), base, thisVal, addConstant(property));
    return dst;
}
2587
// Emits op_get_by_id_direct (own-property read, no prototype chain) and records it for profiling.
RegisterID* BytecodeGenerator::emitDirectGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with get_by_val_direct.");

    OpGetByIdDirect::emit(this, kill(dst), base, addConstant(property));
    m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
    return dst;
}
2596
// Emits a non-direct op_put_by_id (walks the prototype chain for setters) and
// feeds the static property analyzer. Returns value.
RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
    ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with put_by_val.");

    unsigned propertyIndex = addConstant(property);

    m_staticPropertyAnalyzer.putById(base, propertyIndex);

    OpPutById::emit(this, base, propertyIndex, value, PutByIdNone); // is not direct
    m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());

    return value;
}
2610
// Emits op_put_by_id_with_this (store with an explicit |this|, e.g. super.foo = v).
RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, RegisterID* thisValue, const Identifier& property, RegisterID* value)
{
    ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with put_by_val.");

    unsigned propertyIndex = addConstant(property);

    OpPutByIdWithThis::emit(this, base, thisValue, propertyIndex, value);

    return value;
}
2621
// Emits op_put_by_id, direct when allowed. Assignments to "__proto__" must not
// be direct unless the caller explicitly knows the put is direct (KnownDirect).
RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value, PropertyNode::PutType putType)
{
    ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with put_by_val(direct).");

    unsigned propertyIndex = addConstant(property);

    m_staticPropertyAnalyzer.putById(base, propertyIndex);

    PutByIdFlags type = (putType == PropertyNode::KnownDirect || property != m_vm->propertyNames->underscoreProto) ? PutByIdIsDirect : PutByIdNone;
    OpPutById::emit(this, base, propertyIndex, value, type);
    m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
    return value;
}
2635
// Emits op_put_getter_by_id: defines a getter with the given attributes on base.
void BytecodeGenerator::emitPutGetterById(RegisterID* base, const Identifier& property, unsigned attributes, RegisterID* getter)
{
    unsigned propertyIndex = addConstant(property);
    m_staticPropertyAnalyzer.putById(base, propertyIndex);

    OpPutGetterById::emit(this, base, propertyIndex, attributes, getter);
}
2643
// Emits op_put_setter_by_id: defines a setter with the given attributes on base.
void BytecodeGenerator::emitPutSetterById(RegisterID* base, const Identifier& property, unsigned attributes, RegisterID* setter)
{
    unsigned propertyIndex = addConstant(property);
    m_staticPropertyAnalyzer.putById(base, propertyIndex);

    OpPutSetterById::emit(this, base, propertyIndex, attributes, setter);
}
2651
// Emits op_put_getter_setter_by_id: defines a getter/setter pair in one instruction.
void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, unsigned attributes, RegisterID* getter, RegisterID* setter)
{
    unsigned propertyIndex = addConstant(property);

    m_staticPropertyAnalyzer.putById(base, propertyIndex);

    OpPutGetterSetterById::emit(this, base, propertyIndex, attributes, getter, setter);
}
2660
// Emits op_put_getter_by_val (computed property name).
void BytecodeGenerator::emitPutGetterByVal(RegisterID* base, RegisterID* property, unsigned attributes, RegisterID* getter)
{
    OpPutGetterByVal::emit(this, base, property, attributes, getter);
}
2665
// Emits op_put_setter_by_val (computed property name).
void BytecodeGenerator::emitPutSetterByVal(RegisterID* base, RegisterID* property, unsigned attributes, RegisterID* setter)
{
    OpPutSetterByVal::emit(this, base, property, attributes, setter);
}
2670
// Initializes the private fields of a freshly created generator object:
// @generatorNext, @generatorThis (when safe), @generatorState (0), @generatorFrame (null).
void BytecodeGenerator::emitPutGeneratorFields(RegisterID* nextFunction)
{
    // FIXME: Currently, we just create an object and store generator related fields as its properties for ease.
    // But to make it efficient, we will introduce JSGenerator class, add opcode new_generator and use its C++ fields instead of these private properties.
    // https://bugs.webkit.org/show_bug.cgi?id=151545

    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorNextPrivateName(), nextFunction, PropertyNode::KnownDirect);

    // We do not store 'this' in arrow function within constructor,
    // because it might be not initialized, if super is called later.
    if (!(isDerivedConstructorContext() && m_codeBlock->parseMode() == SourceParseMode::AsyncArrowFunctionMode))
        emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorThisPrivateName(), &m_thisRegister, PropertyNode::KnownDirect);

    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorStatePrivateName(), emitLoad(nullptr, jsNumber(0)), PropertyNode::KnownDirect);

    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorFramePrivateName(), emitLoad(nullptr, jsNull()), PropertyNode::KnownDirect);
}
2688
// Initializes the private fields of an async generator object: the generator
// fields plus the suspend reason and the (initially empty) request queue.
void BytecodeGenerator::emitPutAsyncGeneratorFields(RegisterID* nextFunction)
{
    ASSERT(isAsyncGeneratorWrapperParseMode(parseMode()));

    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorNextPrivateName(), nextFunction, PropertyNode::KnownDirect);

    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorThisPrivateName(), &m_thisRegister, PropertyNode::KnownDirect);

    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorStatePrivateName(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSAsyncGeneratorFunction::AsyncGeneratorState::SuspendedStart))), PropertyNode::KnownDirect);

    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorFramePrivateName(), emitLoad(nullptr, jsNull()), PropertyNode::KnownDirect);

    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().asyncGeneratorSuspendReasonPrivateName(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSAsyncGeneratorFunction::AsyncGeneratorSuspendReason::None))), PropertyNode::KnownDirect);

    // The queue is a linked list; first/last start out null.
    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().asyncGeneratorQueueFirstPrivateName(), emitLoad(nullptr, jsNull()), PropertyNode::KnownDirect);
    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().asyncGeneratorQueueLastPrivateName(), emitLoad(nullptr, jsNull()), PropertyNode::KnownDirect);
}
2706
// Emits op_del_by_id; dst receives the boolean result of the delete.
RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    OpDelById::emit(this, dst, base, addConstant(property));
    return dst;
}
2712
// Emits op_get_by_val, with a fast path when `property` is the loop variable of
// an active for-in: indexed contexts read by the raw index, structure contexts
// use op_get_direct_pname. Those instructions are recorded so the context can
// rewrite them if the fast-path assumption is later invalidated.
RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    for (size_t i = m_forInContextStack.size(); i--; ) {
        ForInContext& context = m_forInContextStack[i].get();
        if (context.local() != property)
            continue;

        if (context.isIndexedForInContext()) {
            auto& indexedContext = context.asIndexedForInContext();
            // Wide emission leaves room for in-place rewriting later.
            OpGetByVal::emit<OpcodeSize::Wide>(this, kill(dst), base, indexedContext.index());
            indexedContext.addGetInst(m_lastInstruction.offset(), property->index());
            return dst;
        }

        StructureForInContext& structureContext = context.asStructureForInContext();
        OpGetDirectPname::emit<OpcodeSize::Wide>(this, kill(dst), base, property, structureContext.index(), structureContext.enumerator());

        structureContext.addGetInst(m_lastInstruction.offset(), property->index());
        return dst;
    }

    // Generic path: no enclosing for-in context owns this property register.
    OpGetByVal::emit(this, kill(dst), base, property);
    return dst;
}
2737
// Emits op_get_by_val_with_this (computed read with an explicit |this|).
RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* thisValue, RegisterID* property)
{
    OpGetByValWithThis::emit(this, kill(dst), base, thisValue, property);
    return dst;
}
2743
// Emits op_put_by_val. Returns value.
RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
{
    OpPutByVal::emit(this, base, property, value);
    return value;
}
2749
// Emits op_put_by_val_with_this (computed store with an explicit |this|). Returns value.
RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* thisValue, RegisterID* property, RegisterID* value)
{
    OpPutByValWithThis::emit(this, base, thisValue, property, value);
    return value;
}
2755
// Emits op_put_by_val_direct (own-property store, bypassing setters). Returns value.
RegisterID* BytecodeGenerator::emitDirectPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
{
    OpPutByValDirect::emit(this, base, property, value);
    return value;
}
2761
// Emits op_del_by_val; dst receives the boolean result of the delete.
RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    OpDelByVal::emit(this, dst, base, property);
    return dst;
}
2767
// Emits op_super_sampler_begin (profiling instrumentation).
void BytecodeGenerator::emitSuperSamplerBegin()
{
    OpSuperSamplerBegin::emit(this);
}
2772
// Emits op_super_sampler_end, closing a super-sampler region.
void BytecodeGenerator::emitSuperSamplerEnd()
{
    OpSuperSamplerEnd::emit(this);
}
2777
// Emits op_identity_with_profile: a no-op carrying a SpeculatedType hint,
// split into high and low 32-bit halves for the instruction encoding.
RegisterID* BytecodeGenerator::emitIdWithProfile(RegisterID* src, SpeculatedType profile)
{
    OpIdentityWithProfile::emit(this, src, static_cast<uint32_t>(profile >> 32), static_cast<uint32_t>(profile));
    return src;
}
2783
// Emits op_unreachable, marking code the generator believes cannot execute.
void BytecodeGenerator::emitUnreachable()
{
    OpUnreachable::emit(this);
}
2788
// Emits op_get_argument for zero-based `index`; slot 0 is |this|, hence the +1.
RegisterID* BytecodeGenerator::emitGetArgument(RegisterID* dst, int32_t index)
{
    OpGetArgument::emit(this, dst, index + 1 /* Including |this| */);
    return dst;
}
2794
// Emits op_create_this, reading the callee from dst and writing the new object
// back to dst; hooks the instruction into the static property analyzer.
RegisterID* BytecodeGenerator::emitCreateThis(RegisterID* dst)
{
    OpCreateThis::emit(this, dst, dst, 0);
    m_staticPropertyAnalyzer.createThis(dst, m_lastInstruction);

    m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
    return dst;
}
2803
// Emits op_check_tdz: throws at runtime if `target` still holds the TDZ sentinel.
void BytecodeGenerator::emitTDZCheck(RegisterID* target)
{
    OpCheckTdz::emit(this, target);
}
2808
2809bool BytecodeGenerator::needsTDZCheck(const Variable& variable)
2810{
2811 for (unsigned i = m_TDZStack.size(); i--;) {
2812 auto iter = m_TDZStack[i].find(variable.ident().impl());
2813 if (iter == m_TDZStack[i].end())
2814 continue;
2815 return iter->value != TDZNecessityLevel::NotNeeded;
2816 }
2817
2818 return false;
2819}
2820
2821void BytecodeGenerator::emitTDZCheckIfNecessary(const Variable& variable, RegisterID* target, RegisterID* scope)
2822{
2823 if (needsTDZCheck(variable)) {
2824 if (target)
2825 emitTDZCheck(target);
2826 else {
2827 RELEASE_ASSERT(!variable.isLocal() && scope);
2828 RefPtr<RegisterID> result = emitGetFromScope(newTemporary(), scope, variable, DoNotThrowIfNotFound);
2829 emitTDZCheck(result.get());
2830 }
2831 }
2832}
2833
2834void BytecodeGenerator::liftTDZCheckIfPossible(const Variable& variable)
2835{
2836 RefPtr<UniquedStringImpl> identifier(variable.ident().impl());
2837 for (unsigned i = m_TDZStack.size(); i--;) {
2838 auto iter = m_TDZStack[i].find(identifier);
2839 if (iter != m_TDZStack[i].end()) {
2840 if (iter->value == TDZNecessityLevel::Optimize) {
2841 m_cachedVariablesUnderTDZ = { };
2842 iter->value = TDZNecessityLevel::NotNeeded;
2843 }
2844 break;
2845 }
2846 }
2847}
2848
2849void BytecodeGenerator::pushTDZVariables(const VariableEnvironment& environment, TDZCheckOptimization optimization, TDZRequirement requirement)
2850{
2851 if (!environment.size())
2852 return;
2853
2854 TDZNecessityLevel level;
2855 if (requirement == TDZRequirement::UnderTDZ) {
2856 if (optimization == TDZCheckOptimization::Optimize)
2857 level = TDZNecessityLevel::Optimize;
2858 else
2859 level = TDZNecessityLevel::DoNotOptimize;
2860 } else
2861 level = TDZNecessityLevel::NotNeeded;
2862
2863 TDZMap map;
2864 for (const auto& entry : environment)
2865 map.add(entry.key, entry.value.isFunction() ? TDZNecessityLevel::NotNeeded : level);
2866
2867 m_TDZStack.append(WTFMove(map));
2868 m_cachedVariablesUnderTDZ = { };
2869}
2870
// Computes (and caches) the set of variables currently under TDZ across the
// whole TDZ stack, as a CompactVariableMap handle. Returns nullopt when empty.
Optional<CompactVariableMap::Handle> BytecodeGenerator::getVariablesUnderTDZ()
{
    // Fast path: the cache is valid; m_hasCachedVariablesUnderTDZ distinguishes
    // "cached empty" from "cached non-empty".
    if (m_cachedVariablesUnderTDZ) {
        if (!m_hasCachedVariablesUnderTDZ) {
            ASSERT(m_cachedVariablesUnderTDZ.environment().toVariableEnvironment().isEmpty());
            return WTF::nullopt;
        }
        return m_cachedVariablesUnderTDZ;
    }

    // We keep track of variablesThatDontNeedTDZ in this algorithm to prevent
    // reporting that "x" is under TDZ if this function is called at "...".
    //
    // {
    //     {
    //         let x;
    //         ...
    //     }
    //     let x;
    // }
    SmallPtrSet<UniquedStringImpl*, 16> variablesThatDontNeedTDZ;
    VariableEnvironment environment;
    // Walk innermost-to-outermost so an inner NotNeeded entry shadows outer ones.
    for (unsigned i = m_TDZStack.size(); i--; ) {
        auto& map = m_TDZStack[i];
        for (auto& entry : map) {
            if (entry.value != TDZNecessityLevel::NotNeeded) {
                if (!variablesThatDontNeedTDZ.contains(entry.key.get()))
                    environment.add(entry.key.get());
            } else
                variablesThatDontNeedTDZ.add(entry.key.get());
        }
    }

    m_cachedVariablesUnderTDZ = m_vm->m_compactVariableMap->get(environment);
    m_hasCachedVariablesUnderTDZ = !environment.isEmpty();
    if (!m_hasCachedVariablesUnderTDZ)
        return WTF::nullopt;

    return m_cachedVariablesUnderTDZ;
}
2911
// Snapshots the current TDZ stack into `preservedStack` (copy, not move).
void BytecodeGenerator::preserveTDZStack(BytecodeGenerator::PreservedTDZStack& preservedStack)
{
    preservedStack.m_preservedTDZStack = m_TDZStack;
}
2916
// Restores a previously preserved TDZ stack and invalidates the under-TDZ cache.
void BytecodeGenerator::restoreTDZStack(const BytecodeGenerator::PreservedTDZStack& preservedStack)
{
    m_TDZStack = preservedStack.m_preservedTDZStack;
    m_cachedVariablesUnderTDZ = { };
}
2922
// Emits op_new_object and hooks the instruction into the static property
// analyzer so it can later patch in an inline capacity.
RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
{
    OpNewObject::emit(this, dst, 0);
    m_staticPropertyAnalyzer.newObject(dst, m_lastInstruction);

    return dst;
}
2930
// Returns (creating and interning on first use) the JSBigInt constant for the
// given textual digits, radix and sign. Keyed by (identifier, radix, sign) so
// distinct spellings are deduplicated.
JSValue BytecodeGenerator::addBigIntConstant(const Identifier& identifier, uint8_t radix, bool sign)
{
    return m_bigIntMap.ensure(BigIntMapEntry(identifier.impl(), radix, sign), [&] {
        auto scope = DECLARE_CATCH_SCOPE(*vm());
        auto parseIntSign = sign ? JSBigInt::ParseIntSign::Signed : JSBigInt::ParseIntSign::Unsigned;
        JSBigInt* bigIntInMap = JSBigInt::parseInt(nullptr, *vm(), identifier.string(), radix, JSBigInt::ErrorParseMode::ThrowExceptions, parseIntSign);
        // FIXME: [ESNext] Enables a way to throw an error on ByteCodeGenerator step
        // https://bugs.webkit.org/show_bug.cgi?id=180139
        scope.assertNoException();
        RELEASE_ASSERT(bigIntInMap);
        addConstantValue(bigIntInMap);

        return bigIntInMap;
    }).iterator->value;
}
2946
// Returns (creating and interning on first use) the JSString constant for `identifier`.
JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
{
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
    if (!stringInMap) {
        stringInMap = jsString(vm(), identifier.string());
        addConstantValue(stringInMap);
    }
    return stringInMap;
}
2956
// Interns a template-object descriptor (deduplicated by descriptor identity)
// and returns the constant-pool register that holds it.
RegisterID* BytecodeGenerator::addTemplateObjectConstant(Ref<TemplateObjectDescriptor>&& descriptor)
{
    JSTemplateObjectDescriptor* descriptorValue = m_templateObjectDescriptorMap.ensure(descriptor.copyRef(), [&] {
        return JSTemplateObjectDescriptor::create(*vm(), WTFMove(descriptor));
    }).iterator->value;

    int index = addConstantIndex();
    m_codeBlock->addConstant(descriptorValue);
    return &m_constantPoolRegisters[index];
}
2967
// Emits op_new_array_buffer: constructs an array from a precomputed immutable butterfly constant.
RegisterID* BytecodeGenerator::emitNewArrayBuffer(RegisterID* dst, JSImmutableButterfly* array, IndexingType recommendedIndexingType)
{
    OpNewArrayBuffer::emit(this, dst, addConstantValue(array), recommendedIndexingType);
    return dst;
}
2973
// Emits op_new_array for up to `length` leading elements. Element values are
// evaluated into consecutive temporaries, since the opcode expects its initial
// values in a contiguous register range.
RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length, IndexingType recommendedIndexingType)
{
    Vector<RefPtr<RegisterID>, 16, UnsafeVectorOverflow> argv;
    for (ElementNode* n = elements; n; n = n->next()) {
        if (!length)
            break;
        length--;
        ASSERT(!n->value()->isSpreadExpression());
        argv.append(newTemporary());
        // op_new_array requires the initial values to be a sequential range of registers
        ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() - 1);
        emitNode(argv.last().get(), n->value());
    }
    ASSERT(!length);
    OpNewArray::emit(this, dst, argv.size() ? argv[0].get() : VirtualRegister { 0 }, argv.size(), recommendedIndexingType);
    return dst;
}
2991
// Emits op_new_array_with_spread. A bit vector marks which argument slots are
// spreads; registers are allocated first (contiguously), then filled, because
// evaluating an element may itself allocate temporaries.
RegisterID* BytecodeGenerator::emitNewArrayWithSpread(RegisterID* dst, ElementNode* elements)
{
    BitVector bitVector;
    Vector<RefPtr<RegisterID>, 16> argv;
    for (ElementNode* node = elements; node; node = node->next()) {
        bitVector.set(argv.size(), node->value()->isSpreadExpression());

        argv.append(newTemporary());
        // op_new_array_with_spread requires the initial values to be a sequential range of registers.
        RELEASE_ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() - 1);
    }

    RELEASE_ASSERT(argv.size());

    {
        unsigned i = 0;
        for (ElementNode* node = elements; node; node = node->next()) {
            if (node->value()->isSpreadExpression()) {
                // Spread slots hold the result of op_spread applied to the expression.
                ExpressionNode* expression = static_cast<SpreadExpressionNode*>(node->value())->expression();
                RefPtr<RegisterID> tmp = newTemporary();
                emitNode(tmp.get(), expression);

                OpSpread::emit(this, argv[i].get(), tmp.get());
            } else {
                ExpressionNode* expression = node->value();
                emitNode(argv[i].get(), expression);
            }
            i++;
        }
    }

    unsigned bitVectorIndex = m_codeBlock->addBitVector(WTFMove(bitVector));
    OpNewArrayWithSpread::emit(this, dst, argv[0].get(), argv.size(), bitVectorIndex);
    return dst;
}
3027
// Emits op_new_array_with_size (array of a runtime-computed length).
RegisterID* BytecodeGenerator::emitNewArrayWithSize(RegisterID* dst, RegisterID* length)
{
    OpNewArrayWithSize::emit(this, dst, length);
    return dst;
}
3033
// Emits op_new_regexp from an interned RegExp constant.
RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
{
    OpNewRegexp::emit(this, dst, addConstantValue(regExp));
    return dst;
}
3039
// Registers `function` as a function expression in the code block and emits
// the opcode variant matching its parse mode (generator/async/async-generator/plain).
void BytecodeGenerator::emitNewFunctionExpressionCommon(RegisterID* dst, FunctionMetadataNode* function)
{
    unsigned index = m_codeBlock->addFunctionExpr(makeFunction(function));

    switch (function->parseMode()) {
    case SourceParseMode::GeneratorWrapperFunctionMode:
    case SourceParseMode::GeneratorWrapperMethodMode:
        OpNewGeneratorFuncExp::emit(this, dst, scopeRegister(), index);
        break;
    case SourceParseMode::AsyncFunctionMode:
    case SourceParseMode::AsyncMethodMode:
    case SourceParseMode::AsyncArrowFunctionMode:
        OpNewAsyncFuncExp::emit(this, dst, scopeRegister(), index);
        break;
    case SourceParseMode::AsyncGeneratorWrapperFunctionMode:
    case SourceParseMode::AsyncGeneratorWrapperMethodMode:
        OpNewAsyncGeneratorFuncExp::emit(this, dst, scopeRegister(), index);
        break;
    default:
        OpNewFuncExp::emit(this, dst, scopeRegister(), index);
        break;
    }
}
3063
// Emits the function-expression opcode for a FuncExprNode.
RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* dst, FuncExprNode* func)
{
    emitNewFunctionExpressionCommon(dst, func->metadata());
    return dst;
}
3069
// Emits the function-expression opcode for an arrow function (plain or async).
RegisterID* BytecodeGenerator::emitNewArrowFunctionExpression(RegisterID* dst, ArrowFuncExprNode* func)
{
    ASSERT(SourceParseModeSet(SourceParseMode::ArrowFunctionMode, SourceParseMode::AsyncArrowFunctionMode).contains(func->metadata()->parseMode()));
    emitNewFunctionExpressionCommon(dst, func->metadata());
    return dst;
}
3076
// Emits the function-expression opcode for a class/object method definition.
RegisterID* BytecodeGenerator::emitNewMethodDefinition(RegisterID* dst, MethodDefinitionNode* func)
{
    ASSERT(isMethodParseMode(func->metadata()->parseMode()));
    emitNewFunctionExpressionCommon(dst, func->metadata());
    return dst;
}
3083
// Synthesizes a default class constructor (for classes with no explicit one),
// registers it as a function expression, and emits op_new_func_exp for it.
RegisterID* BytecodeGenerator::emitNewDefaultConstructor(RegisterID* dst, ConstructorKind constructorKind, const Identifier& name,
    const Identifier& ecmaName, const SourceCode& classSource)
{
    UnlinkedFunctionExecutable* executable = m_vm->builtinExecutables()->createDefaultConstructor(constructorKind, name);
    executable->setInvalidTypeProfilingOffsets();
    executable->setEcmaName(ecmaName);
    executable->setClassSource(classSource);

    unsigned index = m_codeBlock->addFunctionExpr(executable);

    OpNewFuncExp::emit(this, dst, scopeRegister(), index);
    return dst;
}
3097
3098RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionMetadataNode* function)
3099{
3100 unsigned index = m_codeBlock->addFunctionDecl(makeFunction(function));
3101 if (isGeneratorWrapperParseMode(function->parseMode()))
3102 OpNewGeneratorFunc::emit(this, dst, scopeRegister(), index);
3103 else if (function->parseMode() == SourceParseMode::AsyncFunctionMode)
3104 OpNewAsyncFunc::emit(this, dst, scopeRegister(), index);
3105 else if (isAsyncGeneratorWrapperParseMode(function->parseMode()))
3106 OpNewAsyncGeneratorFunc::emit(this, dst, scopeRegister(), index);
3107 else
3108 OpNewFunc::emit(this, dst, scopeRegister(), index);
3109 return dst;
3110}
3111
3112void BytecodeGenerator::emitSetFunctionNameIfNeeded(ExpressionNode* valueNode, RegisterID* value, RegisterID* name)
3113{
3114 if (valueNode->isBaseFuncExprNode()) {
3115 FunctionMetadataNode* metadata = static_cast<BaseFuncExprNode*>(valueNode)->metadata();
3116 if (!metadata->ecmaName().isNull())
3117 return;
3118 } else if (valueNode->isClassExprNode()) {
3119 ClassExprNode* classExprNode = static_cast<ClassExprNode*>(valueNode);
3120 if (!classExprNode->ecmaName().isNull())
3121 return;
3122 if (classExprNode->hasStaticProperty(m_vm->propertyNames->name))
3123 return;
3124 } else
3125 return;
3126
3127 // FIXME: We should use an op_call to an internal function here instead.
3128 // https://bugs.webkit.org/show_bug.cgi?id=155547
3129 OpSetFunctionName::emit(this, value, name);
3130}
3131
// Emits a plain (non-tail) call; thin forwarder to the templated emitter with op_call.
RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
{
    return emitCall<OpCall>(dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd, debuggableCall);
}
3136
3137RegisterID* BytecodeGenerator::emitCallInTailPosition(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3138{
3139 if (m_inTailPosition) {
3140 m_codeBlock->setHasTailCalls();
3141 return emitCall<OpTailCall>(dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd, debuggableCall);
3142 }
3143 return emitCall<OpCall>(dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd, debuggableCall);
3144}
3145
// Emits a direct eval call (op_call_eval); no constructor fast path applies.
RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
{
    return emitCall<OpCallEval>(dst, func, NoExpectedFunction, callArguments, divot, divotStart, divotEnd, debuggableCall);
}
3150
3151ExpectedFunction BytecodeGenerator::expectedFunctionForIdentifier(const Identifier& identifier)
3152{
3153 if (identifier == propertyNames().Object || identifier == propertyNames().builtinNames().ObjectPrivateName())
3154 return ExpectObjectConstructor;
3155 if (identifier == propertyNames().Array || identifier == propertyNames().builtinNames().ArrayPrivateName())
3156 return ExpectArrayConstructor;
3157 return NoExpectedFunction;
3158}
3159
// Attempts to emit an inline fast path for calls believed to target the Object
// or Array constructor. On success: emits an op_jneq_ptr guard against the real
// constructor, the specialized construction, and a jump to |done|; the general
// call sequence the caller emits next becomes the guarded slow path. Returns
// NoExpectedFunction when no snippet was emitted (caller then does a plain call).
ExpectedFunction BytecodeGenerator::emitExpectedFunctionSnippet(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, Label& done)
{
    Ref<Label> realCall = newLabel();
    switch (expectedFunction) {
    case ExpectObjectConstructor: {
        // If the number of arguments is non-zero, then we can't do anything interesting.
        if (callArguments.argumentCountIncludingThis() >= 2)
            return NoExpectedFunction;

        // Guard: if |func| is not actually the Object constructor, take the real call path.
        OpJneqPtr::emit(this, func, Special::ObjectConstructor, realCall->bind(this));

        if (dst != ignoredResult())
            emitNewObject(dst);
        break;
    }

    case ExpectArrayConstructor: {
        // If you're doing anything other than "new Array()" or "new Array(foo)" then we
        // don't inline it, for now. The only reason is that call arguments are in
        // the opposite order of what op_new_array expects, so we'd either need to change
        // how op_new_array works or we'd need an op_new_array_reverse. Neither of these
        // things sounds like it's worth it.
        if (callArguments.argumentCountIncludingThis() > 2)
            return NoExpectedFunction;

        OpJneqPtr::emit(this, func, Special::ArrayConstructor, realCall->bind(this));

        if (dst != ignoredResult()) {
            if (callArguments.argumentCountIncludingThis() == 2)
                emitNewArrayWithSize(dst, callArguments.argumentRegister(0));
            else {
                ASSERT(callArguments.argumentCountIncludingThis() == 1);
                OpNewArray::emit(this, dst, VirtualRegister { 0 }, 0, ArrayWithUndecided);
            }
        }
        break;
    }

    default:
        ASSERT(expectedFunction == NoExpectedFunction);
        return NoExpectedFunction;
    }

    // Fast path finished: jump over the general call sequence, then place the
    // slow-path label the guard above branches to.
    OpJmp::emit(this, done.bind(this));
    emitLabel(realCall.get());

    return expectedFunction;
}
3208
// Core call emitter shared by op_call, op_call_eval, and op_tail_call.
// Evaluates arguments (with spread fast paths that reroute to the varargs
// emitter), reserves the callee frame header, optionally emits the
// expected-constructor fast path, and finally emits the call opcode itself.
template<typename CallOp>
RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
{
    constexpr auto opcodeID = CallOp::opcodeID;
    ASSERT(opcodeID == op_call || opcodeID == op_call_eval || opcodeID == op_tail_call);
    ASSERT(func->refCount());

    // Generate code for arguments.
    unsigned argument = 0;
    if (callArguments.argumentsNode()) {
        ArgumentListNode* n = callArguments.argumentsNode()->m_listNode;
        // A lone spread argument, f(...expr), is routed to the varargs call forms.
        if (n && n->m_expr->isSpreadExpression()) {
            RELEASE_ASSERT(!n->m_next);
            auto expression = static_cast<SpreadExpressionNode*>(n->m_expr)->expression();
            if (expression->isArrayLiteral()) {
                auto* elements = static_cast<ArrayNode*>(expression)->elements();
                // Collapse f(...[...inner]) to spreading |inner| directly: evaluate it,
                // run op_spread on it in place, and call varargs with the result.
                if (elements && !elements->next() && elements->value()->isSpreadExpression()) {
                    ExpressionNode* expression = static_cast<SpreadExpressionNode*>(elements->value())->expression();
                    RefPtr<RegisterID> argumentRegister = emitNode(callArguments.argumentRegister(0), expression);
                    OpSpread::emit(this, argumentRegister.get(), argumentRegister.get());

                    return emitCallVarargs<typename VarArgsOp<CallOp>::type>(dst, func, callArguments.thisRegister(), argumentRegister.get(), newTemporary(), 0, divot, divotStart, divotEnd, debuggableCall);
                }
            }
            RefPtr<RegisterID> argumentRegister;
            argumentRegister = expression->emitBytecode(*this, callArguments.argumentRegister(0));
            // NOTE(review): copying |this| into a fresh temporary appears to keep it
            // live across the varargs call setup — confirm before changing.
            RefPtr<RegisterID> thisRegister = move(newTemporary(), callArguments.thisRegister());
            return emitCallVarargs<typename VarArgsOp<CallOp>::type>(dst, func, callArguments.thisRegister(), argumentRegister.get(), newTemporary(), 0, divot, divotStart, divotEnd, debuggableCall);
        }
        // Ordinary argument list: evaluate each argument into its designated slot.
        for (; n; n = n->m_next)
            emitNode(callArguments.argumentRegister(argument++), n);
    }

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, CallFrame::headerSizeInRegisters, UnsafeVectorOverflow> callFrame;
    for (int i = 0; i < CallFrame::headerSizeInRegisters; ++i)
        callFrame.append(newTemporary());

    if (m_shouldEmitDebugHooks && debuggableCall == DebuggableCall::Yes)
        emitDebugHook(WillExecuteExpression, divotStart);

    emitExpressionInfo(divot, divotStart, divotEnd);

    // Optional Object/Array constructor fast path; |done| skips the slow call below.
    Ref<Label> done = newLabel();
    expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());

    if (opcodeID == op_tail_call)
        emitLogShadowChickenTailIfNecessary();

    // Emit call.
    ASSERT(dst);
    ASSERT(dst != ignoredResult());
    CallOp::emit(this, dst, func, callArguments.argumentCountIncludingThis(), callArguments.stackOffset());

    if (expectedFunction != NoExpectedFunction)
        emitLabel(done.get());

    return dst;
}
3268
// Emits a plain varargs call; thin forwarder to the templated emitter with op_call_varargs.
RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
{
    return emitCallVarargs<OpCallVarargs>(dst, func, thisRegister, arguments, firstFreeRegister, firstVarArgOffset, divot, divotStart, divotEnd, debuggableCall);
}
3273
3274RegisterID* BytecodeGenerator::emitCallVarargsInTailPosition(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3275{
3276 if (m_inTailPosition)
3277 return emitCallVarargs<OpTailCallVarargs>(dst, func, thisRegister, arguments, firstFreeRegister, firstVarArgOffset, divot, divotStart, divotEnd, debuggableCall);
3278 return emitCallVarargs<OpCallVarargs>(dst, func, thisRegister, arguments, firstFreeRegister, firstVarArgOffset, divot, divotStart, divotEnd, debuggableCall);
3279}
3280
// Emits a varargs construct; thin forwarder to the templated emitter with op_construct_varargs.
RegisterID* BytecodeGenerator::emitConstructVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
{
    return emitCallVarargs<OpConstructVarargs>(dst, func, thisRegister, arguments, firstFreeRegister, firstVarArgOffset, divot, divotStart, divotEnd, debuggableCall);
}
3285
// Emits a tail call that forwards the current frame's arguments (no explicit
// arguments register — nullptr is passed for that slot). Only valid when the
// generator is in tail position.
RegisterID* BytecodeGenerator::emitCallForwardArgumentsInTailPosition(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
{
    ASSERT(m_inTailPosition);
    return emitCallVarargs<OpTailCallForwardArguments>(dst, func, thisRegister, nullptr, firstFreeRegister, firstVarArgOffset, divot, divotStart, divotEnd, debuggableCall);
}
3291
// Core varargs emitter shared by call/tail-call/construct/forward-arguments
// variants: records debug/expression info, logs ShadowChicken for tail calls,
// then emits the opcode. A null |arguments| is encoded as VirtualRegister(0)
// (used by the forward-arguments form).
template<typename VarargsOp>
RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
{
    if (m_shouldEmitDebugHooks && debuggableCall == DebuggableCall::Yes)
        emitDebugHook(WillExecuteExpression, divotStart);

    emitExpressionInfo(divot, divotStart, divotEnd);

    if (VarargsOp::opcodeID == op_tail_call_varargs)
        emitLogShadowChickenTailIfNecessary();

    // Emit call.
    ASSERT(dst != ignoredResult());
    VarargsOp::emit(this, dst, func, thisRegister, arguments ? arguments : VirtualRegister(0), firstFreeRegister, firstVarArgOffset);
    return dst;
}
3308
3309void BytecodeGenerator::emitLogShadowChickenPrologueIfNecessary()
3310{
3311 if (!m_shouldEmitDebugHooks && !Options::alwaysUseShadowChicken())
3312 return;
3313 OpLogShadowChickenPrologue::emit(this, scopeRegister());
3314}
3315
3316void BytecodeGenerator::emitLogShadowChickenTailIfNecessary()
3317{
3318 if (!m_shouldEmitDebugHooks && !Options::alwaysUseShadowChicken())
3319 return;
3320 OpLogShadowChickenTail::emit(this, thisRegister(), scopeRegister());
3321}
3322
// Emits a DefineOwnProperty-style definition of |propertyNameRegister| on
// |newObj|. Exactly one of the data form (|valueRegister| set) or accessor form
// (|getterRegister|/|setterRegister|) may be used; |options| carries the
// PropertyConfigurable/Writable/Enumerable flags.
void BytecodeGenerator::emitCallDefineProperty(RegisterID* newObj, RegisterID* propertyNameRegister,
    RegisterID* valueRegister, RegisterID* getterRegister, RegisterID* setterRegister, unsigned options, const JSTextPosition& position)
{
    DefinePropertyAttributes attributes;
    if (options & PropertyConfigurable)
        attributes.setConfigurable(true);

    // Writable only applies to data properties: explicitly false when a value is
    // present without the writable flag, and left unset for accessors.
    if (options & PropertyWritable)
        attributes.setWritable(true);
    else if (valueRegister)
        attributes.setWritable(false);

    if (options & PropertyEnumerable)
        attributes.setEnumerable(true);

    if (valueRegister)
        attributes.setValue();
    if (getterRegister)
        attributes.setGet();
    if (setterRegister)
        attributes.setSet();

    // Data and accessor forms are mutually exclusive.
    ASSERT(!valueRegister || (!getterRegister && !setterRegister));

    emitExpressionInfo(position, position, position);

    if (attributes.hasGet() || attributes.hasSet()) {
        // An accessor property with only one of get/set fills the missing slot
        // with the shared %ThrowTypeError% function.
        RefPtr<RegisterID> throwTypeErrorFunction;
        if (!attributes.hasGet() || !attributes.hasSet())
            throwTypeErrorFunction = moveLinkTimeConstant(nullptr, LinkTimeConstant::ThrowTypeErrorFunction);

        RefPtr<RegisterID> getter;
        if (attributes.hasGet())
            getter = getterRegister;
        else
            getter = throwTypeErrorFunction;

        RefPtr<RegisterID> setter;
        if (attributes.hasSet())
            setter = setterRegister;
        else
            setter = throwTypeErrorFunction;

        OpDefineAccessorProperty::emit(this, newObj, propertyNameRegister, getter.get(), setter.get(), emitLoad(nullptr, jsNumber(attributes.rawRepresentation())));
    } else {
        OpDefineDataProperty::emit(this, newObj, propertyNameRegister, valueRegister, emitLoad(nullptr, jsNumber(attributes.rawRepresentation())));
    }
}
3371
// Emits a return of |src|, layering on constructor return semantics: in a
// constructor, returning a non-object substitutes |this|, and in a derived
// (class extends) constructor only object or undefined returns are legal —
// anything else throws a TypeError, and returning undefined requires |this| to
// have been initialized (TDZ check).
RegisterID* BytecodeGenerator::emitReturn(RegisterID* src, ReturnFrom from)
{
    if (isConstructor()) {
        bool isDerived = constructorKind() == ConstructorKind::Extends;
        bool srcIsThis = src->index() == m_thisRegister.index();

        // Returning |this| from a derived constructor must verify it was initialized.
        if (isDerived && (srcIsThis || from == ReturnFrom::Finally))
            emitTDZCheck(src);

        if (!srcIsThis || from == ReturnFrom::Finally) {
            // If |src| is an object it is returned as-is (jump past the substitution below).
            Ref<Label> isObjectLabel = newLabel();
            emitJumpIfTrue(emitIsObject(newTemporary(), src), isObjectLabel.get());

            if (isDerived) {
                // Derived constructors additionally allow undefined; any other
                // non-object return value is a TypeError.
                Ref<Label> isUndefinedLabel = newLabel();
                emitJumpIfTrue(emitIsUndefined(newTemporary(), src), isUndefinedLabel.get());
                emitThrowTypeError("Cannot return a non-object type in the constructor of a derived class.");
                emitLabel(isUndefinedLabel.get());
                emitTDZCheck(&m_thisRegister);
            }
            // Non-object return value: return |this| instead.
            OpRet::emit(this, &m_thisRegister);
            emitLabel(isObjectLabel.get());
        }
    }

    OpRet::emit(this, src);
    return src;
}
3400
// Emits op_end, terminating the program/eval code with |src| as its completion value.
RegisterID* BytecodeGenerator::emitEnd(RegisterID* src)
{
    OpEnd::emit(this, src);
    return src;
}
3406
3407
// Emits a `new` expression: evaluates arguments (with the same spread fast
// paths as emitCall, rerouting to op_construct_varargs), seeds the |this| slot
// from |lazyThis|, reserves the callee frame header, optionally emits the
// Object/Array constructor fast path, and emits op_construct.
RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, RegisterID* lazyThis, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
{
    ASSERT(func->refCount());

    // Generate code for arguments.
    unsigned argument = 0;
    if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {

        ArgumentListNode* n = callArguments.argumentsNode()->m_listNode;
        // A lone spread argument, new F(...expr), goes through the varargs construct.
        if (n && n->m_expr->isSpreadExpression()) {
            RELEASE_ASSERT(!n->m_next);
            auto expression = static_cast<SpreadExpressionNode*>(n->m_expr)->expression();
            if (expression->isArrayLiteral()) {
                auto* elements = static_cast<ArrayNode*>(expression)->elements();
                // Collapse new F(...[...inner]) to spreading |inner| directly.
                if (elements && !elements->next() && elements->value()->isSpreadExpression()) {
                    ExpressionNode* expression = static_cast<SpreadExpressionNode*>(elements->value())->expression();
                    RefPtr<RegisterID> argumentRegister = emitNode(callArguments.argumentRegister(0), expression);
                    OpSpread::emit(this, argumentRegister.get(), argumentRegister.get());

                    move(callArguments.thisRegister(), lazyThis);
                    // NOTE(review): copying |this| into a fresh temporary appears to keep
                    // it live across the varargs construct — confirm before changing.
                    RefPtr<RegisterID> thisRegister = move(newTemporary(), callArguments.thisRegister());
                    return emitConstructVarargs(dst, func, callArguments.thisRegister(), argumentRegister.get(), newTemporary(), 0, divot, divotStart, divotEnd, DebuggableCall::No);
                }
            }
            RefPtr<RegisterID> argumentRegister;
            argumentRegister = expression->emitBytecode(*this, callArguments.argumentRegister(0));
            move(callArguments.thisRegister(), lazyThis);
            return emitConstructVarargs(dst, func, callArguments.thisRegister(), argumentRegister.get(), newTemporary(), 0, divot, divotStart, divotEnd, DebuggableCall::No);
        }

        // Ordinary argument list: evaluate each argument into its designated slot.
        for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
            emitNode(callArguments.argumentRegister(argument++), n);
    }

    move(callArguments.thisRegister(), lazyThis);

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, CallFrame::headerSizeInRegisters, UnsafeVectorOverflow> callFrame;
    for (int i = 0; i < CallFrame::headerSizeInRegisters; ++i)
        callFrame.append(newTemporary());

    emitExpressionInfo(divot, divotStart, divotEnd);

    // Optional Object/Array constructor fast path; |done| skips the slow construct below.
    Ref<Label> done = newLabel();
    expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());

    OpConstruct::emit(this, dst, func, callArguments.argumentCountIncludingThis(), callArguments.stackOffset());

    if (expectedFunction != NoExpectedFunction)
        emitLabel(done.get());

    return dst;
}
3461
// Emits op_strcat: concatenates |count| consecutive registers starting at |src| into |dst|.
RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
{
    OpStrcat::emit(this, dst, src, count);
    return dst;
}
3467
// Emits op_to_primitive, converting |src| into |dst|.
void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
{
    OpToPrimitive::emit(this, dst, src);
}
3472
// Emits op_get_scope, loading the current scope into the scope register.
void BytecodeGenerator::emitGetScope()
{
    OpGetScope::emit(this, scopeRegister());
}
3477
// Enters a `with` statement scope: allocates a block-scope register (ref'd so it
// survives until emitPopWithScope derefs it), emits op_push_with_scope over
// |objectScope|, makes it the current scope, and records it on the lexical
// scope stack with the with-scope flag set.
RegisterID* BytecodeGenerator::emitPushWithScope(RegisterID* objectScope)
{
    pushLocalControlFlowScope();
    RegisterID* newScope = newBlockScopeVariable();
    newScope->ref(); // Balanced by deref() in emitPopWithScope().

    OpPushWithScope::emit(this, newScope, scopeRegister(), objectScope);

    move(scopeRegister(), newScope);
    // No symbol table for a with scope; the `true` marks m_isWithScope.
    m_lexicalScopeStack.append({ nullptr, newScope, true, 0 });

    return newScope;
}
3491
// Emits op_get_parent_scope, loading |scope|'s enclosing scope into |dst|.
RegisterID* BytecodeGenerator::emitGetParentScope(RegisterID* dst, RegisterID* scope)
{
    OpGetParentScope::emit(this, dst, scope);
    return dst;
}
3497
3498void BytecodeGenerator::emitPopScope(RegisterID* dst, RegisterID* scope)
3499{
3500 RefPtr<RegisterID> parentScope = emitGetParentScope(newTemporary(), scope);
3501 move(dst, parentScope.get());
3502}
3503
// Leaves a `with` scope pushed by emitPushWithScope: restores the parent scope
// into the scope register, pops the control-flow scope, and releases the
// block-scope register ref'd on entry.
void BytecodeGenerator::emitPopWithScope()
{
    emitPopScope(scopeRegister(), scopeRegister());
    popLocalControlFlowScope();
    auto stackEntry = m_lexicalScopeStack.takeLast();
    stackEntry.m_scope->deref(); // Balances the ref() in emitPushWithScope().
    RELEASE_ASSERT(stackEntry.m_isWithScope);
}
3512
3513void BytecodeGenerator::emitDebugHook(DebugHookType debugHookType, const JSTextPosition& divot)
3514{
3515 if (!m_shouldEmitDebugHooks)
3516 return;
3517
3518 emitExpressionInfo(divot, divot, divot);
3519 OpDebug::emit(this, debugHookType, false);
3520}
3521
3522void BytecodeGenerator::emitDebugHook(DebugHookType debugHookType, unsigned line, unsigned charOffset, unsigned lineStart)
3523{
3524 emitDebugHook(debugHookType, JSTextPosition(line, charOffset, lineStart));
3525}
3526
3527void BytecodeGenerator::emitDebugHook(StatementNode* statement)
3528{
3529 // DebuggerStatementNode will output its own special debug hook.
3530 if (statement->isDebuggerStatement())
3531 return;
3532
3533 emitDebugHook(WillExecuteStatement, statement->position());
3534}
3535
// Emits a WillExecuteStatement hook at |expr|'s position (expressions used in
// statement position).
void BytecodeGenerator::emitDebugHook(ExpressionNode* expr)
{
    emitDebugHook(WillExecuteStatement, expr->position());
}
3540
3541void BytecodeGenerator::emitWillLeaveCallFrameDebugHook()
3542{
3543 RELEASE_ASSERT(m_scopeNode->isFunctionNode());
3544 emitDebugHook(WillLeaveCallFrame, m_scopeNode->lastLine(), m_scopeNode->startOffset(), m_scopeNode->lineStartOffset());
3545}
3546
3547void BytecodeGenerator::pushFinallyControlFlowScope(FinallyContext& finallyContext)
3548{
3549 ControlFlowScope scope(ControlFlowScope::Finally, currentLexicalScopeIndex(), &finallyContext);
3550 m_controlFlowScopeStack.append(WTFMove(scope));
3551
3552 m_finallyDepth++;
3553 m_currentFinallyContext = &finallyContext;
3554}
3555
3556void BytecodeGenerator::popFinallyControlFlowScope()
3557{
3558 ASSERT(m_controlFlowScopeStack.size());
3559 ASSERT(m_controlFlowScopeStack.last().isFinallyScope());
3560 ASSERT(m_finallyDepth > 0);
3561 ASSERT(m_currentFinallyContext);
3562 m_currentFinallyContext = m_currentFinallyContext->outerContext();
3563 m_finallyDepth--;
3564 m_controlFlowScopeStack.removeLast();
3565}
3566
3567LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
3568{
3569 shrinkToFit(m_labelScopes);
3570
3571 if (!m_labelScopes.size())
3572 return nullptr;
3573
3574 // We special-case the following, which is a syntax error in Firefox:
3575 // label:
3576 // break;
3577 if (name.isEmpty()) {
3578 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
3579 LabelScope& scope = m_labelScopes[i];
3580 if (scope.type() != LabelScope::NamedLabel)
3581 return &scope;
3582 }
3583 return nullptr;
3584 }
3585
3586 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
3587 LabelScope& scope = m_labelScopes[i];
3588 if (scope.name() && *scope.name() == name)
3589 return &scope;
3590 }
3591 return nullptr;
3592}
3593
3594LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
3595{
3596 shrinkToFit(m_labelScopes);
3597
3598 if (!m_labelScopes.size())
3599 return nullptr;
3600
3601 if (name.isEmpty()) {
3602 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
3603 LabelScope& scope = m_labelScopes[i];
3604 if (scope.type() == LabelScope::Loop) {
3605 ASSERT(scope.continueTarget());
3606 return &scope;
3607 }
3608 }
3609 return nullptr;
3610 }
3611
3612 // Continue to the loop nested nearest to the label scope that matches
3613 // 'name'.
3614 LabelScope* result = nullptr;
3615 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
3616 LabelScope& scope = m_labelScopes[i];
3617 if (scope.type() == LabelScope::Loop) {
3618 ASSERT(scope.continueTarget());
3619 result = &scope;
3620 }
3621 if (scope.name() && *scope.name() == name)
3622 return result; // may be null.
3623 }
3624 return nullptr;
3625}
3626
3627void BytecodeGenerator::allocateCalleeSaveSpace()
3628{
3629 size_t virtualRegisterCountForCalleeSaves = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters();
3630
3631 for (size_t i = 0; i < virtualRegisterCountForCalleeSaves; i++) {
3632 RegisterID* localRegister = addVar();
3633 localRegister->ref();
3634 m_localRegistersForCalleeSaveRegisters.append(localRegister);
3635 }
3636}
3637
// Allocates the function's scope register (pinned via ref), publishes it to the
// code block, emits op_get_scope to fill it, and snapshots that initial scope
// into m_topMostScope so restoreScopeRegister() has a known outermost fallback.
void BytecodeGenerator::allocateAndEmitScope()
{
    m_scopeRegister = addVar();
    m_scopeRegister->ref();
    m_codeBlock->setScopeRegister(scopeRegister()->virtualRegister());
    emitGetScope();
    m_topMostScope = addVar();
    move(m_topMostScope, scopeRegister());
}
3647
3648TryData* BytecodeGenerator::pushTry(Label& start, Label& handlerLabel, HandlerType handlerType)
3649{
3650 m_tryData.append(TryData { handlerLabel, handlerType });
3651 TryData* result = &m_tryData.last();
3652
3653 m_tryContextStack.append(TryContext {
3654 start,
3655 result
3656 });
3657
3658 return result;
3659}
3660
3661void BytecodeGenerator::popTry(TryData* tryData, Label& end)
3662{
3663 m_usesExceptions = true;
3664
3665 ASSERT_UNUSED(tryData, m_tryContextStack.last().tryData == tryData);
3666
3667 m_tryRanges.append(TryRange {
3668 m_tryContextStack.last().start.copyRef(),
3669 end,
3670 m_tryContextStack.last().tryData
3671 });
3672 m_tryContextStack.removeLast();
3673}
3674
3675void BytecodeGenerator::emitOutOfLineCatchHandler(RegisterID* thrownValueRegister, RegisterID* completionTypeRegister, TryData* data)
3676{
3677 RegisterID* unused = newTemporary();
3678 emitOutOfLineExceptionHandler(unused, thrownValueRegister, completionTypeRegister, data);
3679}
3680
3681void BytecodeGenerator::emitOutOfLineFinallyHandler(RegisterID* exceptionRegister, RegisterID* completionTypeRegister, TryData* data)
3682{
3683 RegisterID* unused = newTemporary();
3684 ASSERT(completionTypeRegister);
3685 emitOutOfLineExceptionHandler(exceptionRegister, unused, completionTypeRegister, data);
3686}
3687
3688void BytecodeGenerator::emitOutOfLineExceptionHandler(RegisterID* exceptionRegister, RegisterID* thrownValueRegister, RegisterID* completionTypeRegister, TryData* data)
3689{
3690 VirtualRegister completionTypeVirtualRegister = completionTypeRegister ? completionTypeRegister : VirtualRegister();
3691 m_exceptionHandlersToEmit.append({ data, exceptionRegister, thrownValueRegister, completionTypeVirtualRegister });
3692}
3693
// Restores the scope register to the scope active at |lexicalScopeIndex|
// (e.g. after a jump across scopes). Searches the lexical scope stack from that
// index downward for the nearest entry with a materialized scope register;
// falls back to the generator's outermost scope.
void BytecodeGenerator::restoreScopeRegister(int lexicalScopeIndex)
{
    if (lexicalScopeIndex == CurrentLexicalScopeIndex)
        return; // No change needed.

    if (lexicalScopeIndex != OutermostLexicalScopeIndex) {
        ASSERT(lexicalScopeIndex < static_cast<int>(m_lexicalScopeStack.size()));
        int endIndex = lexicalScopeIndex + 1;
        // Walk outward from the target scope; not every stack entry has its own
        // scope register (m_scope may be null), so take the first that does.
        for (size_t i = endIndex; i--; ) {
            if (m_lexicalScopeStack[i].m_scope) {
                move(scopeRegister(), m_lexicalScopeStack[i].m_scope);
                return;
            }
        }
    }
    // Note that if we don't find a local scope in the current function/program,
    // we must grab the outer-most scope of this bytecode generation.
    move(scopeRegister(), m_topMostScope);
}
3713
// Convenience overload: restores to the currently active lexical scope.
void BytecodeGenerator::restoreScopeRegister()
{
    restoreScopeRegister(currentLexicalScopeIndex());
}
3718
// Translates a label-scope depth (a position in m_controlFlowScopeStack) into
// the lexical scope index that was current when that control-flow scope was
// pushed. A zero delta means no unwinding: the current scope stands.
int BytecodeGenerator::labelScopeDepthToLexicalScopeIndex(int targetLabelScopeDepth)
{
    ASSERT(labelScopeDepth() - targetLabelScopeDepth >= 0);
    size_t scopeDelta = labelScopeDepth() - targetLabelScopeDepth;
    ASSERT(scopeDelta <= m_controlFlowScopeStack.size());
    if (!scopeDelta)
        return CurrentLexicalScopeIndex;

    // The control-flow scope recorded at the target depth remembers the lexical
    // scope index in effect when it was entered.
    ControlFlowScope& targetScope = m_controlFlowScopeStack[targetLabelScopeDepth];
    return targetScope.lexicalScopeIndex;
}
3730
// Emits op_throw for the exception value in |exc| and marks this code block as
// using exceptions.
void BytecodeGenerator::emitThrow(RegisterID* exc)
{
    m_usesExceptions = true;
    OpThrow::emit(this, exc);
}
3736
// Emits op_argument_count, loading the caller-supplied argument count into |dst|.
RegisterID* BytecodeGenerator::emitArgumentCount(RegisterID* dst)
{
    OpArgumentCount::emit(this, dst);
    return dst;
}
3742
// Returns the number of local (label-style) control-flow scopes currently open.
int BytecodeGenerator::localScopeDepth() const
{
    return m_localScopeDepth;
}
3747
// Total control-flow nesting: local scopes plus finally scopes. Kept equal to
// the control-flow scope stack size by the push/pop helpers, as asserted.
int BytecodeGenerator::labelScopeDepth() const
{
    unsigned depth = localScopeDepth() + m_finallyDepth;
    ASSERT(depth == m_controlFlowScopeStack.size());
    return depth;
}
3754
3755void BytecodeGenerator::emitThrowStaticError(ErrorType errorType, RegisterID* raw)
3756{
3757 RefPtr<RegisterID> message = newTemporary();
3758 emitToString(message.get(), raw);
3759 OpThrowStaticError::emit(this, message.get(), errorType);
3760}
3761
// Throws a static error with a compile-time message: the identifier is interned
// as a string constant and fed to op_throw_static_error.
void BytecodeGenerator::emitThrowStaticError(ErrorType errorType, const Identifier& message)
{
    OpThrowStaticError::emit(this, addConstantValue(addStringConstant(message)), errorType);
}
3766
// Convenience wrapper: throws a ReferenceError with the given message.
void BytecodeGenerator::emitThrowReferenceError(const String& message)
{
    emitThrowStaticError(ErrorType::ReferenceError, Identifier::fromString(m_vm, message));
}
3771
// Convenience wrapper: throws a TypeError with the given message string.
void BytecodeGenerator::emitThrowTypeError(const String& message)
{
    emitThrowStaticError(ErrorType::TypeError, Identifier::fromString(m_vm, message));
}
3776
// Convenience wrapper: throws a TypeError with an already-interned message.
void BytecodeGenerator::emitThrowTypeError(const Identifier& message)
{
    emitThrowStaticError(ErrorType::TypeError, message);
}
3781
// Convenience wrapper: throws a RangeError with an already-interned message.
void BytecodeGenerator::emitThrowRangeError(const Identifier& message)
{
    emitThrowStaticError(ErrorType::RangeError, message);
}
3786
// Convenience wrapper: throws a generic Error signalling memory exhaustion.
void BytecodeGenerator::emitThrowOutOfMemoryError()
{
    emitThrowStaticError(ErrorType::Error, Identifier::fromString(m_vm, "Out of memory"));
}
3791
// Pushes the scope binding a named function expression's own name to itself
// (|callee|), so the function body can reference its name. See the nuance note
// below on sloppy- vs strict-mode assignment semantics.
void BytecodeGenerator::emitPushFunctionNameScope(const Identifier& property, RegisterID* callee, bool isCaptured)
{
    // There is some nuance here:
    // If we're in strict mode code, the function name scope variable acts exactly like a "const" variable.
    // If we're not in strict mode code, we want to allow bogus assignments to the name scoped variable.
    // This means any assignment to the variable won't throw, but it won't actually assign a new value to it.
    // To accomplish this, we don't report that this scope is a lexical scope. This will prevent
    // any throws when trying to assign to the variable (while still ensuring it keeps its original
    // value). There is some ugliness and exploitation of a leaky abstraction here, but it's better than
    // having a completely new op code and a class to handle name scopes which are so close in functionality
    // to lexical environments.
    VariableEnvironment nameScopeEnvironment;
    auto addResult = nameScopeEnvironment.add(property);
    if (isCaptured)
        addResult.iterator->value.setIsCaptured();
    addResult.iterator->value.setIsConst(); // The function name scope name acts like a const variable.
    unsigned numVars = m_codeBlock->m_numVars;
    pushLexicalScopeInternal(nameScopeEnvironment, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, TDZRequirement::NotUnderTDZ, ScopeType::FunctionNameScope, ScopeRegisterType::Var);
    ASSERT_UNUSED(numVars, m_codeBlock->m_numVars == static_cast<int>(numVars + 1)); // Should have only created one new "var" for the function name scope.
    bool shouldTreatAsLexicalVariable = isStrictMode();
    // Resolve the freshly-created binding in the scope we just pushed and
    // initialize it to the callee.
    Variable functionVar = variableForLocalEntry(property, m_lexicalScopeStack.last().m_symbolTable->get(NoLockingNecessary, property.impl()), m_lexicalScopeStack.last().m_symbolTableConstantIndex, shouldTreatAsLexicalVariable);
    emitPutToScope(m_lexicalScopeStack.last().m_scope, functionVar, callee, ThrowIfNotFound, InitializationMode::NotInitialization);
}
3815
3816void BytecodeGenerator::pushLocalControlFlowScope()
3817{
3818 ControlFlowScope scope(ControlFlowScope::Label, currentLexicalScopeIndex());
3819 m_controlFlowScopeStack.append(WTFMove(scope));
3820 m_localScopeDepth++;
3821}
3822
3823void BytecodeGenerator::popLocalControlFlowScope()
3824{
3825 ASSERT(m_controlFlowScopeStack.size());
3826 ASSERT(!m_controlFlowScopeStack.last().isFinallyScope());
3827 m_controlFlowScopeStack.removeLast();
3828 m_localScopeDepth--;
3829}
3830
// Pushes the lexical scope holding a catch clause's bindings; catch parameters
// are under TDZ until initialized.
void BytecodeGenerator::emitPushCatchScope(VariableEnvironment& environment)
{
    pushLexicalScopeInternal(environment, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, TDZRequirement::UnderTDZ, ScopeType::CatchScope, ScopeRegisterType::Block);
}
3835
// Pops the catch-clause scope pushed by emitPushCatchScope.
void BytecodeGenerator::emitPopCatchScope(VariableEnvironment& environment)
{
    popLexicalScopeInternal(environment);
}
3840
// Starts a jump-table switch over |scrutineeRegister|: allocates the proper
// jump table on the code block, emits the matching switch opcode with a
// placeholder default target, and records the instruction offset so endSwitch
// can patch the table later.
void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
{
    switch (type) {
    case SwitchInfo::SwitchImmediate: {
        size_t tableIndex = m_codeBlock->numberOfSwitchJumpTables();
        m_codeBlock->addSwitchJumpTable();
        OpSwitchImm::emit(this, tableIndex, BoundLabel(), scrutineeRegister);
        break;
    }
    case SwitchInfo::SwitchCharacter: {
        size_t tableIndex = m_codeBlock->numberOfSwitchJumpTables();
        m_codeBlock->addSwitchJumpTable();
        OpSwitchChar::emit(this, tableIndex, BoundLabel(), scrutineeRegister);
        break;
    }
    case SwitchInfo::SwitchString: {
        // String switches use a separate table list from the immediate/character ones.
        size_t tableIndex = m_codeBlock->numberOfStringSwitchJumpTables();
        m_codeBlock->addStringSwitchJumpTable();
        OpSwitchString::emit(this, tableIndex, BoundLabel(), scrutineeRegister);
        break;
    }
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }

    // Must be captured after the emit above so it points at the switch instruction.
    SwitchInfo info = { m_lastInstruction.offset(), type };
    m_switchContextStack.append(info);
}
3869
3870static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
3871{
3872 UNUSED_PARAM(max);
3873 ASSERT(node->isNumber());
3874 double value = static_cast<NumberNode*>(node)->value();
3875 int32_t key = static_cast<int32_t>(value);
3876 ASSERT(key == value);
3877 ASSERT(key >= min);
3878 ASSERT(key <= max);
3879 return key - min;
3880}
3881
3882static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
3883{
3884 UNUSED_PARAM(max);
3885 ASSERT(node->isString());
3886 StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
3887 ASSERT(clause->length() == 1);
3888
3889 int32_t key = (*clause)[0];
3890 ASSERT(key >= min);
3891 ASSERT(key <= max);
3892 return key - min;
3893}
3894
// Builds the dense branch-offset table covering [min, max] for an immediate or
// character switch. Slots with no matching clause are left at offset 0
// (presumably meaning "fall back to the default target" — handled by the
// consumer of the table, not visible here). keyGetter maps a clause node to
// its table slot.
static void prepareJumpTableForSwitch(
    UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount,
    const Vector<Ref<Label>, 8>& labels, ExpressionNode** nodes, int32_t min, int32_t max,
    int32_t (*keyGetter)(ExpressionNode*, int32_t min, int32_t max))
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyGetter(nodes[i], min, max), labels[i]->bind(switchAddress));
    }
}
3910
// Populates a string switch's offset table, keyed by each clause's string
// literal; each entry's offset is bound against the switch's bytecode offset.
static void prepareJumpTableForStringSwitch(UnlinkedStringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, const Vector<Ref<Label>, 8>& labels, ExpressionNode** nodes)
{
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels should have been fixed, so
        // the labels should not be "forward" references
        ASSERT(!labels[i]->isForward());

        ASSERT(nodes[i]->isString());
        StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
        jumpTable.offsetTable.add(clause, UnlinkedStringJumpTable::OffsetLocation { labels[i]->bind(switchAddress) });
    }
}
3923
void BytecodeGenerator::endSwitch(uint32_t clauseCount, const Vector<Ref<Label>, 8>& labels, ExpressionNode** nodes, Label& defaultLabel, int32_t min, int32_t max)
{
    // Completes the switch begun by beginSwitch(): binds the default jump
    // target and fills in the jump table now that every clause label has been
    // emitted. min/max bound the clause keys for the table-based kinds.
    SwitchInfo switchInfo = m_switchContextStack.last();
    m_switchContextStack.removeLast();

    BoundLabel defaultTarget = defaultLabel.bind(switchInfo.bytecodeOffset);
    // Shared patching path for the two numeric jump-table kinds. The functor
    // passed to setDefaultOffset is the fallback for when the offset cannot be
    // stored inline; it records an out-of-line jump target instead.
    auto handleSwitch = [&](auto* op, auto bytecode) {
        op->setDefaultOffset(defaultTarget, [&]() {
            m_codeBlock->addOutOfLineJumpTarget(switchInfo.bytecodeOffset, defaultTarget);
            return BoundLabel();
        });

        UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->switchJumpTable(bytecode.m_tableIndex);
        prepareJumpTableForSwitch(
            jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max,
            switchInfo.switchType == SwitchInfo::SwitchImmediate
                ? keyForImmediateSwitch
                : keyForCharacterSwitch);
    };

    auto ref = m_writer.ref(switchInfo.bytecodeOffset);
    switch (switchInfo.switchType) {
    case SwitchInfo::SwitchImmediate: {
        handleSwitch(ref->cast<OpSwitchImm>(), ref->as<OpSwitchImm>());
        break;
    }
    case SwitchInfo::SwitchCharacter: {
        handleSwitch(ref->cast<OpSwitchChar>(), ref->as<OpSwitchChar>());
        break;
    }

    case SwitchInfo::SwitchString: {
        // String switches use a table keyed by the clause strings rather than
        // a dense offset table, so they take a separate path.
        ref->cast<OpSwitchString>()->setDefaultOffset(defaultTarget, [&]() {
            m_codeBlock->addOutOfLineJumpTarget(switchInfo.bytecodeOffset, defaultTarget);
            return BoundLabel();
        });

        UnlinkedStringJumpTable& jumpTable = m_codeBlock->stringSwitchJumpTable(ref->as<OpSwitchString>().m_tableIndex);
        prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
        break;
    }

    default:
        RELEASE_ASSERT_NOT_REACHED();
        break;
    }
}
3971
RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
{
    // Marks generation as failed because expression nesting got too deep;
    // m_expressionTooDeep is presumably consumed by the caller of bytecode
    // generation (outside this function). A fresh temporary is returned so
    // the caller can keep emitting without special-casing the failure.
    // It would be nice to do an even better job of identifying exactly where the expression is.
    // And we could make the caller pass the node pointer in, if there was some way of getting
    // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
    // is still good enough to get us an accurate line number.
    m_expressionTooDeep = true;
    return newTemporary();
}
3981
3982bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
3983{
3984 RegisterID* registerID = variable(ident).local();
3985 if (!registerID)
3986 return false;
3987 return registerID->index() == CallFrame::argumentOffset(argumentNumber);
3988}
3989
3990bool BytecodeGenerator::emitReadOnlyExceptionIfNeeded(const Variable& variable)
3991{
3992 // If we're in strict mode, we always throw.
3993 // If we're not in strict mode, we throw for "const" variables but not the function callee.
3994 if (isStrictMode() || variable.isConst()) {
3995 emitThrowTypeError(Identifier::fromString(m_vm, ReadonlyPropertyWriteError));
3996 return true;
3997 }
3998 return false;
3999}
4000
void BytecodeGenerator::emitEnumeration(ThrowableExpressionData* node, ExpressionNode* subjectNode, const ScopedLambda<void(BytecodeGenerator&, RegisterID*)>& callBack, ForOfNode* forLoopNode, RegisterID* forLoopSymbolTable)
{
    // Emits an iterator-driven enumeration (for-of / for-await-of style):
    // fetches the iterator and its next method, then loops calling next() and
    // handing each produced value to callBack (which emits the loop body).
    // Most of the code below implements the IteratorClose protocol for abrupt
    // completions (throw, break, return), which must call iterator.return
    // when it is present.
    bool isForAwait = forLoopNode ? forLoopNode->isForAwait() : false;
    ASSERT(!isForAwait || (isForAwait && isAsyncFunctionParseMode(parseMode())));

    RefPtr<RegisterID> subject = newTemporary();
    emitNode(subject.get(), subjectNode);
    RefPtr<RegisterID> iterator = isForAwait ? emitGetAsyncIterator(subject.get(), node) : emitGetIterator(subject.get(), node);
    RefPtr<RegisterID> nextMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().next);

    Ref<Label> loopDone = newLabel();
    Ref<Label> tryStartLabel = newLabel();
    Ref<Label> finallyViaThrowLabel = newLabel();
    Ref<Label> finallyLabel = newLabel();
    Ref<Label> catchLabel = newLabel();
    Ref<Label> endCatchLabel = newLabel();

    // RefPtr<Register> iterator's lifetime must be longer than IteratorCloseContext.
    FinallyContext finallyContext(*this, finallyLabel.get());
    pushFinallyControlFlowScope(finallyContext);

    {
        Ref<LabelScope> scope = newLabelScope(LabelScope::Loop);
        RefPtr<RegisterID> value = newTemporary();
        emitLoad(value.get(), jsUndefined());

        // Jump straight to the first next() call; the body only runs after a
        // value has been produced.
        emitJump(*scope->continueTarget());

        Ref<Label> loopStart = newLabel();
        emitLabel(loopStart.get());
        emitLoopHint();

        // The loop body runs inside a synthesized try so that any throw routes
        // through the finally below, which performs IteratorClose.
        emitLabel(tryStartLabel.get());
        TryData* tryData = pushTry(tryStartLabel.get(), finallyViaThrowLabel.get(), HandlerType::SynthesizedFinally);
        callBack(*this, value.get());
        emitJump(*scope->continueTarget());

        // IteratorClose sequence for abrupt completions.
        {
            // Finally block for the enumeration.
            emitLabel(finallyViaThrowLabel.get());
            popTry(tryData, finallyViaThrowLabel.get());

            Ref<Label> finallyBodyLabel = newLabel();
            RefPtr<RegisterID> finallyExceptionRegister = newTemporary();

            // Entered via throw: stash the exception so it can be re-thrown
            // after the iterator has been closed.
            emitOutOfLineFinallyHandler(finallyContext.completionValueRegister(), finallyContext.completionTypeRegister(), tryData);
            move(finallyExceptionRegister.get(), finallyContext.completionValueRegister());
            emitJump(finallyBodyLabel.get());

            // Entered via the finally control-flow path: no pending exception.
            emitLabel(finallyLabel.get());
            moveEmptyValue(finallyExceptionRegister.get());

            // Finally fall through case.
            emitLabel(finallyBodyLabel.get());
            restoreScopeRegister();

            Ref<Label> finallyDone = newLabel();

            // IteratorClose is skipped entirely when iterator.return is undefined.
            RefPtr<RegisterID> returnMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().returnKeyword);
            emitJumpIfTrue(emitIsUndefined(newTemporary(), returnMethod.get()), finallyDone.get());

            Ref<Label> returnCallTryStart = newLabel();
            emitLabel(returnCallTryStart.get());
            TryData* returnCallTryData = pushTry(returnCallTryStart.get(), catchLabel.get(), HandlerType::SynthesizedCatch);

            CallArguments returnArguments(*this, nullptr);
            move(returnArguments.thisRegister(), iterator.get());
            emitCall(value.get(), returnMethod.get(), NoExpectedFunction, returnArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);

            if (isForAwait)
                emitAwait(value.get());

            // Per spec, the result of iterator.return must be an object.
            emitJumpIfTrue(emitIsObject(newTemporary(), value.get()), finallyDone.get());
            emitThrowTypeError("Iterator result interface is not an object."_s);

            emitLabel(finallyDone.get());
            emitFinallyCompletion(finallyContext, endCatchLabel.get());

            popTry(returnCallTryData, finallyDone.get());

            // Catch block for exceptions that may be thrown while calling the return
            // handler in the enumeration finally block. The only reason we need this
            // catch block is because if entered the above finally block due to a thrown
            // exception, then we want to re-throw the original exception on exiting
            // the finally block. Otherwise, we'll let any new exception pass through.
            {
                emitLabel(catchLabel.get());

                RefPtr<RegisterID> exceptionRegister = newTemporary();
                emitOutOfLineFinallyHandler(exceptionRegister.get(), finallyContext.completionTypeRegister(), returnCallTryData);
                // Since this is a synthesized catch block and we're guaranteed to never need
                // to resolve any symbols from the scope, we can skip restoring the scope
                // register here.

                Ref<Label> throwLabel = newLabel();
                // Prefer the original exception (when there is one) over the
                // exception raised by the return() call itself.
                emitJumpIfTrue(emitIsEmpty(newTemporary(), finallyExceptionRegister.get()), throwLabel.get());
                move(exceptionRegister.get(), finallyExceptionRegister.get());

                emitLabel(throwLabel.get());
                emitThrow(exceptionRegister.get());

                emitLabel(endCatchLabel.get());
            }
        }

        emitLabel(*scope->continueTarget());
        if (forLoopNode) {
            RELEASE_ASSERT(forLoopNode->isForOfNode());
            prepareLexicalScopeForNextForLoopIteration(forLoopNode, forLoopSymbolTable);
            emitDebugHook(forLoopNode->lexpr());
        }

        {
            // Advance: value <- next(); exit when result.done is truthy,
            // otherwise unwrap result.value and run the body again.
            emitIteratorNext(value.get(), nextMethod.get(), iterator.get(), node, isForAwait ? EmitAwait::Yes : EmitAwait::No);

            emitJumpIfTrue(emitGetById(newTemporary(), value.get(), propertyNames().done), loopDone.get());
            emitGetById(value.get(), value.get(), propertyNames().value);
            emitJump(loopStart.get());
        }

        bool breakLabelIsBound = scope->breakTargetMayBeBound();
        if (breakLabelIsBound)
            emitLabel(scope->breakTarget());
        popFinallyControlFlowScope();
        if (breakLabelIsBound) {
            // IteratorClose sequence for control flow that exits the loop via break.
            emitIteratorClose(iterator.get(), node, isForAwait ? EmitAwait::Yes : EmitAwait::No);
        }
    }
    emitLabel(loopDone.get());
}
4133
4134RegisterID* BytecodeGenerator::emitGetTemplateObject(RegisterID* dst, TaggedTemplateNode* taggedTemplate)
4135{
4136 TemplateObjectDescriptor::StringVector rawStrings;
4137 TemplateObjectDescriptor::OptionalStringVector cookedStrings;
4138
4139 TemplateStringListNode* templateString = taggedTemplate->templateLiteral()->templateStrings();
4140 for (; templateString; templateString = templateString->next()) {
4141 auto* string = templateString->value();
4142 ASSERT(string->raw());
4143 rawStrings.append(string->raw()->impl());
4144 if (!string->cooked())
4145 cookedStrings.append(WTF::nullopt);
4146 else
4147 cookedStrings.append(string->cooked()->impl());
4148 }
4149 RefPtr<RegisterID> constant = addTemplateObjectConstant(TemplateObjectDescriptor::create(WTFMove(rawStrings), WTFMove(cookedStrings)));
4150 if (!dst)
4151 return constant.get();
4152 return move(dst, constant.get());
4153}
4154
4155RegisterID* BytecodeGenerator::emitGetGlobalPrivate(RegisterID* dst, const Identifier& property)
4156{
4157 dst = tempDestination(dst);
4158 Variable var = variable(property);
4159 if (RegisterID* local = var.local())
4160 return move(dst, local);
4161
4162 RefPtr<RegisterID> scope = newTemporary();
4163 move(scope.get(), emitResolveScope(scope.get(), var));
4164 return emitGetFromScope(dst, scope.get(), var, ThrowIfNotFound);
4165}
4166
RegisterID* BytecodeGenerator::emitGetEnumerableLength(RegisterID* dst, RegisterID* base)
{
    // Thin emitter for the get_enumerable_length opcode; returns dst for chaining.
    OpGetEnumerableLength::emit(this, dst, base);
    return dst;
}
4172
RegisterID* BytecodeGenerator::emitHasGenericProperty(RegisterID* dst, RegisterID* base, RegisterID* propertyName)
{
    // Thin emitter for the has_generic_property opcode; returns dst for chaining.
    OpHasGenericProperty::emit(this, dst, base, propertyName);
    return dst;
}
4178
RegisterID* BytecodeGenerator::emitHasIndexedProperty(RegisterID* dst, RegisterID* base, RegisterID* propertyName)
{
    // Thin emitter for the has_indexed_property opcode; returns dst for chaining.
    OpHasIndexedProperty::emit(this, dst, base, propertyName);
    return dst;
}
4184
RegisterID* BytecodeGenerator::emitHasStructureProperty(RegisterID* dst, RegisterID* base, RegisterID* propertyName, RegisterID* enumerator)
{
    // Thin emitter for the has_structure_property opcode; returns dst for chaining.
    OpHasStructureProperty::emit(this, dst, base, propertyName, enumerator);
    return dst;
}
4190
RegisterID* BytecodeGenerator::emitGetPropertyEnumerator(RegisterID* dst, RegisterID* base)
{
    // Thin emitter for the get_property_enumerator opcode; returns dst for chaining.
    OpGetPropertyEnumerator::emit(this, dst, base);
    return dst;
}
4196
RegisterID* BytecodeGenerator::emitEnumeratorStructurePropertyName(RegisterID* dst, RegisterID* enumerator, RegisterID* index)
{
    // Thin emitter for the enumerator_structure_pname opcode; returns dst for chaining.
    OpEnumeratorStructurePname::emit(this, dst, enumerator, index);
    return dst;
}
4202
RegisterID* BytecodeGenerator::emitEnumeratorGenericPropertyName(RegisterID* dst, RegisterID* enumerator, RegisterID* index)
{
    // Thin emitter for the enumerator_generic_pname opcode; returns dst for chaining.
    OpEnumeratorGenericPname::emit(this, dst, enumerator, index);
    return dst;
}
4208
RegisterID* BytecodeGenerator::emitToIndexString(RegisterID* dst, RegisterID* index)
{
    // Thin emitter for the to_index_string opcode; returns dst for chaining.
    OpToIndexString::emit(this, dst, index);
    return dst;
}
4214
RegisterID* BytecodeGenerator::emitIsCellWithType(RegisterID* dst, RegisterID* src, JSType type)
{
    // Thin emitter for the is_cell_with_type opcode (type check against a
    // specific JSType); returns dst for chaining.
    OpIsCellWithType::emit(this, dst, src, type);
    return dst;
}
4220
RegisterID* BytecodeGenerator::emitIsObject(RegisterID* dst, RegisterID* src)
{
    // Thin emitter for the is_object opcode; returns dst for chaining.
    OpIsObject::emit(this, dst, src);
    return dst;
}
4226
RegisterID* BytecodeGenerator::emitIsNumber(RegisterID* dst, RegisterID* src)
{
    // Thin emitter for the is_number opcode; returns dst for chaining.
    OpIsNumber::emit(this, dst, src);
    return dst;
}
4232
RegisterID* BytecodeGenerator::emitIsUndefined(RegisterID* dst, RegisterID* src)
{
    // Thin emitter for the is_undefined opcode; returns dst for chaining.
    OpIsUndefined::emit(this, dst, src);
    return dst;
}
4238
RegisterID* BytecodeGenerator::emitIsUndefinedOrNull(RegisterID* dst, RegisterID* src)
{
    // Thin emitter for the is_undefined_or_null opcode; returns dst for chaining.
    OpIsUndefinedOrNull::emit(this, dst, src);
    return dst;
}
4244
RegisterID* BytecodeGenerator::emitIsEmpty(RegisterID* dst, RegisterID* src)
{
    // Thin emitter for the is_empty opcode (tests for the empty JSValue, e.g.
    // TDZ sentinels); returns dst for chaining.
    OpIsEmpty::emit(this, dst, src);
    return dst;
}
4250
RegisterID* BytecodeGenerator::emitIteratorNext(RegisterID* dst, RegisterID* nextMethod, RegisterID* iterator, const ThrowableExpressionData* node, EmitAwait doEmitAwait)
{
    // dst <- iterator.next(), awaited when requested (for-await-of), followed
    // by the check that the iterator result is an object (throws a TypeError
    // otherwise).
    {
        CallArguments nextArguments(*this, nullptr);
        move(nextArguments.thisRegister(), iterator);
        emitCall(dst, nextMethod, NoExpectedFunction, nextArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);

        if (doEmitAwait == EmitAwait::Yes)
            emitAwait(dst);
    }
    {
        Ref<Label> typeIsObject = newLabel();
        emitJumpIfTrue(emitIsObject(newTemporary(), dst), typeIsObject.get());
        emitThrowTypeError("Iterator result interface is not an object."_s);
        emitLabel(typeIsObject.get());
    }
    return dst;
}
4269
4270RegisterID* BytecodeGenerator::emitIteratorNextWithValue(RegisterID* dst, RegisterID* nextMethod, RegisterID* iterator, RegisterID* value, const ThrowableExpressionData* node)
4271{
4272 {
4273 CallArguments nextArguments(*this, nullptr, 1);
4274 move(nextArguments.thisRegister(), iterator);
4275 move(nextArguments.argumentRegister(0), value);
4276 emitCall(dst, nextMethod, NoExpectedFunction, nextArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);
4277 }
4278
4279 return dst;
4280}
4281
void BytecodeGenerator::emitIteratorClose(RegisterID* iterator, const ThrowableExpressionData* node, EmitAwait doEmitAwait)
{
    // Emits the IteratorClose protocol: when iterator.return is undefined the
    // sequence is a no-op; otherwise iterator.return() is called (awaited for
    // async iterators) and a TypeError is thrown unless it produced an object.
    Ref<Label> done = newLabel();
    RefPtr<RegisterID> returnMethod = emitGetById(newTemporary(), iterator, propertyNames().returnKeyword);
    emitJumpIfTrue(emitIsUndefined(newTemporary(), returnMethod.get()), done.get());

    RefPtr<RegisterID> value = newTemporary();
    CallArguments returnArguments(*this, nullptr);
    move(returnArguments.thisRegister(), iterator);
    emitCall(value.get(), returnMethod.get(), NoExpectedFunction, returnArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);

    if (doEmitAwait == EmitAwait::Yes)
        emitAwait(value.get());

    emitJumpIfTrue(emitIsObject(newTemporary(), value.get()), done.get());
    emitThrowTypeError("Iterator result interface is not an object."_s);
    emitLabel(done.get());
}
4300
4301void BytecodeGenerator::pushIndexedForInScope(RegisterID* localRegister, RegisterID* indexRegister)
4302{
4303 if (!localRegister)
4304 return;
4305 unsigned bodyBytecodeStartOffset = instructions().size();
4306 m_forInContextStack.append(adoptRef(*new IndexedForInContext(localRegister, indexRegister, bodyBytecodeStartOffset)));
4307}
4308
4309void BytecodeGenerator::popIndexedForInScope(RegisterID* localRegister)
4310{
4311 if (!localRegister)
4312 return;
4313 unsigned bodyBytecodeEndOffset = instructions().size();
4314 m_forInContextStack.last()->asIndexedForInContext().finalize(*this, m_codeBlock.get(), bodyBytecodeEndOffset);
4315 m_forInContextStack.removeLast();
4316}
4317
RegisterID* BytecodeGenerator::emitLoadArrowFunctionLexicalEnvironment(const Identifier& identifier)
{
    // Resolves the scope in which `identifier` (|this|, new.target, or the
    // derived-constructor private name) was captured for arrow-function use.
    // Only meaningful in contexts that can have such a captured environment.
    ASSERT(m_codeBlock->isArrowFunction() || m_codeBlock->isArrowFunctionContext() || constructorKind() == ConstructorKind::Extends || m_codeType == EvalCode);

    return emitResolveScope(nullptr, variable(identifier, ThisResolutionType::Scoped));
}
4324
void BytecodeGenerator::emitLoadThisFromArrowFunctionLexicalEnvironment()
{
    // Re-reads the captured |this| from the arrow-function scope chain into
    // the |this| register. DoNotThrowIfNotFound avoids throwing when the
    // binding is absent.
    emitGetFromScope(thisRegister(), emitLoadArrowFunctionLexicalEnvironment(propertyNames().thisIdentifier), variable(propertyNames().thisIdentifier, ThisResolutionType::Scoped), DoNotThrowIfNotFound);
}
4329
4330RegisterID* BytecodeGenerator::emitLoadNewTargetFromArrowFunctionLexicalEnvironment()
4331{
4332 Variable newTargetVar = variable(propertyNames().builtinNames().newTargetLocalPrivateName());
4333
4334 return emitGetFromScope(m_newTargetRegister, emitLoadArrowFunctionLexicalEnvironment(propertyNames().builtinNames().newTargetLocalPrivateName()), newTargetVar, ThrowIfNotFound);
4335
4336}
4337
4338RegisterID* BytecodeGenerator::emitLoadDerivedConstructorFromArrowFunctionLexicalEnvironment()
4339{
4340 Variable protoScopeVar = variable(propertyNames().builtinNames().derivedConstructorPrivateName());
4341 return emitGetFromScope(newTemporary(), emitLoadArrowFunctionLexicalEnvironment(propertyNames().builtinNames().derivedConstructorPrivateName()), protoScopeVar, ThrowIfNotFound);
4342}
4343
RegisterID* BytecodeGenerator::ensureThis()
{
    // In derived-constructor contexts |this| may be captured in the
    // arrow-function environment; reload it when that can be the case, and
    // TDZ-check it (|this| is empty until super() has run) before handing
    // the register out.
    if (constructorKind() == ConstructorKind::Extends || isDerivedConstructorContext()) {
        if ((needsToUpdateArrowFunctionContext() && isSuperCallUsedInInnerArrowFunction()) || m_codeBlock->parseMode() == SourceParseMode::AsyncArrowFunctionBodyMode)
            emitLoadThisFromArrowFunctionLexicalEnvironment();

        emitTDZCheck(thisRegister());
    }

    return thisRegister();
}
4355
4356bool BytecodeGenerator::isThisUsedInInnerArrowFunction()
4357{
4358 return m_scopeNode->doAnyInnerArrowFunctionsUseThis() || m_scopeNode->doAnyInnerArrowFunctionsUseSuperProperty() || m_scopeNode->doAnyInnerArrowFunctionsUseSuperCall() || m_scopeNode->doAnyInnerArrowFunctionsUseEval() || m_codeBlock->usesEval();
4359}
4360
4361bool BytecodeGenerator::isArgumentsUsedInInnerArrowFunction()
4362{
4363 return m_scopeNode->doAnyInnerArrowFunctionsUseArguments() || m_scopeNode->doAnyInnerArrowFunctionsUseEval();
4364}
4365
4366bool BytecodeGenerator::isNewTargetUsedInInnerArrowFunction()
4367{
4368 return m_scopeNode->doAnyInnerArrowFunctionsUseNewTarget() || m_scopeNode->doAnyInnerArrowFunctionsUseSuperCall() || m_scopeNode->doAnyInnerArrowFunctionsUseEval() || m_codeBlock->usesEval();
4369}
4370
4371bool BytecodeGenerator::isSuperUsedInInnerArrowFunction()
4372{
4373 return m_scopeNode->doAnyInnerArrowFunctionsUseSuperCall() || m_scopeNode->doAnyInnerArrowFunctionsUseSuperProperty() || m_scopeNode->doAnyInnerArrowFunctionsUseEval() || m_codeBlock->usesEval();
4374}
4375
4376bool BytecodeGenerator::isSuperCallUsedInInnerArrowFunction()
4377{
4378 return m_scopeNode->doAnyInnerArrowFunctionsUseSuperCall() || m_scopeNode->doAnyInnerArrowFunctionsUseEval() || m_codeBlock->usesEval();
4379}
4380
4381void BytecodeGenerator::emitPutNewTargetToArrowFunctionContextScope()
4382{
4383 if (isNewTargetUsedInInnerArrowFunction()) {
4384 ASSERT(m_arrowFunctionContextLexicalEnvironmentRegister);
4385
4386 Variable newTargetVar = variable(propertyNames().builtinNames().newTargetLocalPrivateName());
4387 emitPutToScope(m_arrowFunctionContextLexicalEnvironmentRegister, newTargetVar, newTarget(), DoNotThrowIfNotFound, InitializationMode::Initialization);
4388 }
4389}
4390
4391void BytecodeGenerator::emitPutDerivedConstructorToArrowFunctionContextScope()
4392{
4393 if (needsDerivedConstructorInArrowFunctionLexicalEnvironment()) {
4394 ASSERT(m_arrowFunctionContextLexicalEnvironmentRegister);
4395
4396 Variable protoScope = variable(propertyNames().builtinNames().derivedConstructorPrivateName());
4397 emitPutToScope(m_arrowFunctionContextLexicalEnvironmentRegister, protoScope, &m_calleeRegister, DoNotThrowIfNotFound, InitializationMode::Initialization);
4398 }
4399}
4400
4401void BytecodeGenerator::emitPutThisToArrowFunctionContextScope()
4402{
4403 if (isThisUsedInInnerArrowFunction() || (m_scopeNode->usesSuperCall() && m_codeType == EvalCode)) {
4404 ASSERT(isDerivedConstructorContext() || m_arrowFunctionContextLexicalEnvironmentRegister != nullptr);
4405
4406 Variable thisVar = variable(propertyNames().thisIdentifier, ThisResolutionType::Scoped);
4407 RegisterID* scope = isDerivedConstructorContext() ? emitLoadArrowFunctionLexicalEnvironment(propertyNames().thisIdentifier) : m_arrowFunctionContextLexicalEnvironmentRegister;
4408
4409 emitPutToScope(scope, thisVar, thisRegister(), ThrowIfNotFound, InitializationMode::NotInitialization);
4410 }
4411}
4412
4413void BytecodeGenerator::pushStructureForInScope(RegisterID* localRegister, RegisterID* indexRegister, RegisterID* propertyRegister, RegisterID* enumeratorRegister)
4414{
4415 if (!localRegister)
4416 return;
4417 unsigned bodyBytecodeStartOffset = instructions().size();
4418 m_forInContextStack.append(adoptRef(*new StructureForInContext(localRegister, indexRegister, propertyRegister, enumeratorRegister, bodyBytecodeStartOffset)));
4419}
4420
4421void BytecodeGenerator::popStructureForInScope(RegisterID* localRegister)
4422{
4423 if (!localRegister)
4424 return;
4425 unsigned bodyBytecodeEndOffset = instructions().size();
4426 m_forInContextStack.last()->asStructureForInContext().finalize(*this, m_codeBlock.get(), bodyBytecodeEndOffset);
4427 m_forInContextStack.removeLast();
4428}
4429
4430RegisterID* BytecodeGenerator::emitRestParameter(RegisterID* result, unsigned numParametersToSkip)
4431{
4432 RefPtr<RegisterID> restArrayLength = newTemporary();
4433 OpGetRestLength::emit(this, restArrayLength.get(), numParametersToSkip);
4434
4435 OpCreateRest::emit(this, result, restArrayLength.get(), numParametersToSkip);
4436
4437 return result;
4438}
4439
void BytecodeGenerator::emitRequireObjectCoercible(RegisterID* value, const String& error)
{
    // RequireObjectCoercible(value): throw a TypeError carrying `error` when
    // value compares equal to null/undefined; otherwise fall through.
    // FIXME: op_jneq_null treats "undetectable" objects as null/undefined. RequireObjectCoercible
    // thus incorrectly throws a TypeError for interfaces like HTMLAllCollection.
    Ref<Label> target = newLabel();
    OpJneqNull::emit(this, value, target->bind(this));
    emitThrowTypeError(error);
    emitLabel(target.get());
}
4449
void BytecodeGenerator::emitYieldPoint(RegisterID* argument, JSAsyncGeneratorFunction::AsyncGeneratorSuspendReason result)
{
    // Emits a generator suspension point: bumps the generator state, splits
    // every open try range around the yield (the op_yield itself must not be
    // covered by any handler), emits op_yield, and resumes at mergePoint.
    Ref<Label> mergePoint = newLabel();
    unsigned yieldPointIndex = m_yieldPoints++;
    emitGeneratorStateChange(yieldPointIndex + 1);

    if (parseMode() == SourceParseMode::AsyncGeneratorBodyMode) {
        // Record why the async generator suspended (yield vs. await) on the
        // generator object under a private name.
        int suspendReason = static_cast<int32_t>(result);
        emitPutById(generatorRegister(), propertyNames().builtinNames().asyncGeneratorSuspendReasonPrivateName(), emitLoad(nullptr, jsNumber(suspendReason)));
    }

    // Split the try range here.
    Ref<Label> savePoint = newEmittedLabel();
    for (unsigned i = m_tryContextStack.size(); i--;) {
        TryContext& context = m_tryContextStack[i];
        m_tryRanges.append(TryRange {
            context.start.copyRef(),
            savePoint.copyRef(),
            context.tryData
        });
        // Try range will be restarted at the merge point.
        context.start = mergePoint.get();
    }
    // Temporarily empty the try context stack so the yield itself is emitted
    // outside of every try range.
    Vector<TryContext> savedTryContextStack;
    m_tryContextStack.swap(savedTryContextStack);


#if CPU(NEEDS_ALIGNED_ACCESS)
    // conservatively align for the bytecode rewriter: it will delete this yield and
    // append a fragment, so we make sure that the start of the fragments is aligned
    while (m_writer.position() % OpcodeSize::Wide)
        OpNop::emit<OpcodeSize::Narrow>(this);
#endif
    OpYield::emit(this, generatorFrameRegister(), yieldPointIndex, argument);

    // Restore the try contexts, whose start offsets were updated to the merge point.
    m_tryContextStack.swap(savedTryContextStack);
    emitLabel(mergePoint.get());
}
4489
RegisterID* BytecodeGenerator::emitYield(RegisterID* argument, JSAsyncGeneratorFunction::AsyncGeneratorSuspendReason result)
{
    // Emits a yield plus the resume dispatch that follows it. On resumption
    // the generator's resume-mode register selects the path: NormalMode falls
    // through and the sent value is returned to the caller of this function;
    // ThrowMode throws the sent value; any other mode returns it, routing
    // through enclosing finally blocks when present.
    emitYieldPoint(argument, result);

    Ref<Label> normalLabel = newLabel();
    RefPtr<RegisterID> condition = newTemporary();
    emitEqualityOp<OpStricteq>(condition.get(), generatorResumeModeRegister(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSGeneratorFunction::GeneratorResumeMode::NormalMode))));
    emitJumpIfTrue(condition.get(), normalLabel.get());

    Ref<Label> throwLabel = newLabel();
    emitEqualityOp<OpStricteq>(condition.get(), generatorResumeModeRegister(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSGeneratorFunction::GeneratorResumeMode::ThrowMode))));
    emitJumpIfTrue(condition.get(), throwLabel.get());
    // Return.
    {
        RefPtr<RegisterID> returnRegister = generatorValueRegister();
        bool hasFinally = emitReturnViaFinallyIfNeeded(returnRegister.get());
        if (!hasFinally)
            emitReturn(returnRegister.get());
    }

    // Throw.
    emitLabel(throwLabel.get());
    emitThrow(generatorValueRegister());

    // Normal.
    emitLabel(normalLabel.get());
    return generatorValueRegister();
}
4518
RegisterID* BytecodeGenerator::emitCallIterator(RegisterID* iterator, RegisterID* argument, ThrowableExpressionData* node)
{
    // Calls the method currently held in `iterator` with `argument` as |this|
    // and stores the call result back into `iterator`, which is returned.
    CallArguments args(*this, nullptr);
    move(args.thisRegister(), argument);
    emitCall(iterator, iterator, NoExpectedFunction, args, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);

    return iterator;
}
4527
void BytecodeGenerator::emitAwait(RegisterID* value)
{
    // Await is emitted as a yield with the Await suspend reason; the value
    // sent back on resumption replaces the awaited value in place.
    emitYield(value, JSAsyncGeneratorFunction::AsyncGeneratorSuspendReason::Await);
    move(value, generatorValueRegister());
}
4533
RegisterID* BytecodeGenerator::emitGetIterator(RegisterID* argument, ThrowableExpressionData* node)
{
    // GetIterator(argument): load argument[Symbol.iterator] and call it with
    // argument as |this|; the call result becomes the iterator.
    RefPtr<RegisterID> iterator = emitGetById(newTemporary(), argument, propertyNames().iteratorSymbol);
    emitCallIterator(iterator.get(), argument, node);

    return iterator.get();
}
4541
RegisterID* BytecodeGenerator::emitGetAsyncIterator(RegisterID* argument, ThrowableExpressionData* node)
{
    // GetIterator(argument, async): prefer argument[Symbol.asyncIterator];
    // when that is null/undefined, fall back to the sync iterator wrapped via
    // the @createAsyncFromSyncIterator builtin.
    RefPtr<RegisterID> iterator = emitGetById(newTemporary(), argument, propertyNames().asyncIteratorSymbol);
    Ref<Label> asyncIteratorNotFound = newLabel();
    Ref<Label> asyncIteratorFound = newLabel();
    Ref<Label> iteratorReceived = newLabel();

    emitJumpIfTrue(emitUnaryOp<OpEqNull>(newTemporary(), iterator.get()), asyncIteratorNotFound.get());

    emitJump(asyncIteratorFound.get());
    emitLabel(asyncIteratorNotFound.get());

    // Fallback path: fetch the sync iterator and its next method...
    RefPtr<RegisterID> commonIterator = emitGetIterator(argument, node);
    move(iterator.get(), commonIterator.get());

    RefPtr<RegisterID> nextMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().next);

    // ...resolve the @createAsyncFromSyncIterator private builtin...
    auto varCreateAsyncFromSyncIterator = variable(propertyNames().builtinNames().createAsyncFromSyncIteratorPrivateName());
    RefPtr<RegisterID> scope = newTemporary();
    move(scope.get(), emitResolveScope(scope.get(), varCreateAsyncFromSyncIterator));
    RefPtr<RegisterID> createAsyncFromSyncIterator = emitGetFromScope(newTemporary(), scope.get(), varCreateAsyncFromSyncIterator, ThrowIfNotFound);

    // ...and call it with (syncIterator, nextMethod) to build the wrapper.
    CallArguments args(*this, nullptr, 2);
    emitLoad(args.thisRegister(), jsUndefined());

    move(args.argumentRegister(0), iterator.get());
    move(args.argumentRegister(1), nextMethod.get());

    JSTextPosition divot(m_scopeNode->firstLine(), m_scopeNode->startOffset(), m_scopeNode->lineStartOffset());
    emitCall(iterator.get(), createAsyncFromSyncIterator.get(), NoExpectedFunction, args, divot, divot, divot, DebuggableCall::No);

    emitJump(iteratorReceived.get());

    // Fast path: an async iterator method was present; invoke it directly.
    emitLabel(asyncIteratorFound.get());
    emitCallIterator(iterator.get(), argument, node);
    emitLabel(iteratorReceived.get());

    return iterator.get();
}
4581
// Emits the bytecode for `yield*` (delegated yield): obtains the delegated
// iterator, then loops — yielding each inner result outward and forwarding the
// outer generator's resume mode (normal / throw / return) back into the inner
// iterator via its next/throw/return methods. Returns the register that holds
// the final result's .value.
RegisterID* BytecodeGenerator::emitDelegateYield(RegisterID* argument, ThrowableExpressionData* node)
{
    RefPtr<RegisterID> value = newTemporary();
    {
        // For async generators, delegate through the async iterator protocol.
        RefPtr<RegisterID> iterator = parseMode() == SourceParseMode::AsyncGeneratorBodyMode ? emitGetAsyncIterator(argument, node) : emitGetIterator(argument, node);
        // Cache the iterator's "next" method once, per spec.
        RefPtr<RegisterID> nextMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().next);

        Ref<Label> loopDone = newLabel();
        {
            Ref<Label> nextElement = newLabel();
            // First iteration: call next() with undefined as the sent value.
            emitLoad(value.get(), jsUndefined());

            emitJump(nextElement.get());

            Ref<Label> loopStart = newLabel();
            emitLabel(loopStart.get());
            emitLoopHint();

            Ref<Label> branchOnResult = newLabel();
            {
                // Suspend here; on resume, generatorResumeModeRegister() tells
                // us whether we were resumed with next(), return(), or throw().
                emitYieldPoint(value.get(), JSAsyncGeneratorFunction::AsyncGeneratorSuspendReason::Yield);

                Ref<Label> normalLabel = newLabel();
                Ref<Label> returnLabel = newLabel();
                {
                    // Dispatch on the resume mode.
                    RefPtr<RegisterID> condition = newTemporary();
                    emitEqualityOp<OpStricteq>(condition.get(), generatorResumeModeRegister(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSGeneratorFunction::GeneratorResumeMode::NormalMode))));
                    emitJumpIfTrue(condition.get(), normalLabel.get());

                    emitEqualityOp<OpStricteq>(condition.get(), generatorResumeModeRegister(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSGeneratorFunction::GeneratorResumeMode::ReturnMode))));
                    emitJumpIfTrue(condition.get(), returnLabel.get());

                    // Fallthrough to ThrowMode.
                }

                // Throw.
                {
                    Ref<Label> throwMethodFound = newLabel();
                    RefPtr<RegisterID> throwMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().throwKeyword);
                    // Jump when the throw method is NOT undefined (i.e. it exists).
                    emitJumpIfFalse(emitIsUndefined(newTemporary(), throwMethod.get()), throwMethodFound.get());

                    // No throw method: close the inner iterator (awaiting the
                    // close in async generators), then raise a TypeError.
                    EmitAwait emitAwaitInIteratorClose = parseMode() == SourceParseMode::AsyncGeneratorBodyMode ? EmitAwait::Yes : EmitAwait::No;
                    emitIteratorClose(iterator.get(), node, emitAwaitInIteratorClose);

                    emitThrowTypeError("Delegated generator does not have a 'throw' method."_s);

                    emitLabel(throwMethodFound.get());
                    // Forward the thrown value into the inner iterator:
                    // value = iterator.throw(sentValue).
                    CallArguments throwArguments(*this, nullptr, 1);
                    move(throwArguments.thisRegister(), iterator.get());
                    move(throwArguments.argumentRegister(0), generatorValueRegister());
                    emitCall(value.get(), throwMethod.get(), NoExpectedFunction, throwArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);

                    emitJump(branchOnResult.get());
                }

                // Return.
                emitLabel(returnLabel.get());
                {
                    Ref<Label> returnMethodFound = newLabel();
                    RefPtr<RegisterID> returnMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().returnKeyword);
                    // Jump when the return method is NOT undefined (i.e. it exists).
                    emitJumpIfFalse(emitIsUndefined(newTemporary(), returnMethod.get()), returnMethodFound.get());

                    // No return method: just return the sent value from the
                    // outer generator.
                    move(value.get(), generatorValueRegister());

                    Ref<Label> returnSequence = newLabel();
                    emitJump(returnSequence.get());

                    emitLabel(returnMethodFound.get());
                    // result = iterator.return(sentValue)
                    CallArguments returnArguments(*this, nullptr, 1);
                    move(returnArguments.thisRegister(), iterator.get());
                    move(returnArguments.argumentRegister(0), generatorValueRegister());
                    emitCall(value.get(), returnMethod.get(), NoExpectedFunction, returnArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);

                    if (parseMode() == SourceParseMode::AsyncGeneratorBodyMode)
                        emitAwait(value.get());

                    // Per spec, the result of return() must be an object.
                    Ref<Label> returnIteratorResultIsObject = newLabel();
                    emitJumpIfTrue(emitIsObject(newTemporary(), value.get()), returnIteratorResultIsObject.get());
                    emitThrowTypeError("Iterator result interface is not an object."_s);

                    emitLabel(returnIteratorResultIsObject.get());

                    Ref<Label> returnFromGenerator = newLabel();
                    emitJumpIfTrue(emitGetById(newTemporary(), value.get(), propertyNames().done), returnFromGenerator.get());

                    // Not done: loop again with result.value as the yielded value.
                    emitGetById(value.get(), value.get(), propertyNames().value);
                    emitJump(loopStart.get());

                    // Done: return result.value from the outer generator.
                    emitLabel(returnFromGenerator.get());
                    emitGetById(value.get(), value.get(), propertyNames().value);

                    emitLabel(returnSequence.get());
                    // Thread the return through enclosing finally blocks if any.
                    bool hasFinally = emitReturnViaFinallyIfNeeded(value.get());
                    if (!hasFinally)
                        emitReturn(value.get());
                }

                // Normal.
                emitLabel(normalLabel.get());
                // Pass the value sent via next() on to the inner iterator.
                move(value.get(), generatorValueRegister());
            }

            emitLabel(nextElement.get());
            // result = nextMethod.call(iterator, value)
            emitIteratorNextWithValue(value.get(), nextMethod.get(), iterator.get(), value.get(), node);

            emitLabel(branchOnResult.get());

            if (parseMode() == SourceParseMode::AsyncGeneratorBodyMode)
                emitAwait(value.get());

            // Per spec, every iterator result must be an object.
            Ref<Label> iteratorValueIsObject = newLabel();
            emitJumpIfTrue(emitIsObject(newTemporary(), value.get()), iteratorValueIsObject.get());
            emitThrowTypeError("Iterator result interface is not an object."_s);
            emitLabel(iteratorValueIsObject.get());

            // Exit the loop once result.done is true; otherwise yield result.value.
            emitJumpIfTrue(emitGetById(newTemporary(), value.get(), propertyNames().done), loopDone.get());
            emitGetById(value.get(), value.get(), propertyNames().value);

            emitJump(loopStart.get());
        }
        emitLabel(loopDone.get());
    }

    // The expression's value is the final result's .value.
    emitGetById(value.get(), value.get(), propertyNames().value);
    return value.get();
}
4708
4709
4710void BytecodeGenerator::emitGeneratorStateChange(int32_t state)
4711{
4712 RegisterID* completedState = emitLoad(nullptr, jsNumber(state));
4713 emitPutById(generatorRegister(), propertyNames().builtinNames().generatorStatePrivateName(), completedState);
4714}
4715
4716bool BytecodeGenerator::emitJumpViaFinallyIfNeeded(int targetLabelScopeDepth, Label& jumpTarget)
4717{
4718 ASSERT(labelScopeDepth() - targetLabelScopeDepth >= 0);
4719 size_t numberOfScopesToCheckForFinally = labelScopeDepth() - targetLabelScopeDepth;
4720 ASSERT(numberOfScopesToCheckForFinally <= m_controlFlowScopeStack.size());
4721 if (!numberOfScopesToCheckForFinally)
4722 return false;
4723
4724 FinallyContext* innermostFinallyContext = nullptr;
4725 FinallyContext* outermostFinallyContext = nullptr;
4726 size_t scopeIndex = m_controlFlowScopeStack.size() - 1;
4727 while (numberOfScopesToCheckForFinally--) {
4728 ControlFlowScope* scope = &m_controlFlowScopeStack[scopeIndex--];
4729 if (scope->isFinallyScope()) {
4730 FinallyContext* finallyContext = scope->finallyContext;
4731 if (!innermostFinallyContext)
4732 innermostFinallyContext = finallyContext;
4733 outermostFinallyContext = finallyContext;
4734 finallyContext->incNumberOfBreaksOrContinues();
4735 }
4736 }
4737 if (!outermostFinallyContext)
4738 return false; // No finallys to thread through.
4739
4740 auto jumpID = bytecodeOffsetToJumpID(instructions().size());
4741 int lexicalScopeIndex = labelScopeDepthToLexicalScopeIndex(targetLabelScopeDepth);
4742 outermostFinallyContext->registerJump(jumpID, lexicalScopeIndex, jumpTarget);
4743
4744 emitLoad(innermostFinallyContext->completionTypeRegister(), jumpID);
4745 emitJump(*innermostFinallyContext->finallyLabel());
4746 return true; // We'll be jumping to a finally block.
4747}
4748
4749bool BytecodeGenerator::emitReturnViaFinallyIfNeeded(RegisterID* returnRegister)
4750{
4751 size_t numberOfScopesToCheckForFinally = m_controlFlowScopeStack.size();
4752 if (!numberOfScopesToCheckForFinally)
4753 return false;
4754
4755 FinallyContext* innermostFinallyContext = nullptr;
4756 while (numberOfScopesToCheckForFinally) {
4757 size_t scopeIndex = --numberOfScopesToCheckForFinally;
4758 ControlFlowScope* scope = &m_controlFlowScopeStack[scopeIndex];
4759 if (scope->isFinallyScope()) {
4760 FinallyContext* finallyContext = scope->finallyContext;
4761 if (!innermostFinallyContext)
4762 innermostFinallyContext = finallyContext;
4763 finallyContext->setHandlesReturns();
4764 }
4765 }
4766 if (!innermostFinallyContext)
4767 return false; // No finallys to thread through.
4768
4769 emitLoad(innermostFinallyContext->completionTypeRegister(), CompletionType::Return);
4770 move(innermostFinallyContext->completionValueRegister(), returnRegister);
4771 emitJump(*innermostFinallyContext->finallyLabel());
4772 return true; // We'll be jumping to a finally block.
4773}
4774
4775void BytecodeGenerator::emitFinallyCompletion(FinallyContext& context, Label& normalCompletionLabel)
4776{
4777 if (context.numberOfBreaksOrContinues() || context.handlesReturns()) {
4778 emitJumpIf<OpStricteq>(context.completionTypeRegister(), CompletionType::Normal, normalCompletionLabel);
4779
4780 FinallyContext* outerContext = context.outerContext();
4781
4782 size_t numberOfJumps = context.numberOfJumps();
4783 ASSERT(outerContext || numberOfJumps == context.numberOfBreaksOrContinues());
4784
4785 // Handle Break or Continue completions that jumps into this FinallyContext.
4786 for (size_t i = 0; i < numberOfJumps; i++) {
4787 Ref<Label> nextLabel = newLabel();
4788 auto& jump = context.jumps(i);
4789 emitJumpIf<OpNstricteq>(context.completionTypeRegister(), jump.jumpID, nextLabel.get());
4790
4791 // This case is for Break / Continue completions from an inner finally context
4792 // with a jump target that is not beyond the next outer finally context:
4793 //
4794 // try {
4795 // for (... stuff ...) {
4796 // try {
4797 // continue; // Sets completionType to jumpID of top of the for loop.
4798 // } finally {
4799 // } // Jump to top of the for loop on completion.
4800 // }
4801 // } finally {
4802 // }
4803 //
4804 // Since the jumpID is targetting a label that is inside the outer finally context,
4805 // we can jump to it directly on completion of this finally context: there is no intermediate
4806 // finally blocks to run. After the Break / Continue, we will contnue execution as normal.
4807 // So, we'll set the completionType to Normal (on behalf of the target) before we jump.
4808 // We can also set the completion value to undefined, but it will never be used for normal
4809 // completion anyway. So, we'll skip setting it.
4810
4811 restoreScopeRegister(jump.targetLexicalScopeIndex);
4812 emitLoad(context.completionTypeRegister(), CompletionType::Normal);
4813 emitJump(jump.targetLabel.get());
4814
4815 emitLabel(nextLabel.get());
4816 }
4817
4818 // Handle completions that take us out of this FinallyContext.
4819 if (outerContext) {
4820 if (context.handlesReturns()) {
4821 Ref<Label> isNotReturnLabel = newLabel();
4822 emitJumpIf<OpNstricteq>(context.completionTypeRegister(), CompletionType::Return, isNotReturnLabel.get());
4823
4824 // This case is for Return completion from an inner finally context:
4825 //
4826 // try {
4827 // try {
4828 // return result; // Sets completionType to Return, and completionValue to result.
4829 // } finally {
4830 // } // Jump to outer finally on completion.
4831 // } finally {
4832 // }
4833 //
4834 // Since we know there's at least one outer finally context (beyond the current context),
4835 // we cannot actually return from here. Instead, we pass the completionType and completionValue
4836 // on to the next outer finally, and let it decide what to do next on its completion. The
4837 // outer finally may or may not actual return depending on whether it encounters an abrupt
4838 // completion in its body that overrrides this Return completion.
4839
4840 move(outerContext->completionTypeRegister(), context.completionTypeRegister());
4841 move(outerContext->completionValueRegister(), context.completionValueRegister());
4842 emitJump(*outerContext->finallyLabel());
4843
4844 emitLabel(isNotReturnLabel.get());
4845 }
4846
4847 bool hasBreaksOrContinuesThatEscapeCurrentFinally = context.numberOfBreaksOrContinues() > numberOfJumps;
4848 if (hasBreaksOrContinuesThatEscapeCurrentFinally) {
4849 Ref<Label> isThrowOrNormalLabel = newLabel();
4850 emitJumpIf<OpBeloweq>(context.completionTypeRegister(), CompletionType::Throw, isThrowOrNormalLabel.get());
4851
4852 // A completionType above Throw means we have a Break or Continue encoded as a jumpID.
4853 // We already ruled out Return above.
4854 static_assert(CompletionType::Throw < CompletionType::Return && CompletionType::Throw < CompletionType::Return, "jumpIDs are above CompletionType::Return");
4855
4856 // This case is for Break / Continue completions in an inner finally context:
4857 //
4858 // 10: label:
4859 // 11: try {
4860 // 12: try {
4861 // 13: for (... stuff ...)
4862 // 14: break label; // Sets completionType to jumpID of label.
4863 // 15: } finally {
4864 // 16: } // Jumps to outer finally on completion.
4865 // 17: } finally {
4866 // 18: }
4867 //
4868 // The break (line 14) says to continue execution at the label at line 10. Before we can
4869 // goto line 10, the inner context's finally (line 15) needs to be run, followed by the
4870 // outer context's finally (line 17). 'outerContext' being non-null above tells us that
4871 // there is at least one outer finally context that we need to run after we complete the
4872 // current finally. Note that unless the body of the outer finally abruptly completes in a
4873 // different way, that outer finally also needs to complete with a Break / Continue to
4874 // the same target label. Hence, we need to pass the jumpID in this finally's completionTypeRegister
4875 // to the outer finally. The completion value for Break and Continue according to the spec
4876 // is undefined, but it won't ever be used. So, we'll skip setting it.
4877 //
4878 // Note that all we're doing here is passing the Break / Continue completion to the next
4879 // outer finally context. We don't worry about finally contexts beyond that. It is the
4880 // responsibility of the next outer finally to determine what to do next at its completion,
4881 // and pass on to the next outer context if present and needed.
4882
4883 move(outerContext->completionTypeRegister(), context.completionTypeRegister());
4884 emitJump(*outerContext->finallyLabel());
4885
4886 emitLabel(isThrowOrNormalLabel.get());
4887 }
4888
4889 } else {
4890 // We are the outermost finally.
4891 if (context.handlesReturns()) {
4892 Ref<Label> notReturnLabel = newLabel();
4893 emitJumpIf<OpNstricteq>(context.completionTypeRegister(), CompletionType::Return, notReturnLabel.get());
4894
4895 // This case is for Return completion from the outermost finally context:
4896 //
4897 // try {
4898 // return result; // Sets completionType to Return, and completionValue to result.
4899 // } finally {
4900 // } // Executes the return of the completionValue.
4901 //
4902 // Since we know there's no outer finally context (beyond the current context) to run,
4903 // we can actually execute a return for this Return completion. The value to return
4904 // is whatever is in the completionValueRegister.
4905
4906 emitWillLeaveCallFrameDebugHook();
4907 emitReturn(context.completionValueRegister(), ReturnFrom::Finally);
4908
4909 emitLabel(notReturnLabel.get());
4910 }
4911 }
4912 }
4913
4914 // By now, we've rule out all Break / Continue / Return completions above. The only remaining
4915 // possibilities are Normal or Throw.
4916
4917 emitJumpIf<OpNstricteq>(context.completionTypeRegister(), CompletionType::Throw, normalCompletionLabel);
4918
4919 // We get here because we entered this finally context with Throw completionType (i.e. we have
4920 // an exception that we need to rethrow), and we didn't encounter a different abrupt completion
4921 // that overrides that incoming completionType. All we have to do here is re-throw the exception
4922 // captured in the completionValue.
4923 //
4924 // Note that unlike for Break / Continue / Return, we don't need to worry about outer finally
4925 // contexts. This is because any outer finally context (if present) will have its own exception
4926 // handler, which will take care of receiving the Throw completion, and re-capturing the exception
4927 // in its completionValue.
4928
4929 emitThrow(context.completionValueRegister());
4930}
4931
4932template<typename CompareOp>
4933void BytecodeGenerator::emitJumpIf(RegisterID* completionTypeRegister, CompletionType type, Label& jumpTarget)
4934{
4935 RefPtr<RegisterID> tempRegister = newTemporary();
4936 RegisterID* valueConstant = addConstantValue(jsNumber(static_cast<int>(type)));
4937 OperandTypes operandTypes = OperandTypes(ResultType::numberTypeIsInt32(), ResultType::unknownType());
4938
4939 auto equivalenceResult = emitBinaryOp<CompareOp>(tempRegister.get(), completionTypeRegister, valueConstant, operandTypes);
4940 emitJumpIfTrue(equivalenceResult, jumpTarget);
4941}
4942
4943void ForInContext::finalize(BytecodeGenerator& generator, UnlinkedCodeBlock* codeBlock, unsigned bodyBytecodeEndOffset)
4944{
4945 // Lexically invalidating ForInContexts is kind of weak sauce, but it only occurs if
4946 // either of the following conditions is true:
4947 //
4948 // (1) The loop iteration variable is re-assigned within the body of the loop.
4949 // (2) The loop iteration variable is captured in the lexical scope of the function.
4950 //
4951 // These two situations occur sufficiently rarely that it's okay to use this style of
4952 // "analysis" to make iteration faster. If we didn't want to do this, we would either have
4953 // to perform some flow-sensitive analysis to see if/when the loop iteration variable was
4954 // reassigned, or we'd have to resort to runtime checks to see if the variable had been
4955 // reassigned from its original value.
4956
4957 for (unsigned offset = bodyBytecodeStartOffset(); isValid() && offset < bodyBytecodeEndOffset;) {
4958 auto instruction = generator.instructions().at(offset);
4959 OpcodeID opcodeID = instruction->opcodeID();
4960
4961 ASSERT(opcodeID != op_enter);
4962 computeDefsForBytecodeOffset(codeBlock, opcodeID, instruction.ptr(), [&] (VirtualRegister operand) {
4963 if (local()->virtualRegister() == operand)
4964 invalidate();
4965 });
4966 offset += instruction->size();
4967 }
4968}
4969
// If this structure-based for-in context was invalidated (see
// ForInContext::finalize), rewrite every recorded get_direct_pname back into a
// generic wide get_by_val on the original property register, padding any
// leftover bytes with nops.
void StructureForInContext::finalize(BytecodeGenerator& generator, UnlinkedCodeBlock* codeBlock, unsigned bodyBytecodeEndOffset)
{
    Base::finalize(generator, codeBlock, bodyBytecodeEndOffset);
    if (isValid())
        return;

    // Save the generator's last-instruction bookkeeping: re-emitting below
    // moves the writer and clobbers both fields.
    OpcodeID lastOpcodeID = generator.m_lastOpcodeID;
    InstructionStream::MutableRef lastInstruction = generator.m_lastInstruction;
    for (const auto& instTuple : m_getInsts) {
        unsigned instIndex = std::get<0>(instTuple);
        int propertyRegIndex = std::get<1>(instTuple);
        auto instruction = generator.m_writer.ref(instIndex);
        auto end = instIndex + instruction->size();
        // The replacement is emitted wide, so the original must be wide too
        // for the rewrite to fit in place.
        ASSERT(instruction->isWide());

        // Reposition the writer to overwrite the instruction in place.
        generator.m_writer.seek(instIndex);

        auto bytecode = instruction->as<OpGetDirectPname>();

        // disable peephole optimizations
        generator.m_lastOpcodeID = op_end;

        // Change the opcode to get_by_val.
        // 1. dst stays the same.
        // 2. base stays the same.
        // 3. property gets switched to the original property.
        OpGetByVal::emit<OpcodeSize::Wide>(&generator, bytecode.m_dst, bytecode.m_base, VirtualRegister(propertyRegIndex));

        // 4. nop out the remaining bytes
        while (generator.m_writer.position() < end)
            OpNop::emit<OpcodeSize::Narrow>(&generator);
    }
    // Reposition the writer back at the end of the stream. Restore the saved
    // last-instruction bookkeeping only if the current bookkeeping no longer
    // points at the stream's tail.
    generator.m_writer.seek(generator.m_writer.size());
    if (generator.m_lastInstruction.offset() + generator.m_lastInstruction->size() != generator.m_writer.size()) {
        generator.m_lastOpcodeID = lastOpcodeID;
        generator.m_lastInstruction = lastInstruction;
    }
}
5008
5009void IndexedForInContext::finalize(BytecodeGenerator& generator, UnlinkedCodeBlock* codeBlock, unsigned bodyBytecodeEndOffset)
5010{
5011 Base::finalize(generator, codeBlock, bodyBytecodeEndOffset);
5012 if (isValid())
5013 return;
5014
5015 for (const auto& instPair : m_getInsts) {
5016 unsigned instIndex = instPair.first;
5017 int propertyRegIndex = instPair.second;
5018 // FIXME: we should not have to force this get_by_val to be wide, just guarantee that propertyRegIndex fits
5019 // https://bugs.webkit.org/show_bug.cgi?id=190929
5020 generator.m_writer.ref(instIndex)->cast<OpGetByVal>()->setProperty(VirtualRegister(propertyRegIndex), []() {
5021 ASSERT_NOT_REACHED();
5022 return VirtualRegister();
5023 });
5024 }
5025}
5026
5027void StaticPropertyAnalysis::record()
5028{
5029 auto* instruction = m_instructionRef.ptr();
5030 auto size = m_propertyIndexes.size();
5031 switch (instruction->opcodeID()) {
5032 case OpNewObject::opcodeID:
5033 instruction->cast<OpNewObject>()->setInlineCapacity(size, []() {
5034 return 255;
5035 });
5036 return;
5037 case OpCreateThis::opcodeID:
5038 instruction->cast<OpCreateThis>()->setInlineCapacity(size, []() {
5039 return 255;
5040 });
5041 return;
5042 default:
5043 ASSERT_NOT_REACHED();
5044 }
5045}
5046
// Emits an op_to_this on the (killed and re-defined) |this| register, and
// registers the emitted instruction's offset with the code block as a
// property access instruction.
void BytecodeGenerator::emitToThis()
{
    OpToThis::emit(this, kill(&m_thisRegister));
    m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
}
5052
5053} // namespace JSC
5054
5055namespace WTF {
5056
5057void printInternal(PrintStream& out, JSC::Variable::VariableKind kind)
5058{
5059 switch (kind) {
5060 case JSC::Variable::NormalVariable:
5061 out.print("Normal");
5062 return;
5063 case JSC::Variable::SpecialVariable:
5064 out.print("Special");
5065 return;
5066 }
5067 RELEASE_ASSERT_NOT_REACHED();
5068}
5069
5070} // namespace WTF
5071
5072