/*
 * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "VM.h"

#include "ArgList.h"
#include "ArrayBufferNeuteringWatchpointSet.h"
#include "BuiltinExecutables.h"
#include "BytecodeIntrinsicRegistry.h"
#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
#include "CommonSlowPaths.h"
#include "CustomGetterSetter.h"
#include "DFGWorklist.h"
#include "DirectEvalExecutable.h"
#include "Disassembler.h"
#include "DoublePredictionFuzzerAgent.h"
#include "Error.h"
#include "ErrorConstructor.h"
#include "ErrorInstance.h"
#include "EvalCodeBlock.h"
#include "Exception.h"
#include "ExecutableToCodeBlockEdge.h"
#include "FTLThunks.h"
#include "FastMallocAlignedMemoryAllocator.h"
#include "FunctionCodeBlock.h"
#include "FunctionConstructor.h"
#include "FunctionExecutable.h"
#include "GCActivityCallback.h"
#include "GetterSetter.h"
#include "GigacageAlignedMemoryAllocator.h"
#include "HasOwnPropertyCache.h"
#include "Heap.h"
#include "HeapIterationScope.h"
#include "HeapProfiler.h"
#include "HostCallReturnValue.h"
#include "Identifier.h"
#include "IncrementalSweeper.h"
#include "IndirectEvalExecutable.h"
#include "Interpreter.h"
#include "IntlCollatorConstructor.h"
#include "IntlDateTimeFormatConstructor.h"
#include "IntlNumberFormatConstructor.h"
#include "IntlPluralRulesConstructor.h"
#include "JITCode.h"
#include "JITWorklist.h"
#include "JSAPIValueWrapper.h"
#include "JSArray.h"
#include "JSArrayBufferConstructor.h"
#include "JSAsyncFunction.h"
#include "JSBigInt.h"
#include "JSBoundFunction.h"
#include "JSCInlines.h"
#include "JSCallbackFunction.h"
#include "JSCustomGetterSetterFunction.h"
#include "JSDestructibleObjectHeapCellType.h"
#include "JSFixedArray.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSImmutableButterfly.h"
#include "JSInternalPromiseDeferred.h"
#include "JSLock.h"
#include "JSMap.h"
#include "JSMapIterator.h"
#include "JSPromiseDeferred.h"
#include "JSPropertyNameEnumerator.h"
#include "JSScriptFetchParameters.h"
#include "JSScriptFetcher.h"
#include "JSSet.h"
#include "JSSetIterator.h"
#include "JSSourceCode.h"
#include "JSStringHeapCellType.h"
#include "JSTemplateObjectDescriptor.h"
#include "JSWeakMap.h"
#include "JSWeakSet.h"
#include "JSWebAssembly.h"
#include "JSWebAssemblyCodeBlock.h"
#include "JSWebAssemblyCodeBlockHeapCellType.h"
#include "JSWithScope.h"
#include "LLIntData.h"
#include "Lexer.h"
#include "Lookup.h"
#include "MinimumReservedZoneSize.h"
#include "ModuleProgramCodeBlock.h"
#include "ModuleProgramExecutable.h"
#include "NativeErrorConstructor.h"
#include "NativeExecutable.h"
#include "NativeStdFunctionCell.h"
#include "Nodes.h"
#include "ObjCCallbackFunction.h"
#include "Parser.h"
#include "ProfilerDatabase.h"
#include "ProgramCodeBlock.h"
#include "ProgramExecutable.h"
#include "PromiseDeferredTimer.h"
#include "PropertyMapHashTable.h"
#include "ProxyRevoke.h"
#include "RandomizingFuzzerAgent.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
#include "RegisterAtOffsetList.h"
#include "RuntimeType.h"
#include "SamplingProfiler.h"
#include "ShadowChicken.h"
#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
#include "StructureInlines.h"
#include "TestRunnerUtils.h"
#include "ThunkGenerators.h"
#include "TypeProfiler.h"
#include "TypeProfilerLog.h"
#include "UnlinkedCodeBlock.h"
#include "VMEntryScope.h"
#include "VMInlines.h"
#include "VMInspector.h"
#include "VariableEnvironment.h"
#include "WasmWorklist.h"
#include "Watchdog.h"
#include "WeakGCMapInlines.h"
#include "WebAssemblyFunction.h"
#include "WebAssemblyFunctionHeapCellType.h"
#include "WebAssemblyWrapperFunction.h"
#include <wtf/ProcessID.h>
#include <wtf/ReadWriteLock.h>
#include <wtf/SimpleStats.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/text/AtomStringTable.h>
#include <wtf/text/SymbolRegistry.h>

#if ENABLE(C_LOOP)
#include "CLoopStack.h"
#include "CLoopStackInlines.h"
#endif

#if ENABLE(DFG_JIT)
#include "ConservativeRoots.h"
#endif

#if ENABLE(REGEXP_TRACING)
#include "RegExp.h"
#endif

namespace JSC {

#if ENABLE(JIT)
#if !ASSERT_DISABLED
bool VM::s_canUseJITIsSet = false;
#endif
bool VM::s_canUseJIT = false;
#endif

Atomic<unsigned> VM::s_numberOfIDs;

// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.

#if ENABLE(ASSEMBLER)
static bool enableAssembler()
{
    if (!Options::useJIT())
        return false;

    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    if (canUseJITString && !atoi(canUseJITString))
        return false;

    ExecutableAllocator::initializeUnderlyingAllocator();
    if (!ExecutableAllocator::singleton().isValid()) {
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

    return true;
}
#endif // ENABLE(ASSEMBLER)

bool VM::canUseAssembler()
{
#if ENABLE(ASSEMBLER)
    static std::once_flag onceKey;
    static bool enabled = false;
    std::call_once(onceKey, [] {
        enabled = enableAssembler();
    });
    return enabled;
#else
    return false; // interpreter only
#endif
}

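// computeCanUseJIT() is expected to run exactly once during process initialization,
// before anyone asks canUseJIT(); the assertion on s_canUseJITIsSet below enforces
// that set-once discipline in debug builds.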
void VM::computeCanUseJIT()
{
#if ENABLE(JIT)
#if !ASSERT_DISABLED
    RELEASE_ASSERT(!s_canUseJITIsSet);
    s_canUseJITIsSet = true;
#endif
    s_canUseJIT = VM::canUseAssembler() && Options::useJIT();
#endif
}

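// nextID() hands out VM IDs with a lock-free compare-and-swap loop: if another
// thread bumps the counter first, the weak CAS fails and we retry with a freshly
// loaded value. IDs are monotonically increasing and start at 1.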
inline unsigned VM::nextID()
{
    for (;;) {
        unsigned currentNumberOfIDs = s_numberOfIDs.load();
        unsigned newID = currentNumberOfIDs + 1;
        if (s_numberOfIDs.compareExchangeWeak(currentNumberOfIDs, newID))
            return newID;
    }
}

static bool vmCreationShouldCrash = false;

VM::VM(VMType vmType, HeapType heapType)
    : m_id(nextID())
    , m_apiLock(adoptRef(new JSLock(this)))
#if USE(CF)
    , m_runLoop(CFRunLoopGetCurrent())
#endif // USE(CF)
    , heap(this, heapType)
    , fastMallocAllocator(std::make_unique<FastMallocAlignedMemoryAllocator>())
    , primitiveGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::Primitive))
    , jsValueGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::JSValue))
    , auxiliaryHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary)))
    , immutableButterflyHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCellWithInteriorPointers)))
    , cellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCell)))
    , destructibleCellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(NeedsDestruction, HeapCell::JSCell)))
    , stringHeapCellType(std::make_unique<JSStringHeapCellType>())
    , destructibleObjectHeapCellType(std::make_unique<JSDestructibleObjectHeapCellType>())
#if ENABLE(WEBASSEMBLY)
    , webAssemblyCodeBlockHeapCellType(std::make_unique<JSWebAssemblyCodeBlockHeapCellType>())
    , webAssemblyFunctionHeapCellType(std::make_unique<WebAssemblyFunctionHeapCellType>())
#endif
    , primitiveGigacageAuxiliarySpace("Primitive Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), primitiveGigacageAllocator.get())
    , jsValueGigacageAuxiliarySpace("JSValue Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), jsValueGigacageAllocator.get())
    , immutableButterflyJSValueGigacageAuxiliarySpace("ImmutableButterfly Gigacage JSCellWithInteriorPointers", heap, immutableButterflyHeapCellType.get(), jsValueGigacageAllocator.get())
    , cellSpace("JSCell", heap, cellHeapCellType.get(), fastMallocAllocator.get())
    , jsValueGigacageCellSpace("JSValue Gigacage JSCell", heap, cellHeapCellType.get(), jsValueGigacageAllocator.get())
    , destructibleCellSpace("Destructible JSCell", heap, destructibleCellHeapCellType.get(), fastMallocAllocator.get())
    , stringSpace("JSString", heap, stringHeapCellType.get(), fastMallocAllocator.get())
    , destructibleObjectSpace("JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
    , eagerlySweptDestructibleObjectSpace("Eagerly Swept JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
    , executableToCodeBlockEdgeSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), ExecutableToCodeBlockEdge)
    , functionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSFunction)
    , internalFunctionSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), InternalFunction)
    , nativeExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), NativeExecutable)
    , propertyTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), PropertyTable)
    , structureRareDataSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), StructureRareData)
    , structureSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), Structure)
    , symbolTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), SymbolTable)
    , executableToCodeBlockEdgesWithConstraints(executableToCodeBlockEdgeSpace)
    , executableToCodeBlockEdgesWithFinalizers(executableToCodeBlockEdgeSpace)
    , codeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), CodeBlock)
    , functionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionExecutable)
    , programExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramExecutable)
    , unlinkedFunctionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), UnlinkedFunctionExecutable)
    , vmType(vmType)
    , clientData(0)
    , topEntryFrame(nullptr)
    , topCallFrame(CallFrame::noCaller())
    , promiseDeferredTimer(std::make_unique<PromiseDeferredTimer>(*this))
    , m_atomStringTable(vmType == Default ? Thread::current().atomStringTable() : new AtomStringTable)
    , propertyNames(nullptr)
    , emptyList(new ArgList)
    , machineCodeBytesPerBytecodeWordForBaselineJIT(std::make_unique<SimpleStats>())
    , customGetterSetterFunctionMap(*this)
    , stringCache(*this)
    , symbolImplToSymbolMap(*this)
    , structureCache(*this)
    , interpreter(0)
    , entryScope(0)
    , m_regExpCache(new RegExpCache(this))
    , m_compactVariableMap(adoptRef(*(new CompactVariableMap)))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(0)
    , m_codeCache(std::make_unique<CodeCache>())
    , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
    , m_typeProfilerEnabledCount(0)
    , m_primitiveGigacageEnabled(IsWatched)
    , m_controlFlowProfilerEnabledCount(0)
{
    if (UNLIKELY(vmCreationShouldCrash))
        CRASH_WITH_INFO(0x4242424220202020, 0xbadbeef0badbeef, 0x1234123412341234, 0x1337133713371337);

    interpreter = new Interpreter(*this);
    StackBounds stack = Thread::current().stack();
    updateSoftReservedZoneSize(Options::softReservedZoneSize());
    setLastStackTop(stack.origin());

    JSRunLoopTimer::Manager::shared().registerVM(*this);

    // Need to be careful to keep everything consistent here
    JSLockHolder lock(this);
    AtomStringTable* existingEntryAtomStringTable = Thread::current().setCurrentAtomStringTable(m_atomStringTable);
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));

    smallStrings.initializeCommonStrings(*this);

    propertyNames = new CommonIdentifiers(this);
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
    propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
    domAttributeGetterSetterStructure.set(*this, DOMAttributeGetterSetter::createStructure(*this, 0, jsNull()));
    scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
#if ENABLE(WEBASSEMBLY)
    webAssemblyCodeBlockStructure.set(*this, JSWebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
#endif
    moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
    symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
    fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));

    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithInt32) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithInt32));
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithDouble) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithDouble));
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithContiguous) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithContiguous));

    sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
    scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
    scriptFetchParametersStructure.set(*this, JSScriptFetchParameters::createStructure(*this, 0, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
    templateObjectDescriptorStructure.set(*this, JSTemplateObjectDescriptor::createStructure(*this, 0, jsNull()));
    arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpointSet::createStructure(*this));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
    functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
    exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
    promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
    internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
    nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
    programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
    moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
    functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
    hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
    hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
    bigIntStructure.set(*this, JSBigInt::createStructure(*this, 0, jsNull()));
    executableToCodeBlockEdgeStructure.set(*this, ExecutableToCodeBlockEdge::createStructure(*this, nullptr, jsNull()));

    // Eagerly initialize constant cells since the concurrent compiler can access them.
    if (canUseJIT()) {
        sentinelMapBucket();
        sentinelSetBucket();
    }

    Thread::current().setCurrentAtomStringTable(existingEntryAtomStringTable);

#if !ENABLE(C_LOOP)
    initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif

    Gigacage::addPrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);

    heap.notifyIsSafeToCollect();

    LLInt::Data::performAssertions(*this);

    if (UNLIKELY(Options::useProfiler())) {
        m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);

        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

    callFrameForCatch = nullptr;

    // Initialize this last, as a free way of asserting that VM initialization itself
    // won't use this.
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());

    m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);

    if (Options::useTypeProfiler())
        enableTypeProfiler();
    if (Options::useControlFlowProfiler())
        enableControlFlowProfiler();
#if ENABLE(SAMPLING_PROFILER)
    if (Options::useSamplingProfiler()) {
        setShouldBuildPCToCodeOriginMapping();
        Ref<Stopwatch> stopwatch = Stopwatch::create();
        stopwatch->start();
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
        if (Options::samplingProfilerPath())
            m_samplingProfiler->registerForReportAtExit();
        m_samplingProfiler->start();
    }
#endif // ENABLE(SAMPLING_PROFILER)

    if (Options::useRandomizingFuzzerAgent())
        setFuzzerAgent(std::make_unique<RandomizingFuzzerAgent>(*this));
    else if (Options::useDoublePredictionFuzzerAgent())
        setFuzzerAgent(std::make_unique<DoublePredictionFuzzerAgent>(*this));

    if (Options::alwaysGeneratePCToCodeOriginMap())
        setShouldBuildPCToCodeOriginMapping();

    if (Options::watchdog()) {
        Watchdog& watchdog = ensureWatchdog();
        watchdog.setTimeLimit(Seconds::fromMilliseconds(Options::watchdog()));
    }

#if ENABLE(JIT)
    // Make sure that any stubs that the JIT is going to use are initialized in non-compilation threads.
    if (canUseJIT()) {
        jitStubs = std::make_unique<JITThunks>();
#if ENABLE(FTL_JIT)
        ftlThunks = std::make_unique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)
        getCTIInternalFunctionTrampolineFor(CodeForCall);
        getCTIInternalFunctionTrampolineFor(CodeForConstruct);
    }
#endif

    if (Options::forceDebuggerBytecodeGeneration() || Options::alwaysUseShadowChicken())
        ensureShadowChicken();

    VMInspector::instance().add(this);
}

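// Every ~VM() holds s_destructionLock for reading while it runs, so taking the
// write lock in waitForVMDestruction() blocks until all in-flight VM destructions
// have completed.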
static ReadWriteLock s_destructionLock;

void waitForVMDestruction()
{
    auto locker = holdLock(s_destructionLock.write());
}

VM::~VM()
{
    auto destructionLocker = holdLock(s_destructionLock.read());

    Gigacage::removePrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
    promiseDeferredTimer->stopRunningTasks();
#if ENABLE(WEBASSEMBLY)
    if (Wasm::Worklist* worklist = Wasm::existingWorklistOrNull())
        worklist->stopAllPlansForContext(wasmContext);
#endif
    if (UNLIKELY(m_watchdog))
        m_watchdog->willDestroyVM(this);
    m_traps.willDestroyVM();
    VMInspector::instance().remove(this);

    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(SAMPLING_PROFILER)
    if (m_samplingProfiler) {
        m_samplingProfiler->reportDataToOptionFile();
        m_samplingProfiler->shutdown();
    }
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
    if (JITWorklist* worklist = JITWorklist::existingGlobalWorklistOrNull())
        worklist->completeAllForVM(*this);
#endif // ENABLE(JIT)

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point in doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
            worklist->removeNonCompilingPlansForVM(*this);
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)

    waitForAsynchronousDisassembly();

    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler = nullptr;

    ASSERT(currentThreadIsHoldingAPILock());
    m_apiLock->willDestroyVM(this);
    smallStrings.setIsInitialized(false);
    heap.lastChanceToFinalize();

    JSRunLoopTimer::Manager::shared().unregisterVM(*this);

    delete interpreter;
#ifndef NDEBUG
    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif

    delete emptyList;

    delete propertyNames;
    if (vmType != Default)
        delete m_atomStringTable;

    delete clientData;
    delete m_regExpCache;

#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

#if ENABLE(DFG_JIT)
    for (unsigned i = 0; i < m_scratchBuffers.size(); ++i)
        fastFree(m_scratchBuffers[i]);
#endif
}

void VM::primitiveGigacageDisabledCallback(void* argument)
{
    static_cast<VM*>(argument)->primitiveGigacageDisabled();
}

void VM::primitiveGigacageDisabled()
{
    if (m_apiLock->currentThreadIsHoldingLock()) {
        m_primitiveGigacageEnabled.fireAll(*this, "Primitive gigacage disabled");
        return;
    }

    // This is totally racy, and that's OK. The point is, it's up to the user to ensure that they pass the
    // uncaged buffer in a nicely synchronized manner.
    m_needToFirePrimitiveGigacageEnabled = true;
}

void VM::setLastStackTop(void* lastStackTop)
{
    m_lastStackTop = lastStackTop;
}

Ref<VM> VM::createContextGroup(HeapType heapType)
{
    return adoptRef(*new VM(APIContextGroup, heapType));
}

Ref<VM> VM::create(HeapType heapType)
{
    return adoptRef(*new VM(Default, heapType));
}

bool VM::sharedInstanceExists()
{
    return sharedInstanceInternal();
}

VM& VM::sharedInstance()
{
    GlobalJSLock globalLock;
    VM*& instance = sharedInstanceInternal();
    if (!instance)
        instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
    return *instance;
}

VM*& VM::sharedInstanceInternal()
{
    static VM* sharedInstance;
    return sharedInstance;
}

Watchdog& VM::ensureWatchdog()
{
    if (!m_watchdog)
        m_watchdog = adoptRef(new Watchdog(this));
    return *m_watchdog;
}

HeapProfiler& VM::ensureHeapProfiler()
{
    if (!m_heapProfiler)
        m_heapProfiler = std::make_unique<HeapProfiler>(*this);
    return *m_heapProfiler;
}

#if ENABLE(SAMPLING_PROFILER)
SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
{
    if (!m_samplingProfiler)
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
    return *m_samplingProfiler;
}
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
    switch (intrinsic) {
    case CharCodeAtIntrinsic:
        return charCodeAtThunkGenerator;
    case CharAtIntrinsic:
        return charAtThunkGenerator;
    case Clz32Intrinsic:
        return clz32ThunkGenerator;
    case FromCharCodeIntrinsic:
        return fromCharCodeThunkGenerator;
    case SqrtIntrinsic:
        return sqrtThunkGenerator;
    case AbsIntrinsic:
        return absThunkGenerator;
    case FloorIntrinsic:
        return floorThunkGenerator;
    case CeilIntrinsic:
        return ceilThunkGenerator;
    case TruncIntrinsic:
        return truncThunkGenerator;
    case RoundIntrinsic:
        return roundThunkGenerator;
    case ExpIntrinsic:
        return expThunkGenerator;
    case LogIntrinsic:
        return logThunkGenerator;
    case IMulIntrinsic:
        return imulThunkGenerator;
    case RandomIntrinsic:
        return randomThunkGenerator;
    case BoundThisNoArgsFunctionCallIntrinsic:
        return boundThisNoArgsFunctionCallGenerator;
    default:
        return nullptr;
    }
}

#endif // ENABLE(JIT)

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
    return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
}

static Ref<NativeJITCode> jitCodeForCallTrampoline()
{
    static NativeJITCode* result;
    static std::once_flag onceKey;
    std::call_once(onceKey, [&] {
        result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_call_trampoline), JITType::HostCallThunk, NoIntrinsic);
    });
    return makeRef(*result);
}

static Ref<NativeJITCode> jitCodeForConstructTrampoline()
{
    static NativeJITCode* result;
    static std::once_flag onceKey;
    std::call_once(onceKey, [&] {
        result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_construct_trampoline), JITType::HostCallThunk, NoIntrinsic);
    });
    return makeRef(*result);
}

NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        return jitStubs->hostFunctionStub(
            this, function, constructor,
            intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
            intrinsic, signature, name);
    }
#endif // ENABLE(JIT)
    UNUSED_PARAM(intrinsic);
    UNUSED_PARAM(signature);
    return NativeExecutable::create(*this, jitCodeForCallTrampoline(), function, jitCodeForConstructTrampoline(), constructor, name);
}

MacroAssemblerCodePtr<JSEntryPtrTag> VM::getCTIInternalFunctionTrampolineFor(CodeSpecializationKind kind)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        if (kind == CodeForCall)
            return jitStubs->ctiInternalFunctionCall(this).retagged<JSEntryPtrTag>();
        return jitStubs->ctiInternalFunctionConstruct(this).retagged<JSEntryPtrTag>();
    }
#endif
    if (kind == CodeForCall)
        return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_call_trampoline);
    return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_construct_trampoline);
}

VM::ClientData::~ClientData()
{
}

void VM::resetDateCache()
{
    localTimeOffsetCache.reset();
    cachedDateString = String();
    cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
    dateInstanceCache.reset();
}

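// whenIdle() runs the callback immediately if we are not currently executing JS
// (no entry scope); otherwise it defers the callback until the outermost
// VMEntryScope pops, i.e. until the VM next becomes idle.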
void VM::whenIdle(Function<void()>&& callback)
{
    if (!entryScope) {
        callback();
        return;
    }

    entryScope->addDidPopListener(WTFMove(callback));
}

void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        heap.deleteAllCodeBlocks(effort);
    });
}

void VM::deleteAllCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        m_codeCache->clear();
        m_regExpCache->deleteAllCode();
        heap.deleteAllCodeBlocks(effort);
        heap.deleteAllUnlinkedCodeBlocks(effort);
        heap.reportAbandonedObjectGraph();
    });
}

void VM::shrinkFootprintWhenIdle()
{
    whenIdle([=] () {
        sanitizeStackForVM(this);
        deleteAllCode(DeleteAllCodeIfNotCollecting);
        heap.collectNow(Synchronousness::Sync, CollectionScope::Full);
        // FIXME: Consider stopping various automatic threads here.
        // https://bugs.webkit.org/show_bug.cgi?id=185447
        WTF::releaseFastMallocFreeMemory();
    });
}

SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
{
    auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
    if (addResult.isNewEntry)
        addResult.iterator->value = adoptRef(new SourceProviderCache);
    return addResult.iterator->value.get();
}

void VM::clearSourceProviderCaches()
{
    sourceProviderCacheMap.clear();
}

Exception* VM::throwException(ExecState* exec, Exception* exception)
{
    ASSERT(exec == topCallFrame || exec->isGlobalExec() || exec == exec->lexicalGlobalObject()->callFrameAtDebuggerEntry());
    CallFrame* throwOriginFrame = exec->isGlobalExec() ? exec : topJSCallFrame();

    if (Options::breakOnThrow()) {
        CodeBlock* codeBlock = throwOriginFrame ? throwOriginFrame->codeBlock() : nullptr;
        dataLog("Throwing exception in call frame ", RawPointer(throwOriginFrame), " for code block ", codeBlock, "\n");
        CRASH();
    }

    interpreter->notifyDebuggerOfExceptionToBeThrown(*this, throwOriginFrame, exception);

    setException(exception);

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
    m_nativeStackTraceOfLastThrow = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
    m_throwingThread = &Thread::current();
#endif
    return exception;
}

Exception* VM::throwException(ExecState* exec, JSValue thrownValue)
{
    VM& vm = *this;
    Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
    if (!exception)
        exception = Exception::create(*this, thrownValue);

    return throwException(exec, exception);
}

Exception* VM::throwException(ExecState* exec, JSObject* error)
{
    return throwException(exec, JSValue(error));
}

void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
    updateStackLimits();
}

size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
{
    size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
    m_currentSoftReservedZoneSize = softReservedZoneSize;
#if ENABLE(C_LOOP)
    interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
#endif

    updateStackLimits();

    return oldSoftReservedZoneSize;
}

#if OS(WINDOWS)
// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
// where the guard page is a barrier between committed and uncommitted memory.
// When data from the guard page is read or written, the guard page is moved, and memory is committed.
// This is how the system grows the stack.
// When using the C stack on Windows we need to precommit the needed stack space.
// Otherwise we might crash later if we access uncommitted stack memory.
// This can happen if we allocate stack space larger than the page guard size (4K).
// The system does not get the chance to move the guard page, and commit more memory,
// and we crash if uncommitted memory is accessed.
// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
// when needed, see http://support.microsoft.com/kb/100775.
// By touching every page up to the stack limit with a dummy operation,
// we force the system to move the guard page, and commit memory.

static void preCommitStackMemory(void* stackLimit)
{
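    // Note that &stackLimit is the address of the parameter itself, i.e. a
    // location near the current top of the C stack. Walking from there down to
    // stackLimit and touching one byte per page forces the system to move the
    // guard page and commit the memory in between.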
    const int pageSize = 4096;
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}
#endif

inline void VM::updateStackLimits()
{
#if OS(WINDOWS)
    void* lastSoftStackLimit = m_softStackLimit;
#endif

    const StackBounds& stack = Thread::current().stack();
    size_t reservedZoneSize = Options::reservedZoneSize();
    // We should have already ensured that Options::reservedZoneSize() >= minimumReservedZoneSize at
    // options initialization time, and the option value should not have been changed thereafter.
    // We don't have the ability to assert here that it hasn't changed, but we can at least assert
    // that the value is sane.
    RELEASE_ASSERT(reservedZoneSize >= minimumReservedZoneSize);

    if (m_stackPointerAtVMEntry) {
        ASSERT(stack.isGrowingDownward());
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
        m_softStackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
        m_stackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
    } else {
        m_softStackLimit = stack.recursionLimit(m_currentSoftReservedZoneSize);
        m_stackLimit = stack.recursionLimit(reservedZoneSize);
    }

#if OS(WINDOWS)
    // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
    // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
    // generated code which can allocate stack space that the C++ compiler does not know
    // about. As such, we have to precommit that stack memory manually.
    //
    // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
    // used exclusively by C++ code, and the C++ compiler will automatically commit the
    // needed stack pages.
    if (lastSoftStackLimit != m_softStackLimit)
        preCommitStackMemory(m_softStackLimit);
#endif
}

#if ENABLE(DFG_JIT)
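// Scratch buffers handed out by scratchBufferForSize() can hold JSValues (for
// example during DFG OSR exit), so any buffer with a non-zero active length must
// be scanned conservatively as part of the GC's root set.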
void VM::gatherScratchBufferRoots(ConservativeRoots& conservativeRoots)
{
    auto lock = holdLock(m_scratchBufferLock);
    for (auto* scratchBuffer : m_scratchBuffers) {
        if (scratchBuffer->activeLength()) {
            void* bufferStart = scratchBuffer->dataBuffer();
            conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
        }
    }
}
#endif

void logSanitizeStack(VM* vm)
{
    if (Options::verboseSanitizeStack() && vm->topCallFrame) {
        int dummy;
        auto& stackBounds = Thread::current().stack();
        dataLog(
            "Sanitizing stack for VM = ", RawPointer(vm), " with top call frame at ", RawPointer(vm->topCallFrame),
            ", current stack pointer at ", RawPointer(&dummy), ", in ",
            pointerDump(vm->topCallFrame->codeBlock()), ", last code origin = ",
            vm->topCallFrame->codeOrigin(), ", last stack top = ", RawPointer(vm->lastStackTop()), ", in stack range [", RawPointer(stackBounds.origin()), ", ", RawPointer(stackBounds.end()), "]\n");
    }
}

#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
char* VM::acquireRegExpPatternContexBuffer()
{
    m_regExpPatternContextLock.lock();
    ASSERT(m_regExpPatternContextLock.isLocked());
    if (!m_regExpPatternContexBuffer)
        m_regExpPatternContexBuffer = makeUniqueArray<char>(VM::patternContextBufferSize);
    return m_regExpPatternContexBuffer.get();
}

void VM::releaseRegExpPatternContexBuffer()
{
    ASSERT(m_regExpPatternContextLock.isLocked());

    m_regExpPatternContextLock.unlock();
}
#endif

#if ENABLE(REGEXP_TRACING)
void VM::addRegExpToTrace(RegExp* regExp)
{
    gcProtect(regExp);
    m_rtTraceList->add(regExp);
}

void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype ctor and not used.
    RTTraceList::iterator iter = ++m_rtTraceList->begin();

    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
        dataLogF(" <Match only / Match>                         JIT Addr       JIT Address       calls      found   String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");

        unsigned reCount = 0;

        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
            gcUnprotect(*iter);
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }

    m_rtTraceList->clear();
}
#else
void VM::dumpRegExpTrace()
{
}
#endif

WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
{
    auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
    if (result.isNewEntry)
        result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
    return result.iterator->value.get();
}

void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
{
    ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
}

void VM::addImpureProperty(const String& propertyName)
{
    if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
        watchpointSet->fireAll(*this, "Impure property added");
}

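// Type and control flow profiling can be requested by multiple clients at once,
// so each is guarded by a counter rather than a flag. Only the 0 -> 1 and 1 -> 0
// transitions do real work, and only those transitions require recompiling
// existing code.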
template<typename Func>
static bool enableProfilerWithRespectToCount(unsigned& counter, const Func& doEnableWork)
{
    bool needsToRecompile = false;
    if (!counter) {
        doEnableWork();
        needsToRecompile = true;
    }
    counter++;

    return needsToRecompile;
}

template<typename Func>
static bool disableProfilerWithRespectToCount(unsigned& counter, const Func& doDisableWork)
{
    RELEASE_ASSERT(counter > 0);
    bool needsToRecompile = false;
    counter--;
    if (!counter) {
        doDisableWork();
        needsToRecompile = true;
    }

    return needsToRecompile;
}

bool VM::enableTypeProfiler()
{
    auto enableTypeProfiler = [this] () {
        this->m_typeProfiler = std::make_unique<TypeProfiler>();
        this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>(*this);
    };

    return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
}

bool VM::disableTypeProfiler()
{
    auto disableTypeProfiler = [this] () {
        this->m_typeProfiler.reset(nullptr);
        this->m_typeProfilerLog.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
}

bool VM::enableControlFlowProfiler()
{
    auto enableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
    };

    return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
}

bool VM::disableControlFlowProfiler()
{
    auto disableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
}

void VM::dumpTypeProfilerData()
{
    if (!typeProfiler())
        return;

    typeProfilerLog()->processLogEntries(*this, "VM Dump Types"_s);
    typeProfiler()->dumpTypeProfilerData(*this);
}

void VM::queueMicrotask(JSGlobalObject& globalObject, Ref<Microtask>&& task)
{
    m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, &globalObject, WTFMove(task)));
}

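// drainMicrotasks() keeps running until the queue is empty, so microtasks
// enqueued while draining (e.g. promise reactions scheduled by other microtasks)
// are executed in the same drain.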
void VM::drainMicrotasks()
{
    while (!m_microtaskQueue.isEmpty()) {
        m_microtaskQueue.takeFirst()->run();
        if (m_onEachMicrotaskTick)
            m_onEachMicrotaskTick(*this);
    }
}

void QueuedTask::run()
{
    m_microtask->run(m_globalObject->globalExec());
}

void sanitizeStackForVM(VM* vm)
{
    logSanitizeStack(vm);
    if (vm->topCallFrame) {
        auto& stackBounds = Thread::current().stack();
        ASSERT(vm->currentThreadIsHoldingAPILock());
        ASSERT_UNUSED(stackBounds, stackBounds.contains(vm->lastStackTop()));
    }
#if ENABLE(C_LOOP)
    vm->interpreter->cloopStack().sanitizeStack();
#else
    sanitizeStackForVMImpl(vm);
#endif
}

size_t VM::committedStackByteCount()
{
#if !ENABLE(C_LOOP)
    // When using the C stack, we don't know how many stack pages are actually
    // committed. So, we use the current stack usage as an estimate.
    ASSERT(Thread::current().stack().isGrowingDownward());
    uint8_t* current = bitwise_cast<uint8_t*>(currentStackPointer());
    uint8_t* high = bitwise_cast<uint8_t*>(Thread::current().stack().origin());
    return high - current;
#else
    return CLoopStack::committedByteCount();
#endif
}

#if ENABLE(C_LOOP)
bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
{
    return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
}

bool VM::isSafeToRecurseSoftCLoop() const
{
    return interpreter->cloopStack().isSafeToRecurse();
}

void* VM::currentCLoopStackPointer() const
{
    return interpreter->cloopStack().currentStackPointer();
}
#endif // ENABLE(C_LOOP)

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
{
    if (!Options::validateExceptionChecks())
        return;

    if (UNLIKELY(m_needExceptionCheck)) {
        auto throwDepth = m_simulatedThrowPointRecursionDepth;
        auto& throwLocation = m_simulatedThrowPointLocation;

        dataLog(
            "ERROR: Unchecked JS exception:\n"
            "    This scope can throw a JS exception: ", throwLocation, "\n"
            "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
            "    But the exception was unchecked as of this scope: ", location, "\n"
            "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
            "\n");

        StringPrintStream out;
        std::unique_ptr<StackTrace> currentTrace = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());

        if (Options::dumpSimulatedThrows()) {
            out.println("The simulated exception was thrown at:");
            m_nativeStackTraceOfLastSimulatedThrow->dump(out, "    ");
            out.println();
        }
        out.println("Unchecked exception detected at:");
        currentTrace->dump(out, "    ");
        out.println();

        dataLog(out.toCString());
        RELEASE_ASSERT(!m_needExceptionCheck);
    }
}
#endif

#if USE(CF)
void VM::setRunLoop(CFRunLoopRef runLoop)
{
    ASSERT(runLoop);
    m_runLoop = runLoop;
    JSRunLoopTimer::Manager::shared().didChangeRunLoop(*this, runLoop);
}
#endif // USE(CF)

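// Scratch buffers are never freed until VM destruction, and requests are always
// satisfied from the most recently allocated (and largest) buffer, so returning
// m_scratchBuffers.last() under the lock is safe.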
ScratchBuffer* VM::scratchBufferForSize(size_t size)
{
    if (!size)
        return nullptr;

    auto locker = holdLock(m_scratchBufferLock);

    if (size > m_sizeOfLastScratchBuffer) {
        // Protect against an N^2 memory usage pathology by ensuring
        // that at worst, we get a geometric series, meaning that the
        // total memory usage is somewhere around
        // max(scratch buffer size) * 4.
        m_sizeOfLastScratchBuffer = size * 2;

        ScratchBuffer* newBuffer = ScratchBuffer::create(m_sizeOfLastScratchBuffer);
        RELEASE_ASSERT(newBuffer);
        m_scratchBuffers.append(newBuffer);
    }

    ScratchBuffer* result = m_scratchBuffers.last();
    return result;
}

void VM::clearScratchBuffers()
{
    auto lock = holdLock(m_scratchBufferLock);
    for (auto* scratchBuffer : m_scratchBuffers)
        scratchBuffer->setActiveLength(0);
}

void VM::ensureShadowChicken()
{
    if (m_shadowChicken)
        return;
    m_shadowChicken = std::make_unique<ShadowChicken>();
}

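// These lazily created subspaces can be observed by concurrent compiler threads,
// so each slow path publishes the fully constructed subspace with a store-store
// fence: the fence guarantees the subspace's initialization is visible before
// the m_##name pointer itself becomes non-null.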
#define DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
    IsoSubspace* VM::name##Slow() \
    { \
        ASSERT(!m_##name); \
        auto space = std::make_unique<IsoSubspace> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
        WTF::storeStoreFence(); \
        m_##name = WTFMove(space); \
        return m_##name.get(); \
    }

DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(boundFunctionSpace, cellHeapCellType.get(), JSBoundFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(callbackFunctionSpace, destructibleObjectHeapCellType.get(), JSCallbackFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(customGetterSetterFunctionSpace, cellHeapCellType.get(), JSCustomGetterSetterFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(errorInstanceSpace, destructibleObjectHeapCellType.get(), ErrorInstance)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(nativeStdFunctionSpace, cellHeapCellType.get(), JSNativeStdFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(proxyRevokeSpace, destructibleObjectHeapCellType.get(), ProxyRevoke)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakMapSpace, destructibleObjectHeapCellType.get(), JSWeakMap)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakSetSpace, destructibleObjectHeapCellType.get(), JSWeakSet)
#if JSC_OBJC_API_ENABLED
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(objCCallbackFunctionSpace, destructibleObjectHeapCellType.get(), ObjCCallbackFunction)
#endif
#if ENABLE(WEBASSEMBLY)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyCodeBlockSpace, webAssemblyCodeBlockHeapCellType.get(), JSWebAssemblyCodeBlock)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyFunctionSpace, webAssemblyFunctionHeapCellType.get(), WebAssemblyFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyWrapperFunctionSpace, cellHeapCellType.get(), WebAssemblyWrapperFunction)
#endif

#undef DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW

#define DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
    IsoSubspace* VM::name##Slow() \
    { \
        ASSERT(!m_##name); \
        auto space = std::make_unique<SpaceAndSet> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
        WTF::storeStoreFence(); \
        m_##name = WTFMove(space); \
        return &m_##name->space; \
    }

DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(evalExecutableSpace, destructibleCellHeapCellType.get(), EvalExecutable)
DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(moduleProgramExecutableSpace, destructibleCellHeapCellType.get(), ModuleProgramExecutable)

#undef DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW

Structure* VM::setIteratorStructureSlow()
{
    ASSERT(!m_setIteratorStructure);
    m_setIteratorStructure.set(*this, JSSetIterator::createStructure(*this, 0, jsNull()));
    return m_setIteratorStructure.get();
}

Structure* VM::mapIteratorStructureSlow()
{
    ASSERT(!m_mapIteratorStructure);
    m_mapIteratorStructure.set(*this, JSMapIterator::createStructure(*this, 0, jsNull()));
    return m_mapIteratorStructure.get();
}

JSCell* VM::sentinelSetBucketSlow()
{
    ASSERT(!m_sentinelSetBucket);
    auto* sentinel = JSSet::BucketType::createSentinel(*this);
    m_sentinelSetBucket.set(*this, sentinel);
    return sentinel;
}

JSCell* VM::sentinelMapBucketSlow()
{
    ASSERT(!m_sentinelMapBucket);
    auto* sentinel = JSMap::BucketType::createSentinel(*this);
    m_sentinelMapBucket.set(*this, sentinel);
    return sentinel;
}

JSGlobalObject* VM::vmEntryGlobalObject(const CallFrame* callFrame) const
{
    if (callFrame && callFrame->isGlobalExec()) {
        ASSERT(callFrame->callee().isCell() && callFrame->callee().asCell()->isObject());
        ASSERT(callFrame == callFrame->lexicalGlobalObject()->globalExec());
        return callFrame->lexicalGlobalObject();
    }
    ASSERT(entryScope);
    return entryScope->globalObject();
}

void VM::setCrashOnVMCreation(bool shouldCrash)
{
    vmCreationShouldCrash = shouldCrash;
}

} // namespace JSC