1/*
2 * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 *
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
14 * its contributors may be used to endorse or promote products derived
15 * from this software without specific prior written permission.
16 *
17 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 */
28
29#include "config.h"
30#include "VM.h"
31
32#include "ArgList.h"
33#include "ArrayBufferNeuteringWatchpointSet.h"
34#include "BuiltinExecutables.h"
35#include "BytecodeIntrinsicRegistry.h"
36#include "CodeBlock.h"
37#include "CodeCache.h"
38#include "CommonIdentifiers.h"
39#include "CommonSlowPaths.h"
40#include "CustomGetterSetter.h"
41#include "DFGWorklist.h"
42#include "DirectEvalExecutable.h"
43#include "Disassembler.h"
44#include "DoublePredictionFuzzerAgent.h"
45#include "Error.h"
46#include "ErrorConstructor.h"
47#include "ErrorInstance.h"
48#include "EvalCodeBlock.h"
49#include "Exception.h"
50#include "ExecutableToCodeBlockEdge.h"
51#include "FTLThunks.h"
52#include "FastMallocAlignedMemoryAllocator.h"
53#include "FunctionCodeBlock.h"
54#include "FunctionConstructor.h"
55#include "FunctionExecutable.h"
56#include "GCActivityCallback.h"
57#include "GetterSetter.h"
58#include "GigacageAlignedMemoryAllocator.h"
59#include "HasOwnPropertyCache.h"
60#include "Heap.h"
61#include "HeapIterationScope.h"
62#include "HeapProfiler.h"
63#include "HostCallReturnValue.h"
64#include "Identifier.h"
65#include "IncrementalSweeper.h"
66#include "IndirectEvalExecutable.h"
67#include "InferredValue.h"
68#include "Interpreter.h"
69#include "IntlCollatorConstructor.h"
70#include "IntlDateTimeFormatConstructor.h"
71#include "IntlNumberFormatConstructor.h"
72#include "IntlPluralRulesConstructor.h"
73#include "JITCode.h"
74#include "JITWorklist.h"
75#include "JSAPIValueWrapper.h"
76#include "JSArray.h"
77#include "JSArrayBufferConstructor.h"
78#include "JSAsyncFunction.h"
79#include "JSBigInt.h"
80#include "JSBoundFunction.h"
81#include "JSCInlines.h"
82#include "JSCallbackFunction.h"
83#include "JSCustomGetterSetterFunction.h"
84#include "JSDestructibleObjectHeapCellType.h"
85#include "JSFixedArray.h"
86#include "JSFunction.h"
87#include "JSGlobalObjectFunctions.h"
88#include "JSImmutableButterfly.h"
89#include "JSInternalPromiseDeferred.h"
90#include "JSLock.h"
91#include "JSMap.h"
92#include "JSMapIterator.h"
93#include "JSPromiseDeferred.h"
94#include "JSPropertyNameEnumerator.h"
95#include "JSScriptFetchParameters.h"
96#include "JSScriptFetcher.h"
97#include "JSSet.h"
98#include "JSSetIterator.h"
99#include "JSSourceCode.h"
100#include "JSStringHeapCellType.h"
101#include "JSTemplateObjectDescriptor.h"
102#include "JSWeakMap.h"
103#include "JSWeakSet.h"
104#include "JSWebAssembly.h"
105#include "JSWebAssemblyCodeBlock.h"
106#include "JSWebAssemblyCodeBlockHeapCellType.h"
107#include "JSWithScope.h"
108#include "LLIntData.h"
109#include "Lexer.h"
110#include "Lookup.h"
111#include "MinimumReservedZoneSize.h"
112#include "ModuleProgramCodeBlock.h"
113#include "ModuleProgramExecutable.h"
114#include "NativeErrorConstructor.h"
115#include "NativeExecutable.h"
116#include "NativeStdFunctionCell.h"
117#include "Nodes.h"
118#include "ObjCCallbackFunction.h"
119#include "Parser.h"
120#include "ProfilerDatabase.h"
121#include "ProgramCodeBlock.h"
122#include "ProgramExecutable.h"
123#include "PromiseDeferredTimer.h"
124#include "PropertyMapHashTable.h"
125#include "ProxyRevoke.h"
126#include "RandomizingFuzzerAgent.h"
127#include "RegExpCache.h"
128#include "RegExpObject.h"
129#include "RegisterAtOffsetList.h"
130#include "RuntimeType.h"
131#include "SamplingProfiler.h"
132#include "ShadowChicken.h"
133#include "SimpleTypedArrayController.h"
134#include "SourceProviderCache.h"
135#include "StackVisitor.h"
136#include "StrictEvalActivation.h"
137#include "StrongInlines.h"
138#include "StructureInlines.h"
139#include "TestRunnerUtils.h"
140#include "ThunkGenerators.h"
141#include "TypeProfiler.h"
142#include "TypeProfilerLog.h"
143#include "UnlinkedCodeBlock.h"
144#include "VMEntryScope.h"
145#include "VMInlines.h"
146#include "VMInspector.h"
147#include "VariableEnvironment.h"
148#include "WasmWorklist.h"
149#include "Watchdog.h"
150#include "WeakGCMapInlines.h"
151#include "WebAssemblyFunction.h"
152#include "WebAssemblyFunctionHeapCellType.h"
153#include "WebAssemblyWrapperFunction.h"
154#include <wtf/ProcessID.h>
155#include <wtf/ReadWriteLock.h>
156#include <wtf/SimpleStats.h>
157#include <wtf/StringPrintStream.h>
158#include <wtf/Threading.h>
159#include <wtf/text/AtomicStringTable.h>
160#include <wtf/text/SymbolRegistry.h>
161
162#if ENABLE(C_LOOP)
163#include "CLoopStack.h"
164#include "CLoopStackInlines.h"
165#endif
166
167#if ENABLE(DFG_JIT)
168#include "ConservativeRoots.h"
169#endif
170
171#if ENABLE(REGEXP_TRACING)
172#include "RegExp.h"
173#endif
174
175namespace JSC {
176
177#if ENABLE(JIT)
178#if !ASSERT_DISABLED
179bool VM::s_canUseJITIsSet = false;
180#endif
181bool VM::s_canUseJIT = false;
182#endif
183
184Atomic<unsigned> VM::s_numberOfIDs;
185
186// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
187// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
188// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.
189
190#if ENABLE(ASSEMBLER)
191static bool enableAssembler()
192{
193 if (!Options::useJIT() && !Options::useRegExpJIT())
194 return false;
195
196 char* canUseJITString = getenv("JavaScriptCoreUseJIT");
197 if (canUseJITString && !atoi(canUseJITString))
198 return false;
199
200 ExecutableAllocator::initializeUnderlyingAllocator();
201 if (!ExecutableAllocator::singleton().isValid()) {
202 if (Options::crashIfCantAllocateJITMemory())
203 CRASH();
204 return false;
205 }
206
207 return true;
208}
#endif // ENABLE(ASSEMBLER)
210
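// Memoized with std::call_once below: the executable-memory probe in enableAssembler()
// runs at most once per process, and the answer stays stable for the process lifetime.
// Both the JIT and the RegExp JIT checks build on this result.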
211bool VM::canUseAssembler()
212{
213#if ENABLE(ASSEMBLER)
214 static std::once_flag onceKey;
215 static bool enabled = false;
216 std::call_once(onceKey, [] {
217 enabled = enableAssembler();
218 });
219 return enabled;
220#else
221 return false; // interpreter only
222#endif
223}
224
225void VM::computeCanUseJIT()
226{
227#if ENABLE(JIT)
228#if !ASSERT_DISABLED
229 RELEASE_ASSERT(!s_canUseJITIsSet);
230 s_canUseJITIsSet = true;
231#endif
232 s_canUseJIT = VM::canUseAssembler() && Options::useJIT();
233#endif
234}
235
236bool VM::canUseRegExpJIT()
237{
238#if ENABLE(YARR_JIT)
239 static std::once_flag onceKey;
240 static bool enabled = false;
241 std::call_once(onceKey, [] {
242 enabled = VM::canUseAssembler() && Options::useRegExpJIT();
243 });
244 return enabled;
245#else
246 return false; // interpreter only
247#endif
248}
249
250bool VM::isInMiniMode()
251{
252 return !canUseJIT() || Options::forceMiniVMMode();
253}
254
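// Allocates a process-unique VM ID using a lock-free compare-and-swap loop; if another
// thread bumps s_numberOfIDs concurrently, the exchange fails and we simply retry.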
255inline unsigned VM::nextID()
256{
257 for (;;) {
258 unsigned currentNumberOfIDs = s_numberOfIDs.load();
259 unsigned newID = currentNumberOfIDs + 1;
260 if (s_numberOfIDs.compareExchangeWeak(currentNumberOfIDs, newID))
261 return newID;
262 }
263}
264
265static bool vmCreationShouldCrash = false;
266
267VM::VM(VMType vmType, HeapType heapType)
268 : m_id(nextID())
269 , m_apiLock(adoptRef(new JSLock(this)))
270#if USE(CF)
271 , m_runLoop(CFRunLoopGetCurrent())
272#endif // USE(CF)
273 , heap(this, heapType)
274 , fastMallocAllocator(std::make_unique<FastMallocAlignedMemoryAllocator>())
275 , primitiveGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::Primitive))
276 , jsValueGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::JSValue))
277 , auxiliaryHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary)))
278 , immutableButterflyHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCellWithInteriorPointers)))
279 , cellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCell)))
280 , destructibleCellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(NeedsDestruction, HeapCell::JSCell)))
281 , stringHeapCellType(std::make_unique<JSStringHeapCellType>())
282 , destructibleObjectHeapCellType(std::make_unique<JSDestructibleObjectHeapCellType>())
283#if ENABLE(WEBASSEMBLY)
284 , webAssemblyCodeBlockHeapCellType(std::make_unique<JSWebAssemblyCodeBlockHeapCellType>())
285 , webAssemblyFunctionHeapCellType(std::make_unique<WebAssemblyFunctionHeapCellType>())
286#endif
287 , primitiveGigacageAuxiliarySpace("Primitive Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), primitiveGigacageAllocator.get())
288 , jsValueGigacageAuxiliarySpace("JSValue Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), jsValueGigacageAllocator.get())
289 , immutableButterflyJSValueGigacageAuxiliarySpace("ImmutableButterfly Gigacage JSCellWithInteriorPointers", heap, immutableButterflyHeapCellType.get(), jsValueGigacageAllocator.get())
290 , cellSpace("JSCell", heap, cellHeapCellType.get(), fastMallocAllocator.get())
291 , jsValueGigacageCellSpace("JSValue Gigacage JSCell", heap, cellHeapCellType.get(), jsValueGigacageAllocator.get())
292 , destructibleCellSpace("Destructible JSCell", heap, destructibleCellHeapCellType.get(), fastMallocAllocator.get())
293 , stringSpace("JSString", heap, stringHeapCellType.get(), fastMallocAllocator.get())
294 , destructibleObjectSpace("JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
295 , eagerlySweptDestructibleObjectSpace("Eagerly Swept JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
296 , executableToCodeBlockEdgeSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), ExecutableToCodeBlockEdge)
297 , functionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSFunction)
298 , internalFunctionSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), InternalFunction)
299 , nativeExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), NativeExecutable)
300 , propertyTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), PropertyTable)
301 , structureRareDataSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), StructureRareData)
302 , structureSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), Structure)
303 , executableToCodeBlockEdgesWithConstraints(executableToCodeBlockEdgeSpace)
304 , executableToCodeBlockEdgesWithFinalizers(executableToCodeBlockEdgeSpace)
305 , codeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), CodeBlock)
306 , functionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionExecutable)
307 , programExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramExecutable)
308 , unlinkedFunctionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), UnlinkedFunctionExecutable)
309 , vmType(vmType)
310 , clientData(0)
311 , topEntryFrame(nullptr)
312 , topCallFrame(CallFrame::noCaller())
313 , promiseDeferredTimer(std::make_unique<PromiseDeferredTimer>(*this))
314 , m_atomicStringTable(vmType == Default ? Thread::current().atomicStringTable() : new AtomicStringTable)
315 , propertyNames(nullptr)
316 , emptyList(new ArgList)
317 , machineCodeBytesPerBytecodeWordForBaselineJIT(std::make_unique<SimpleStats>())
318 , customGetterSetterFunctionMap(*this)
319 , stringCache(*this)
320 , symbolImplToSymbolMap(*this)
321 , structureCache(*this)
322 , interpreter(0)
323 , entryScope(0)
324 , m_regExpCache(new RegExpCache(this))
325 , m_compactVariableMap(adoptRef(*(new CompactVariableMap)))
326#if ENABLE(REGEXP_TRACING)
327 , m_rtTraceList(new RTTraceList())
328#endif
329#if ENABLE(GC_VALIDATION)
330 , m_initializingObjectClass(0)
331#endif
332 , m_stackPointerAtVMEntry(0)
333 , m_codeCache(std::make_unique<CodeCache>())
334 , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
335 , m_typeProfilerEnabledCount(0)
336 , m_primitiveGigacageEnabled(IsWatched)
337 , m_controlFlowProfilerEnabledCount(0)
338{
339 if (UNLIKELY(vmCreationShouldCrash))
340 CRASH_WITH_INFO(0x4242424220202020, 0xbadbeef0badbeef, 0x1234123412341234, 0x1337133713371337);
341
342 interpreter = new Interpreter(*this);
343 StackBounds stack = Thread::current().stack();
344 updateSoftReservedZoneSize(Options::softReservedZoneSize());
345 setLastStackTop(stack.origin());
346
347 JSRunLoopTimer::Manager::shared().registerVM(*this);
348
349 // Need to be careful to keep everything consistent here
350 JSLockHolder lock(this);
351 AtomicStringTable* existingEntryAtomicStringTable = Thread::current().setCurrentAtomicStringTable(m_atomicStringTable);
352 structureStructure.set(*this, Structure::createStructure(*this));
353 structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
354 stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
355
356 smallStrings.initializeCommonStrings(*this);
357
358 propertyNames = new CommonIdentifiers(this);
359 terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
360 propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
361 customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
362 domAttributeGetterSetterStructure.set(*this, DOMAttributeGetterSetter::createStructure(*this, 0, jsNull()));
363 scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
364 apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
365 nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
366 evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
367 programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
368 functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
369#if ENABLE(WEBASSEMBLY)
370 webAssemblyCodeBlockStructure.set(*this, JSWebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
371#endif
372 moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
373 regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
374 symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
375 symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
376 fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));
377
378 immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithInt32) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithInt32));
379 immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithDouble) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithDouble));
380 immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithContiguous) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithContiguous));
381
382 sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
383 scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
384 scriptFetchParametersStructure.set(*this, JSScriptFetchParameters::createStructure(*this, 0, jsNull()));
385 structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
386 sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
387 templateObjectDescriptorStructure.set(*this, JSTemplateObjectDescriptor::createStructure(*this, 0, jsNull()));
388 arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpointSet::createStructure(*this));
389 unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
390 unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
391 unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
392 unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
393 unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
394 propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
395 if (VM::canUseJIT())
396 inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
397 functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
398 exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
399 promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
400 internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
401 nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
402 programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
403 moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
404 evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
405 functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
406 hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
407 hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
408 bigIntStructure.set(*this, JSBigInt::createStructure(*this, 0, jsNull()));
409 executableToCodeBlockEdgeStructure.set(*this, ExecutableToCodeBlockEdge::createStructure(*this, nullptr, jsNull()));
410
411 // Eagerly initialize constant cells since the concurrent compiler can access them.
412 if (canUseJIT()) {
413 sentinelMapBucket();
414 sentinelSetBucket();
415 }
416
417 Thread::current().setCurrentAtomicStringTable(existingEntryAtomicStringTable);
418
419#if !ENABLE(C_LOOP)
420 initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
421#endif
422
423 Gigacage::addPrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
424
425 heap.notifyIsSafeToCollect();
426
427 LLInt::Data::performAssertions(*this);
428
429 if (UNLIKELY(Options::useProfiler())) {
430 m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);
431
432 StringPrintStream pathOut;
433 const char* profilerPath = getenv("JSC_PROFILER_PATH");
434 if (profilerPath)
435 pathOut.print(profilerPath, "/");
436 pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
437 m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
438 }
439
440 callFrameForCatch = nullptr;
441
442 // Initialize this last, as a free way of asserting that VM initialization itself
443 // won't use this.
444 m_typedArrayController = adoptRef(new SimpleTypedArrayController());
445
446 m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);
447
448 if (Options::useTypeProfiler())
449 enableTypeProfiler();
450 if (Options::useControlFlowProfiler())
451 enableControlFlowProfiler();
452#if ENABLE(SAMPLING_PROFILER)
453 if (Options::useSamplingProfiler()) {
454 setShouldBuildPCToCodeOriginMapping();
455 Ref<Stopwatch> stopwatch = Stopwatch::create();
456 stopwatch->start();
457 m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
458 if (Options::samplingProfilerPath())
459 m_samplingProfiler->registerForReportAtExit();
460 m_samplingProfiler->start();
461 }
462#endif // ENABLE(SAMPLING_PROFILER)
463
464 if (Options::useRandomizingFuzzerAgent())
465 setFuzzerAgent(std::make_unique<RandomizingFuzzerAgent>(*this));
466 else if (Options::useDoublePredictionFuzzerAgent())
467 setFuzzerAgent(std::make_unique<DoublePredictionFuzzerAgent>(*this));
468
469 if (Options::alwaysGeneratePCToCodeOriginMap())
470 setShouldBuildPCToCodeOriginMapping();
471
472 if (Options::watchdog()) {
473 Watchdog& watchdog = ensureWatchdog();
474 watchdog.setTimeLimit(Seconds::fromMilliseconds(Options::watchdog()));
475 }
476
477#if ENABLE(JIT)
478 // Make sure that any stubs that the JIT is going to use are initialized in non-compilation threads.
479 if (canUseJIT()) {
480 jitStubs = std::make_unique<JITThunks>();
481#if ENABLE(FTL_JIT)
482 ftlThunks = std::make_unique<FTL::Thunks>();
483#endif // ENABLE(FTL_JIT)
484 getCTIInternalFunctionTrampolineFor(CodeForCall);
485 getCTIInternalFunctionTrampolineFor(CodeForConstruct);
486 }
487#endif
488
489 if (Options::forceDebuggerBytecodeGeneration() || Options::alwaysUseShadowChicken())
490 ensureShadowChicken();
491
492 VMInspector::instance().add(this);
493}
494
495static ReadWriteLock s_destructionLock;
496
497void waitForVMDestruction()
498{
499 auto locker = holdLock(s_destructionLock.write());
500}
501
502VM::~VM()
503{
504 auto destructionLocker = holdLock(s_destructionLock.read());
505
506 Gigacage::removePrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
507 promiseDeferredTimer->stopRunningTasks();
508#if ENABLE(WEBASSEMBLY)
509 if (Wasm::Worklist* worklist = Wasm::existingWorklistOrNull())
510 worklist->stopAllPlansForContext(wasmContext);
511#endif
512 if (UNLIKELY(m_watchdog))
513 m_watchdog->willDestroyVM(this);
514 m_traps.willDestroyVM();
515 VMInspector::instance().remove(this);
516
517 // Never GC, ever again.
518 heap.incrementDeferralDepth();
519
520#if ENABLE(SAMPLING_PROFILER)
521 if (m_samplingProfiler) {
522 m_samplingProfiler->reportDataToOptionFile();
523 m_samplingProfiler->shutdown();
524 }
525#endif // ENABLE(SAMPLING_PROFILER)
526
527#if ENABLE(JIT)
528 if (JITWorklist* worklist = JITWorklist::existingGlobalWorklistOrNull())
529 worklist->completeAllForVM(*this);
530#endif // ENABLE(JIT)
531
532#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point in doing so.
535 for (unsigned i = DFG::numberOfWorklists(); i--;) {
536 if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
537 worklist->removeNonCompilingPlansForVM(*this);
538 worklist->waitUntilAllPlansForVMAreReady(*this);
539 worklist->removeAllReadyPlansForVM(*this);
540 }
541 }
542#endif // ENABLE(DFG_JIT)
543
544 waitForAsynchronousDisassembly();
545
546 // Clear this first to ensure that nobody tries to remove themselves from it.
547 m_perBytecodeProfiler = nullptr;
548
549 ASSERT(currentThreadIsHoldingAPILock());
550 m_apiLock->willDestroyVM(this);
551 smallStrings.setIsInitialized(false);
552 heap.lastChanceToFinalize();
553
554 JSRunLoopTimer::Manager::shared().unregisterVM(*this);
555
556 delete interpreter;
557#ifndef NDEBUG
558 interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
559#endif
560
561 delete emptyList;
562
563 delete propertyNames;
564 if (vmType != Default)
565 delete m_atomicStringTable;
566
567 delete clientData;
568 delete m_regExpCache;
569
570#if ENABLE(REGEXP_TRACING)
571 delete m_rtTraceList;
572#endif
573
574#if ENABLE(DFG_JIT)
575 for (unsigned i = 0; i < m_scratchBuffers.size(); ++i)
576 fastFree(m_scratchBuffers[i]);
577#endif
578}
579
580void VM::primitiveGigacageDisabledCallback(void* argument)
581{
582 static_cast<VM*>(argument)->primitiveGigacageDisabled();
583}
584
585void VM::primitiveGigacageDisabled()
586{
587 if (m_apiLock->currentThreadIsHoldingLock()) {
588 m_primitiveGigacageEnabled.fireAll(*this, "Primitive gigacage disabled");
589 return;
590 }
591
592 // This is totally racy, and that's OK. The point is, it's up to the user to ensure that they pass the
593 // uncaged buffer in a nicely synchronized manner.
594 m_needToFirePrimitiveGigacageEnabled = true;
595}
596
597void VM::setLastStackTop(void* lastStackTop)
598{
599 m_lastStackTop = lastStackTop;
600}
601
602Ref<VM> VM::createContextGroup(HeapType heapType)
603{
604 return adoptRef(*new VM(APIContextGroup, heapType));
605}
606
607Ref<VM> VM::create(HeapType heapType)
608{
609 return adoptRef(*new VM(Default, heapType));
610}
611
612bool VM::sharedInstanceExists()
613{
614 return sharedInstanceInternal();
615}
616
617VM& VM::sharedInstance()
618{
619 GlobalJSLock globalLock;
620 VM*& instance = sharedInstanceInternal();
621 if (!instance)
622 instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
623 return *instance;
624}
625
626VM*& VM::sharedInstanceInternal()
627{
628 static VM* sharedInstance;
629 return sharedInstance;
630}
631
632Watchdog& VM::ensureWatchdog()
633{
634 if (!m_watchdog)
635 m_watchdog = adoptRef(new Watchdog(this));
636 return *m_watchdog;
637}
638
639HeapProfiler& VM::ensureHeapProfiler()
640{
641 if (!m_heapProfiler)
642 m_heapProfiler = std::make_unique<HeapProfiler>(*this);
643 return *m_heapProfiler;
644}
645
646#if ENABLE(SAMPLING_PROFILER)
647SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
648{
649 if (!m_samplingProfiler)
650 m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
651 return *m_samplingProfiler;
652}
653#endif // ENABLE(SAMPLING_PROFILER)
654
655#if ENABLE(JIT)
656static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
657{
658 switch (intrinsic) {
659 case CharCodeAtIntrinsic:
660 return charCodeAtThunkGenerator;
661 case CharAtIntrinsic:
662 return charAtThunkGenerator;
663 case Clz32Intrinsic:
664 return clz32ThunkGenerator;
665 case FromCharCodeIntrinsic:
666 return fromCharCodeThunkGenerator;
667 case SqrtIntrinsic:
668 return sqrtThunkGenerator;
669 case AbsIntrinsic:
670 return absThunkGenerator;
671 case FloorIntrinsic:
672 return floorThunkGenerator;
673 case CeilIntrinsic:
674 return ceilThunkGenerator;
675 case TruncIntrinsic:
676 return truncThunkGenerator;
677 case RoundIntrinsic:
678 return roundThunkGenerator;
679 case ExpIntrinsic:
680 return expThunkGenerator;
681 case LogIntrinsic:
682 return logThunkGenerator;
683 case IMulIntrinsic:
684 return imulThunkGenerator;
685 case RandomIntrinsic:
686 return randomThunkGenerator;
687 case BoundThisNoArgsFunctionCallIntrinsic:
688 return boundThisNoArgsFunctionCallGenerator;
689 default:
690 return nullptr;
691 }
692}
693
694#endif // ENABLE(JIT)
695
696NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
697{
698 return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
699}
700
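// The two helpers below wrap the LLInt native call/construct trampolines in NativeJITCode
// objects that are created once and then shared, via ref counting, by the NativeExecutables
// built on the no-JIT path of getHostFunction() further down.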
701static Ref<NativeJITCode> jitCodeForCallTrampoline()
702{
703 static NativeJITCode* result;
704 static std::once_flag onceKey;
705 std::call_once(onceKey, [&] {
706 result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_call_trampoline), JITCode::HostCallThunk, NoIntrinsic);
707 });
708 return makeRef(*result);
709}
710
711static Ref<NativeJITCode> jitCodeForConstructTrampoline()
712{
713 static NativeJITCode* result;
714 static std::once_flag onceKey;
715 std::call_once(onceKey, [&] {
716 result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_construct_trampoline), JITCode::HostCallThunk, NoIntrinsic);
717 });
718 return makeRef(*result);
719}
720
721NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
722{
723#if ENABLE(JIT)
724 if (canUseJIT()) {
725 return jitStubs->hostFunctionStub(
726 this, function, constructor,
727 intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
728 intrinsic, signature, name);
729 }
730#endif // ENABLE(JIT)
731 UNUSED_PARAM(intrinsic);
732 UNUSED_PARAM(signature);
733 return NativeExecutable::create(*this, jitCodeForCallTrampoline(), function, jitCodeForConstructTrampoline(), constructor, name);
734}
735
736MacroAssemblerCodePtr<JSEntryPtrTag> VM::getCTIInternalFunctionTrampolineFor(CodeSpecializationKind kind)
737{
738#if ENABLE(JIT)
739 if (canUseJIT()) {
740 if (kind == CodeForCall)
741 return jitStubs->ctiInternalFunctionCall(this).retagged<JSEntryPtrTag>();
742 return jitStubs->ctiInternalFunctionConstruct(this).retagged<JSEntryPtrTag>();
743 }
744#endif
745 if (kind == CodeForCall)
746 return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_call_trampoline);
747 return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_construct_trampoline);
748}
749
750VM::ClientData::~ClientData()
751{
752}
753
754void VM::resetDateCache()
755{
756 localTimeOffsetCache.reset();
757 cachedDateString = String();
758 cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
759 dateInstanceCache.reset();
760}
761
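// Runs the callback immediately if the VM is idle (no active VMEntryScope); otherwise
// defers it until the outermost entry scope is popped.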
762void VM::whenIdle(Function<void()>&& callback)
763{
764 if (!entryScope) {
765 callback();
766 return;
767 }
768
769 entryScope->addDidPopListener(WTFMove(callback));
770}
771
772void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
773{
774 whenIdle([=] () {
775 heap.deleteAllCodeBlocks(effort);
776 });
777}
778
779void VM::deleteAllCode(DeleteAllCodeEffort effort)
780{
781 whenIdle([=] () {
782 m_codeCache->clear();
783 m_regExpCache->deleteAllCode();
784 heap.deleteAllCodeBlocks(effort);
785 heap.deleteAllUnlinkedCodeBlocks(effort);
786 heap.reportAbandonedObjectGraph();
787 });
788}
789
790void VM::shrinkFootprintWhenIdle()
791{
792 whenIdle([=] () {
793 sanitizeStackForVM(this);
794 deleteAllCode(DeleteAllCodeIfNotCollecting);
795 heap.collectNow(Synchronousness::Sync, CollectionScope::Full);
796 // FIXME: Consider stopping various automatic threads here.
797 // https://bugs.webkit.org/show_bug.cgi?id=185447
798 WTF::releaseFastMallocFreeMemory();
799 });
800}
801
802SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
803{
804 auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
805 if (addResult.isNewEntry)
806 addResult.iterator->value = adoptRef(new SourceProviderCache);
807 return addResult.iterator->value.get();
808}
809
810void VM::clearSourceProviderCaches()
811{
812 sourceProviderCacheMap.clear();
813}
814
815Exception* VM::throwException(ExecState* exec, Exception* exception)
816{
817 ASSERT(exec == topCallFrame || exec->isGlobalExec() || exec == exec->lexicalGlobalObject()->callFrameAtDebuggerEntry());
818 CallFrame* throwOriginFrame = exec->isGlobalExec() ? exec : topJSCallFrame();
819
820 if (Options::breakOnThrow()) {
821 CodeBlock* codeBlock = throwOriginFrame ? throwOriginFrame->codeBlock() : nullptr;
822 dataLog("Throwing exception in call frame ", RawPointer(throwOriginFrame), " for code block ", codeBlock, "\n");
823 CRASH();
824 }
825
826 interpreter->notifyDebuggerOfExceptionToBeThrown(*this, throwOriginFrame, exception);
827
828 setException(exception);
829
830#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
831 m_nativeStackTraceOfLastThrow = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
832 m_throwingThread = &Thread::current();
833#endif
834 return exception;
835}
836
837Exception* VM::throwException(ExecState* exec, JSValue thrownValue)
838{
839 VM& vm = *this;
840 Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
841 if (!exception)
842 exception = Exception::create(*this, thrownValue);
843
844 return throwException(exec, exception);
845}
846
847Exception* VM::throwException(ExecState* exec, JSObject* error)
848{
849 return throwException(exec, JSValue(error));
850}
851
852void VM::setStackPointerAtVMEntry(void* sp)
853{
854 m_stackPointerAtVMEntry = sp;
855 updateStackLimits();
856}
857
858size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
859{
860 size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
861 m_currentSoftReservedZoneSize = softReservedZoneSize;
862#if ENABLE(C_LOOP)
863 interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
864#endif
865
866 updateStackLimits();
867
868 return oldSoftReservedZoneSize;
869}
870
871#if OS(WINDOWS)
872// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
873// where the guard page is a barrier between committed and uncommitted memory.
874// When data from the guard page is read or written, the guard page is moved, and memory is committed.
875// This is how the system grows the stack.
876// When using the C stack on Windows we need to precommit the needed stack space.
877// Otherwise we might crash later if we access uncommitted stack memory.
878// This can happen if we allocate stack space larger than the page guard size (4K).
879// The system does not get the chance to move the guard page, and commit more memory,
880// and we crash if uncommitted memory is accessed.
881// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
882// when needed, see http://support.microsoft.com/kb/100775.
883// By touching every page up to the stack limit with a dummy operation,
884// we force the system to move the guard page, and commit memory.
885
886static void preCommitStackMemory(void* stackLimit)
887{
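    // &stackLimit is the address of this function's parameter, which lives near the current
    // top of the stack, so walking down from it to stackLimit touches every page in between.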
888 const int pageSize = 4096;
889 for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
890 char ch = *p;
891 *p = ch;
892 }
893}
894#endif
895
896inline void VM::updateStackLimits()
897{
898#if OS(WINDOWS)
899 void* lastSoftStackLimit = m_softStackLimit;
900#endif
901
902 const StackBounds& stack = Thread::current().stack();
903 size_t reservedZoneSize = Options::reservedZoneSize();
    // We should have already ensured that Options::reservedZoneSize() >= minimumReservedZoneSize
    // at options initialization time, and the option value should not have been changed thereafter.
    // We cannot assert here that it has not changed, but we can at least assert that the value
    // is sane.
908 RELEASE_ASSERT(reservedZoneSize >= minimumReservedZoneSize);
909
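    // If a stack pointer was recorded at VM entry, compute both limits relative to it
    // (bounded by Options::maxPerThreadStackUsage()); otherwise derive them from the
    // current thread's stack bounds.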
910 if (m_stackPointerAtVMEntry) {
911 ASSERT(stack.isGrowingDownward());
912 char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
913 m_softStackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
914 m_stackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
915 } else {
916 m_softStackLimit = stack.recursionLimit(m_currentSoftReservedZoneSize);
917 m_stackLimit = stack.recursionLimit(reservedZoneSize);
918 }
919
920#if OS(WINDOWS)
921 // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
922 // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
923 // generated code which can allocate stack space that the C++ compiler does not know
924 // about. As such, we have to precommit that stack memory manually.
925 //
926 // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
927 // used exclusively by C++ code, and the C++ compiler will automatically commit the
928 // needed stack pages.
929 if (lastSoftStackLimit != m_softStackLimit)
930 preCommitStackMemory(m_softStackLimit);
931#endif
932}
933
934#if ENABLE(DFG_JIT)
935void VM::gatherScratchBufferRoots(ConservativeRoots& conservativeRoots)
936{
937 auto lock = holdLock(m_scratchBufferLock);
938 for (auto* scratchBuffer : m_scratchBuffers) {
939 if (scratchBuffer->activeLength()) {
940 void* bufferStart = scratchBuffer->dataBuffer();
941 conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
942 }
943 }
944}
945#endif
946
947void logSanitizeStack(VM* vm)
948{
949 if (Options::verboseSanitizeStack() && vm->topCallFrame) {
950 int dummy;
951 auto& stackBounds = Thread::current().stack();
952 dataLog(
953 "Sanitizing stack for VM = ", RawPointer(vm), " with top call frame at ", RawPointer(vm->topCallFrame),
954 ", current stack pointer at ", RawPointer(&dummy), ", in ",
955 pointerDump(vm->topCallFrame->codeBlock()), ", last code origin = ",
956 vm->topCallFrame->codeOrigin(), ", last stack top = ", RawPointer(vm->lastStackTop()), ", in stack range [", RawPointer(stackBounds.origin()), ", ", RawPointer(stackBounds.end()), "]\n");
957 }
958}
959
960#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
961char* VM::acquireRegExpPatternContexBuffer()
962{
963 m_regExpPatternContextLock.lock();
964 ASSERT(m_regExpPatternContextLock.isLocked());
965 if (!m_regExpPatternContexBuffer)
966 m_regExpPatternContexBuffer = makeUniqueArray<char>(VM::patternContextBufferSize);
967 return m_regExpPatternContexBuffer.get();
968}
969
970void VM::releaseRegExpPatternContexBuffer()
971{
972 ASSERT(m_regExpPatternContextLock.isLocked());
973
974 m_regExpPatternContextLock.unlock();
975}
976#endif
977
978#if ENABLE(REGEXP_TRACING)
979void VM::addRegExpToTrace(RegExp* regExp)
980{
981 gcProtect(regExp);
982 m_rtTraceList->add(regExp);
983}
984
985void VM::dumpRegExpTrace()
986{
    // The first RegExp object is ignored. It is created by the RegExpPrototype constructor and is not used.
988 RTTraceList::iterator iter = ++m_rtTraceList->begin();
989
990 if (iter != m_rtTraceList->end()) {
991 dataLogF("\nRegExp Tracing\n");
992 dataLogF("Regular Expression 8 Bit 16 Bit match() Matches Average\n");
993 dataLogF(" <Match only / Match> JIT Addr JIT Address calls found String len\n");
994 dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");
995
996 unsigned reCount = 0;
997
998 for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
999 (*iter)->printTraceData();
1000 gcUnprotect(*iter);
1001 }
1002
1003 dataLogF("%d Regular Expressions\n", reCount);
1004 }
1005
1006 m_rtTraceList->clear();
1007}
1008#else
1009void VM::dumpRegExpTrace()
1010{
1011}
1012#endif
1013
1014WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
1015{
1016 auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
1017 if (result.isNewEntry)
1018 result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
1019 return result.iterator->value.get();
1020}
1021
1022void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
1023{
1024 ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
1025}
1026
1027void VM::addImpureProperty(const String& propertyName)
1028{
1029 if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
1030 watchpointSet->fireAll(*this, "Impure property added");
1031}
1032
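// The type and control-flow profilers are reference counted: only the 0 -> 1 enable and
// the 1 -> 0 disable transitions do any work, and only those transitions report that
// existing code needs to be recompiled.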
1033template<typename Func>
1034static bool enableProfilerWithRespectToCount(unsigned& counter, const Func& doEnableWork)
1035{
1036 bool needsToRecompile = false;
1037 if (!counter) {
1038 doEnableWork();
1039 needsToRecompile = true;
1040 }
1041 counter++;
1042
1043 return needsToRecompile;
1044}
1045
1046template<typename Func>
1047static bool disableProfilerWithRespectToCount(unsigned& counter, const Func& doDisableWork)
1048{
1049 RELEASE_ASSERT(counter > 0);
1050 bool needsToRecompile = false;
1051 counter--;
1052 if (!counter) {
1053 doDisableWork();
1054 needsToRecompile = true;
1055 }
1056
1057 return needsToRecompile;
1058}
1059
1060bool VM::enableTypeProfiler()
1061{
1062 auto enableTypeProfiler = [this] () {
1063 this->m_typeProfiler = std::make_unique<TypeProfiler>();
1064 this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>(*this);
1065 };
1066
1067 return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
1068}
1069
1070bool VM::disableTypeProfiler()
1071{
1072 auto disableTypeProfiler = [this] () {
1073 this->m_typeProfiler.reset(nullptr);
1074 this->m_typeProfilerLog.reset(nullptr);
1075 };
1076
1077 return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
1078}
1079
1080bool VM::enableControlFlowProfiler()
1081{
1082 auto enableControlFlowProfiler = [this] () {
1083 this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
1084 };
1085
1086 return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
1087}
1088
1089bool VM::disableControlFlowProfiler()
1090{
1091 auto disableControlFlowProfiler = [this] () {
1092 this->m_controlFlowProfiler.reset(nullptr);
1093 };
1094
1095 return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
1096}
1097
1098void VM::dumpTypeProfilerData()
1099{
1100 if (!typeProfiler())
1101 return;
1102
1103 typeProfilerLog()->processLogEntries(*this, "VM Dump Types"_s);
1104 typeProfiler()->dumpTypeProfilerData(*this);
1105}
1106
1107void VM::queueMicrotask(JSGlobalObject& globalObject, Ref<Microtask>&& task)
1108{
1109 m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, &globalObject, WTFMove(task)));
1110}
1111
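// Drains until the queue is empty, so microtasks enqueued by other microtasks run in the
// same drain; m_onEachMicrotaskTick, if set, is invoked after each task.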
1112void VM::drainMicrotasks()
1113{
1114 while (!m_microtaskQueue.isEmpty()) {
1115 m_microtaskQueue.takeFirst()->run();
1116 if (m_onEachMicrotaskTick)
1117 m_onEachMicrotaskTick(*this);
1118 }
1119}
1120
1121void QueuedTask::run()
1122{
1123 m_microtask->run(m_globalObject->globalExec());
1124}
1125
1126void sanitizeStackForVM(VM* vm)
1127{
1128 logSanitizeStack(vm);
1129 if (vm->topCallFrame) {
1130 auto& stackBounds = Thread::current().stack();
1131 ASSERT(vm->currentThreadIsHoldingAPILock());
1132 ASSERT_UNUSED(stackBounds, stackBounds.contains(vm->lastStackTop()));
1133 }
1134#if ENABLE(C_LOOP)
1135 vm->interpreter->cloopStack().sanitizeStack();
1136#else
1137 sanitizeStackForVMImpl(vm);
1138#endif
1139}
1140
1141size_t VM::committedStackByteCount()
1142{
1143#if !ENABLE(C_LOOP)
1144 // When using the C stack, we don't know how many stack pages are actually
1145 // committed. So, we use the current stack usage as an estimate.
1146 ASSERT(Thread::current().stack().isGrowingDownward());
1147 uint8_t* current = bitwise_cast<uint8_t*>(currentStackPointer());
1148 uint8_t* high = bitwise_cast<uint8_t*>(Thread::current().stack().origin());
1149 return high - current;
1150#else
1151 return CLoopStack::committedByteCount();
1152#endif
1153}
1154
1155#if ENABLE(C_LOOP)
1156bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
1157{
1158 return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
1159}
1160
1161bool VM::isSafeToRecurseSoftCLoop() const
1162{
1163 return interpreter->cloopStack().isSafeToRecurse();
1164}
1165#endif // ENABLE(C_LOOP)
1166
1167#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
1168void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
1169{
1170 if (!Options::validateExceptionChecks())
1171 return;
1172
1173 if (UNLIKELY(m_needExceptionCheck)) {
1174 auto throwDepth = m_simulatedThrowPointRecursionDepth;
1175 auto& throwLocation = m_simulatedThrowPointLocation;
1176
1177 dataLog(
1178 "ERROR: Unchecked JS exception:\n"
1179 " This scope can throw a JS exception: ", throwLocation, "\n"
1180 " (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
1181 " But the exception was unchecked as of this scope: ", location, "\n"
1182 " (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
1183 "\n");
1184
1185 StringPrintStream out;
1186 std::unique_ptr<StackTrace> currentTrace = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
1187
1188 if (Options::dumpSimulatedThrows()) {
1189 out.println("The simulated exception was thrown at:");
1190 m_nativeStackTraceOfLastSimulatedThrow->dump(out, " ");
1191 out.println();
1192 }
1193 out.println("Unchecked exception detected at:");
1194 currentTrace->dump(out, " ");
1195 out.println();
1196
1197 dataLog(out.toCString());
1198 RELEASE_ASSERT(!m_needExceptionCheck);
1199 }
1200}
1201#endif
1202
1203#if USE(CF)
1204void VM::setRunLoop(CFRunLoopRef runLoop)
1205{
1206 ASSERT(runLoop);
1207 m_runLoop = runLoop;
1208 JSRunLoopTimer::Manager::shared().didChangeRunLoop(*this, runLoop);
1209}
1210#endif // USE(CF)
1211
1212ScratchBuffer* VM::scratchBufferForSize(size_t size)
1213{
1214 if (!size)
1215 return nullptr;
1216
1217 auto locker = holdLock(m_scratchBufferLock);
1218
1219 if (size > m_sizeOfLastScratchBuffer) {
        // Protect against an N^2 memory usage pathology by ensuring
        // that, at worst, we get a geometric series, meaning that the
        // total memory usage is somewhere around
        // max(scratch buffer size) * 4.
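        // For example (hypothetical sizes): requests of 100, 150, and 300 bytes allocate
        // buffers of 200 and 600 bytes; the 150-byte request reuses the 200-byte buffer,
        // since only a request larger than the last buffer triggers a new allocation.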
1224 m_sizeOfLastScratchBuffer = size * 2;
1225
1226 ScratchBuffer* newBuffer = ScratchBuffer::create(m_sizeOfLastScratchBuffer);
1227 RELEASE_ASSERT(newBuffer);
1228 m_scratchBuffers.append(newBuffer);
1229 }
1230
1231 ScratchBuffer* result = m_scratchBuffers.last();
1232 return result;
1233}
1234
1235void VM::clearScratchBuffers()
1236{
1237 auto lock = holdLock(m_scratchBufferLock);
1238 for (auto* scratchBuffer : m_scratchBuffers)
1239 scratchBuffer->setActiveLength(0);
1240}
1241
1242void VM::ensureShadowChicken()
1243{
1244 if (m_shadowChicken)
1245 return;
1246 m_shadowChicken = std::make_unique<ShadowChicken>();
1247}
1248
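// The macros below define slow paths that lazily create an IsoSubspace the first time a
// cell of the corresponding type is allocated. The storeStoreFence orders the subspace's
// construction before the store that publishes its pointer.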
1249#define DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
1250 IsoSubspace* VM::name##Slow() \
1251 { \
1252 ASSERT(!m_##name); \
1253 auto space = std::make_unique<IsoSubspace> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
1254 WTF::storeStoreFence(); \
1255 m_##name = WTFMove(space); \
1256 return m_##name.get(); \
1257 }
1258
1259
1260DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(boundFunctionSpace, cellHeapCellType.get(), JSBoundFunction)
1261DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(callbackFunctionSpace, destructibleObjectHeapCellType.get(), JSCallbackFunction)
1262DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(customGetterSetterFunctionSpace, cellHeapCellType.get(), JSCustomGetterSetterFunction)
1263DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(errorInstanceSpace, destructibleObjectHeapCellType.get(), ErrorInstance)
1264DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(nativeStdFunctionSpace, cellHeapCellType.get(), JSNativeStdFunction)
1265DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(proxyRevokeSpace, destructibleObjectHeapCellType.get(), ProxyRevoke)
1266DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakMapSpace, destructibleObjectHeapCellType.get(), JSWeakMap)
1267DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakSetSpace, destructibleObjectHeapCellType.get(), JSWeakSet)
1268#if JSC_OBJC_API_ENABLED
1269DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(objCCallbackFunctionSpace, destructibleObjectHeapCellType.get(), ObjCCallbackFunction)
1270#endif
1271#if ENABLE(WEBASSEMBLY)
1272DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyCodeBlockSpace, webAssemblyCodeBlockHeapCellType.get(), JSWebAssemblyCodeBlock)
1273DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyFunctionSpace, webAssemblyFunctionHeapCellType.get(), WebAssemblyFunction)
1274DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyWrapperFunctionSpace, cellHeapCellType.get(), WebAssemblyWrapperFunction)
1275#endif
1276
1277#undef DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW
1278
1279#define DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
1280 IsoSubspace* VM::name##Slow() \
1281 { \
1282 ASSERT(!m_##name); \
1283 auto space = std::make_unique<SpaceAndSet> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
1284 WTF::storeStoreFence(); \
1285 m_##name = WTFMove(space); \
1286 return &m_##name->space; \
1287 }
1288
1289DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(inferredValueSpace, destructibleCellHeapCellType.get(), InferredValue)
1290DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(evalExecutableSpace, destructibleCellHeapCellType.get(), EvalExecutable)
1291DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(moduleProgramExecutableSpace, destructibleCellHeapCellType.get(), ModuleProgramExecutable)
1292
1293#undef DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW
1294
1295Structure* VM::setIteratorStructureSlow()
1296{
1297 ASSERT(!m_setIteratorStructure);
1298 m_setIteratorStructure.set(*this, JSSetIterator::createStructure(*this, 0, jsNull()));
1299 return m_setIteratorStructure.get();
1300}
1301
1302Structure* VM::mapIteratorStructureSlow()
1303{
1304 ASSERT(!m_mapIteratorStructure);
1305 m_mapIteratorStructure.set(*this, JSMapIterator::createStructure(*this, 0, jsNull()));
1306 return m_mapIteratorStructure.get();
1307}
1308
1309JSCell* VM::sentinelSetBucketSlow()
1310{
1311 ASSERT(!m_sentinelSetBucket);
1312 auto* sentinel = JSSet::BucketType::createSentinel(*this);
1313 m_sentinelSetBucket.set(*this, sentinel);
1314 return sentinel;
1315}
1316
1317JSCell* VM::sentinelMapBucketSlow()
1318{
1319 ASSERT(!m_sentinelMapBucket);
1320 auto* sentinel = JSMap::BucketType::createSentinel(*this);
1321 m_sentinelMapBucket.set(*this, sentinel);
1322 return sentinel;
1323}
1324
1325JSGlobalObject* VM::vmEntryGlobalObject(const CallFrame* callFrame) const
1326{
1327 if (callFrame && callFrame->isGlobalExec()) {
1328 ASSERT(callFrame->callee().isCell() && callFrame->callee().asCell()->isObject());
1329 ASSERT(callFrame == callFrame->lexicalGlobalObject()->globalExec());
1330 return callFrame->lexicalGlobalObject();
1331 }
1332 ASSERT(entryScope);
1333 return entryScope->globalObject();
1334}
1335
1336void VM::setCrashOnVMCreation(bool shouldCrash)
1337{
1338 vmCreationShouldCrash = shouldCrash;
1339}
1340
1341} // namespace JSC
1342