/*
 * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "VM.h"

#include "ArgList.h"
#include "ArrayBufferNeuteringWatchpointSet.h"
#include "BuiltinExecutables.h"
#include "BytecodeIntrinsicRegistry.h"
#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
#include "CommonSlowPaths.h"
#include "CustomGetterSetter.h"
#include "DFGWorklist.h"
#include "DirectEvalExecutable.h"
#include "Disassembler.h"
#include "DoublePredictionFuzzerAgent.h"
#include "Error.h"
#include "ErrorConstructor.h"
#include "ErrorInstance.h"
#include "EvalCodeBlock.h"
#include "Exception.h"
#include "ExecutableToCodeBlockEdge.h"
#include "FTLThunks.h"
#include "FastMallocAlignedMemoryAllocator.h"
#include "FunctionCodeBlock.h"
#include "FunctionConstructor.h"
#include "FunctionExecutable.h"
#include "GCActivityCallback.h"
#include "GetterSetter.h"
#include "GigacageAlignedMemoryAllocator.h"
#include "HasOwnPropertyCache.h"
#include "Heap.h"
#include "HeapIterationScope.h"
#include "HeapProfiler.h"
#include "HostCallReturnValue.h"
#include "Identifier.h"
#include "IncrementalSweeper.h"
#include "IndirectEvalExecutable.h"
#include "InferredValue.h"
#include "Interpreter.h"
#include "IntlCollatorConstructor.h"
#include "IntlDateTimeFormatConstructor.h"
#include "IntlNumberFormatConstructor.h"
#include "IntlPluralRulesConstructor.h"
#include "JITCode.h"
#include "JITWorklist.h"
#include "JSAPIValueWrapper.h"
#include "JSArray.h"
#include "JSArrayBufferConstructor.h"
#include "JSAsyncFunction.h"
#include "JSBigInt.h"
#include "JSBoundFunction.h"
#include "JSCInlines.h"
#include "JSCallbackFunction.h"
#include "JSCustomGetterSetterFunction.h"
#include "JSDestructibleObjectHeapCellType.h"
#include "JSFixedArray.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSImmutableButterfly.h"
#include "JSInternalPromiseDeferred.h"
#include "JSLock.h"
#include "JSMap.h"
#include "JSMapIterator.h"
#include "JSPromiseDeferred.h"
#include "JSPropertyNameEnumerator.h"
#include "JSScriptFetchParameters.h"
#include "JSScriptFetcher.h"
#include "JSSet.h"
#include "JSSetIterator.h"
#include "JSSourceCode.h"
#include "JSStringHeapCellType.h"
#include "JSTemplateObjectDescriptor.h"
#include "JSWeakMap.h"
#include "JSWeakSet.h"
#include "JSWebAssembly.h"
#include "JSWebAssemblyCodeBlock.h"
#include "JSWebAssemblyCodeBlockHeapCellType.h"
#include "JSWithScope.h"
#include "LLIntData.h"
#include "Lexer.h"
#include "Lookup.h"
#include "MinimumReservedZoneSize.h"
#include "ModuleProgramCodeBlock.h"
#include "ModuleProgramExecutable.h"
#include "NativeErrorConstructor.h"
#include "NativeExecutable.h"
#include "NativeStdFunctionCell.h"
#include "Nodes.h"
#include "ObjCCallbackFunction.h"
#include "Parser.h"
#include "ProfilerDatabase.h"
#include "ProgramCodeBlock.h"
#include "ProgramExecutable.h"
#include "PromiseDeferredTimer.h"
#include "PropertyMapHashTable.h"
#include "ProxyRevoke.h"
#include "RandomizingFuzzerAgent.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
#include "RegisterAtOffsetList.h"
#include "RuntimeType.h"
#include "SamplingProfiler.h"
#include "ShadowChicken.h"
#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
#include "StructureInlines.h"
#include "TestRunnerUtils.h"
#include "ThunkGenerators.h"
#include "TypeProfiler.h"
#include "TypeProfilerLog.h"
#include "UnlinkedCodeBlock.h"
#include "VMEntryScope.h"
#include "VMInlines.h"
#include "VMInspector.h"
#include "VariableEnvironment.h"
#include "WasmWorklist.h"
#include "Watchdog.h"
#include "WeakGCMapInlines.h"
#include "WebAssemblyFunction.h"
#include "WebAssemblyFunctionHeapCellType.h"
#include "WebAssemblyWrapperFunction.h"
#include <wtf/ProcessID.h>
#include <wtf/ReadWriteLock.h>
#include <wtf/SimpleStats.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/text/AtomicStringTable.h>
#include <wtf/text/SymbolRegistry.h>

#if ENABLE(C_LOOP)
#include "CLoopStack.h"
#include "CLoopStackInlines.h"
#endif

#if ENABLE(DFG_JIT)
#include "ConservativeRoots.h"
#endif

#if ENABLE(REGEXP_TRACING)
#include "RegExp.h"
#endif

namespace JSC {

#if ENABLE(JIT)
#if !ASSERT_DISABLED
bool VM::s_canUseJITIsSet = false;
#endif
bool VM::s_canUseJIT = false;
#endif

Atomic<unsigned> VM::s_numberOfIDs;

// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.

#if ENABLE(ASSEMBLER)
static bool enableAssembler()
{
    if (!Options::useJIT())
        return false;

    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    if (canUseJITString && !atoi(canUseJITString))
        return false;

    ExecutableAllocator::initializeUnderlyingAllocator();
    if (!ExecutableAllocator::singleton().isValid()) {
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

    return true;
}
#endif // ENABLE(ASSEMBLER)

bool VM::canUseAssembler()
{
#if ENABLE(ASSEMBLER)
    static std::once_flag onceKey;
    static bool enabled = false;
    std::call_once(onceKey, [] {
        enabled = enableAssembler();
    });
    return enabled;
#else
    return false; // interpreter only
#endif
}

void VM::computeCanUseJIT()
{
#if ENABLE(JIT)
#if !ASSERT_DISABLED
    RELEASE_ASSERT(!s_canUseJITIsSet);
    s_canUseJITIsSet = true;
#endif
    s_canUseJIT = VM::canUseAssembler() && Options::useJIT();
#endif
}

bool VM::isInMiniMode()
{
    return !canUseJIT() || Options::forceMiniVMMode();
}

inline unsigned VM::nextID()
{
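    // Atomically bump the global VM ID counter. compareExchangeWeak can fail,
    // either spuriously or because another thread raced us, so retry until our
    // increment lands; every VM thereby gets a unique, non-zero ID.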
    for (;;) {
        unsigned currentNumberOfIDs = s_numberOfIDs.load();
        unsigned newID = currentNumberOfIDs + 1;
        if (s_numberOfIDs.compareExchangeWeak(currentNumberOfIDs, newID))
            return newID;
    }
}

static bool vmCreationShouldCrash = false;

VM::VM(VMType vmType, HeapType heapType)
    : m_id(nextID())
    , m_apiLock(adoptRef(new JSLock(this)))
#if USE(CF)
    , m_runLoop(CFRunLoopGetCurrent())
#endif // USE(CF)
    , heap(this, heapType)
    , fastMallocAllocator(std::make_unique<FastMallocAlignedMemoryAllocator>())
    , primitiveGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::Primitive))
    , jsValueGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::JSValue))
    , auxiliaryHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary)))
    , immutableButterflyHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCellWithInteriorPointers)))
    , cellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCell)))
    , destructibleCellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(NeedsDestruction, HeapCell::JSCell)))
    , stringHeapCellType(std::make_unique<JSStringHeapCellType>())
    , destructibleObjectHeapCellType(std::make_unique<JSDestructibleObjectHeapCellType>())
#if ENABLE(WEBASSEMBLY)
    , webAssemblyCodeBlockHeapCellType(std::make_unique<JSWebAssemblyCodeBlockHeapCellType>())
    , webAssemblyFunctionHeapCellType(std::make_unique<WebAssemblyFunctionHeapCellType>())
#endif
    , primitiveGigacageAuxiliarySpace("Primitive Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), primitiveGigacageAllocator.get())
    , jsValueGigacageAuxiliarySpace("JSValue Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), jsValueGigacageAllocator.get())
    , immutableButterflyJSValueGigacageAuxiliarySpace("ImmutableButterfly Gigacage JSCellWithInteriorPointers", heap, immutableButterflyHeapCellType.get(), jsValueGigacageAllocator.get())
    , cellSpace("JSCell", heap, cellHeapCellType.get(), fastMallocAllocator.get())
    , jsValueGigacageCellSpace("JSValue Gigacage JSCell", heap, cellHeapCellType.get(), jsValueGigacageAllocator.get())
    , destructibleCellSpace("Destructible JSCell", heap, destructibleCellHeapCellType.get(), fastMallocAllocator.get())
    , stringSpace("JSString", heap, stringHeapCellType.get(), fastMallocAllocator.get())
    , destructibleObjectSpace("JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
    , eagerlySweptDestructibleObjectSpace("Eagerly Swept JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
    , executableToCodeBlockEdgeSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), ExecutableToCodeBlockEdge)
    , functionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSFunction)
    , internalFunctionSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), InternalFunction)
    , nativeExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), NativeExecutable)
    , propertyTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), PropertyTable)
    , structureRareDataSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), StructureRareData)
    , structureSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), Structure)
    , executableToCodeBlockEdgesWithConstraints(executableToCodeBlockEdgeSpace)
    , executableToCodeBlockEdgesWithFinalizers(executableToCodeBlockEdgeSpace)
    , codeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), CodeBlock)
    , functionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionExecutable)
    , programExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramExecutable)
    , unlinkedFunctionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), UnlinkedFunctionExecutable)
    , vmType(vmType)
    , clientData(0)
    , topEntryFrame(nullptr)
    , topCallFrame(CallFrame::noCaller())
    , promiseDeferredTimer(std::make_unique<PromiseDeferredTimer>(*this))
    , m_atomicStringTable(vmType == Default ? Thread::current().atomicStringTable() : new AtomicStringTable)
    , propertyNames(nullptr)
    , emptyList(new ArgList)
    , machineCodeBytesPerBytecodeWordForBaselineJIT(std::make_unique<SimpleStats>())
    , customGetterSetterFunctionMap(*this)
    , stringCache(*this)
    , symbolImplToSymbolMap(*this)
    , structureCache(*this)
    , interpreter(0)
    , entryScope(0)
    , m_regExpCache(new RegExpCache(this))
    , m_compactVariableMap(adoptRef(*(new CompactVariableMap)))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(0)
    , m_codeCache(std::make_unique<CodeCache>())
    , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
    , m_typeProfilerEnabledCount(0)
    , m_primitiveGigacageEnabled(IsWatched)
    , m_controlFlowProfilerEnabledCount(0)
{
    if (UNLIKELY(vmCreationShouldCrash))
        CRASH_WITH_INFO(0x4242424220202020, 0xbadbeef0badbeef, 0x1234123412341234, 0x1337133713371337);

    interpreter = new Interpreter(*this);
    StackBounds stack = Thread::current().stack();
    updateSoftReservedZoneSize(Options::softReservedZoneSize());
    setLastStackTop(stack.origin());

    JSRunLoopTimer::Manager::shared().registerVM(*this);

    // Need to be careful to keep everything consistent here
    JSLockHolder lock(this);
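    // Temporarily install this VM's atomic string table on the current thread so
    // that every identifier created during initialization is interned in the right
    // table; the previous table is restored once initialization is finished.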
    AtomicStringTable* existingEntryAtomicStringTable = Thread::current().setCurrentAtomicStringTable(m_atomicStringTable);
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));

    smallStrings.initializeCommonStrings(*this);

    propertyNames = new CommonIdentifiers(this);
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
    propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
    domAttributeGetterSetterStructure.set(*this, DOMAttributeGetterSetter::createStructure(*this, 0, jsNull()));
    scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
#if ENABLE(WEBASSEMBLY)
    webAssemblyCodeBlockStructure.set(*this, JSWebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
#endif
    moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
    symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
    fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));

    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithInt32) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithInt32));
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithDouble) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithDouble));
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithContiguous) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithContiguous));

    sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
    scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
    scriptFetchParametersStructure.set(*this, JSScriptFetchParameters::createStructure(*this, 0, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
    templateObjectDescriptorStructure.set(*this, JSTemplateObjectDescriptor::createStructure(*this, 0, jsNull()));
    arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpointSet::createStructure(*this));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
    if (VM::canUseJIT())
        inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
    functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
    exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
    promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
    internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
    nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
    programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
    moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
    functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
    hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
    hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
    bigIntStructure.set(*this, JSBigInt::createStructure(*this, 0, jsNull()));
    executableToCodeBlockEdgeStructure.set(*this, ExecutableToCodeBlockEdge::createStructure(*this, nullptr, jsNull()));

    // Eagerly initialize constant cells since the concurrent compiler can access them.
    if (canUseJIT()) {
        sentinelMapBucket();
        sentinelSetBucket();
    }

    Thread::current().setCurrentAtomicStringTable(existingEntryAtomicStringTable);

#if !ENABLE(C_LOOP)
    initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif

    Gigacage::addPrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);

    heap.notifyIsSafeToCollect();

    LLInt::Data::performAssertions(*this);

    if (UNLIKELY(Options::useProfiler())) {
        m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);

        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

    callFrameForCatch = nullptr;

    // Initialize this last, as a free way of asserting that VM initialization itself
    // won't use this.
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());

    m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);

    if (Options::useTypeProfiler())
        enableTypeProfiler();
    if (Options::useControlFlowProfiler())
        enableControlFlowProfiler();
#if ENABLE(SAMPLING_PROFILER)
    if (Options::useSamplingProfiler()) {
        setShouldBuildPCToCodeOriginMapping();
        Ref<Stopwatch> stopwatch = Stopwatch::create();
        stopwatch->start();
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
        if (Options::samplingProfilerPath())
            m_samplingProfiler->registerForReportAtExit();
        m_samplingProfiler->start();
    }
#endif // ENABLE(SAMPLING_PROFILER)

    if (Options::useRandomizingFuzzerAgent())
        setFuzzerAgent(std::make_unique<RandomizingFuzzerAgent>(*this));
    else if (Options::useDoublePredictionFuzzerAgent())
        setFuzzerAgent(std::make_unique<DoublePredictionFuzzerAgent>(*this));

    if (Options::alwaysGeneratePCToCodeOriginMap())
        setShouldBuildPCToCodeOriginMapping();

    if (Options::watchdog()) {
        Watchdog& watchdog = ensureWatchdog();
        watchdog.setTimeLimit(Seconds::fromMilliseconds(Options::watchdog()));
    }

#if ENABLE(JIT)
    // Make sure that any stubs that the JIT is going to use are initialized in non-compilation threads.
    if (canUseJIT()) {
        jitStubs = std::make_unique<JITThunks>();
#if ENABLE(FTL_JIT)
        ftlThunks = std::make_unique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)
        getCTIInternalFunctionTrampolineFor(CodeForCall);
        getCTIInternalFunctionTrampolineFor(CodeForConstruct);
    }
#endif

    if (Options::forceDebuggerBytecodeGeneration() || Options::alwaysUseShadowChicken())
        ensureShadowChicken();

    VMInspector::instance().add(this);
}

static ReadWriteLock s_destructionLock;

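// VM::~VM() holds s_destructionLock for reading for its entire duration, so
// acquiring the write lock here blocks until every in-flight VM destruction
// has finished, which is all this function needs to guarantee.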
void waitForVMDestruction()
{
    auto locker = holdLock(s_destructionLock.write());
}

VM::~VM()
{
    auto destructionLocker = holdLock(s_destructionLock.read());

    Gigacage::removePrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
    promiseDeferredTimer->stopRunningTasks();
#if ENABLE(WEBASSEMBLY)
    if (Wasm::Worklist* worklist = Wasm::existingWorklistOrNull())
        worklist->stopAllPlansForContext(wasmContext);
#endif
    if (UNLIKELY(m_watchdog))
        m_watchdog->willDestroyVM(this);
    m_traps.willDestroyVM();
    VMInspector::instance().remove(this);

    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(SAMPLING_PROFILER)
    if (m_samplingProfiler) {
        m_samplingProfiler->reportDataToOptionFile();
        m_samplingProfiler->shutdown();
    }
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
    if (JITWorklist* worklist = JITWorklist::existingGlobalWorklistOrNull())
        worklist->completeAllForVM(*this);
#endif // ENABLE(JIT)

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point to doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
            worklist->removeNonCompilingPlansForVM(*this);
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)

    waitForAsynchronousDisassembly();

    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler = nullptr;

    ASSERT(currentThreadIsHoldingAPILock());
    m_apiLock->willDestroyVM(this);
    smallStrings.setIsInitialized(false);
    heap.lastChanceToFinalize();

    JSRunLoopTimer::Manager::shared().unregisterVM(*this);

    delete interpreter;
#ifndef NDEBUG
    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif

    delete emptyList;

    delete propertyNames;
    if (vmType != Default)
        delete m_atomicStringTable;

    delete clientData;
    delete m_regExpCache;

#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

#if ENABLE(DFG_JIT)
    for (unsigned i = 0; i < m_scratchBuffers.size(); ++i)
        fastFree(m_scratchBuffers[i]);
#endif
}

void VM::primitiveGigacageDisabledCallback(void* argument)
{
    static_cast<VM*>(argument)->primitiveGigacageDisabled();
}

void VM::primitiveGigacageDisabled()
{
    if (m_apiLock->currentThreadIsHoldingLock()) {
        m_primitiveGigacageEnabled.fireAll(*this, "Primitive gigacage disabled");
        return;
    }

    // This is totally racy, and that's OK. The point is, it's up to the user to ensure that they pass the
    // uncaged buffer in a nicely synchronized manner.
    m_needToFirePrimitiveGigacageEnabled = true;
}

void VM::setLastStackTop(void* lastStackTop)
{
    m_lastStackTop = lastStackTop;
}

Ref<VM> VM::createContextGroup(HeapType heapType)
{
    return adoptRef(*new VM(APIContextGroup, heapType));
}

Ref<VM> VM::create(HeapType heapType)
{
    return adoptRef(*new VM(Default, heapType));
}

bool VM::sharedInstanceExists()
{
    return sharedInstanceInternal();
}

VM& VM::sharedInstance()
{
    GlobalJSLock globalLock;
    VM*& instance = sharedInstanceInternal();
    if (!instance)
        instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
    return *instance;
}

VM*& VM::sharedInstanceInternal()
{
    static VM* sharedInstance;
    return sharedInstance;
}

Watchdog& VM::ensureWatchdog()
{
    if (!m_watchdog)
        m_watchdog = adoptRef(new Watchdog(this));
    return *m_watchdog;
}

HeapProfiler& VM::ensureHeapProfiler()
{
    if (!m_heapProfiler)
        m_heapProfiler = std::make_unique<HeapProfiler>(*this);
    return *m_heapProfiler;
}

#if ENABLE(SAMPLING_PROFILER)
SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
{
    if (!m_samplingProfiler)
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
    return *m_samplingProfiler;
}
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
    switch (intrinsic) {
    case CharCodeAtIntrinsic:
        return charCodeAtThunkGenerator;
    case CharAtIntrinsic:
        return charAtThunkGenerator;
    case Clz32Intrinsic:
        return clz32ThunkGenerator;
    case FromCharCodeIntrinsic:
        return fromCharCodeThunkGenerator;
    case SqrtIntrinsic:
        return sqrtThunkGenerator;
    case AbsIntrinsic:
        return absThunkGenerator;
    case FloorIntrinsic:
        return floorThunkGenerator;
    case CeilIntrinsic:
        return ceilThunkGenerator;
    case TruncIntrinsic:
        return truncThunkGenerator;
    case RoundIntrinsic:
        return roundThunkGenerator;
    case ExpIntrinsic:
        return expThunkGenerator;
    case LogIntrinsic:
        return logThunkGenerator;
    case IMulIntrinsic:
        return imulThunkGenerator;
    case RandomIntrinsic:
        return randomThunkGenerator;
    case BoundThisNoArgsFunctionCallIntrinsic:
        return boundThisNoArgsFunctionCallGenerator;
    default:
        return nullptr;
    }
}

#endif // ENABLE(JIT)

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
    return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
}

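// The NativeJITCode singletons below are created on first use under std::call_once
// and intentionally never destroyed: they are immortal and shared by every
// NativeExecutable that wraps a host function when the JIT is unavailable.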
static Ref<NativeJITCode> jitCodeForCallTrampoline()
{
    static NativeJITCode* result;
    static std::once_flag onceKey;
    std::call_once(onceKey, [&] {
        result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_call_trampoline), JITType::HostCallThunk, NoIntrinsic);
    });
    return makeRef(*result);
}

static Ref<NativeJITCode> jitCodeForConstructTrampoline()
{
    static NativeJITCode* result;
    static std::once_flag onceKey;
    std::call_once(onceKey, [&] {
        result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_construct_trampoline), JITType::HostCallThunk, NoIntrinsic);
    });
    return makeRef(*result);
}

NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        return jitStubs->hostFunctionStub(
            this, function, constructor,
            intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
            intrinsic, signature, name);
    }
#endif // ENABLE(JIT)
    UNUSED_PARAM(intrinsic);
    UNUSED_PARAM(signature);
    return NativeExecutable::create(*this, jitCodeForCallTrampoline(), function, jitCodeForConstructTrampoline(), constructor, name);
}

MacroAssemblerCodePtr<JSEntryPtrTag> VM::getCTIInternalFunctionTrampolineFor(CodeSpecializationKind kind)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        if (kind == CodeForCall)
            return jitStubs->ctiInternalFunctionCall(this).retagged<JSEntryPtrTag>();
        return jitStubs->ctiInternalFunctionConstruct(this).retagged<JSEntryPtrTag>();
    }
#endif
    if (kind == CodeForCall)
        return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_call_trampoline);
    return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_construct_trampoline);
}

VM::ClientData::~ClientData()
{
}

void VM::resetDateCache()
{
    localTimeOffsetCache.reset();
    cachedDateString = String();
    cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
    dateInstanceCache.reset();
}

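// If no VMEntryScope is on the stack, the VM is not currently executing JS and the
// callback runs immediately; otherwise it is deferred until the outermost entry
// scope pops, i.e. until the VM next becomes idle.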
void VM::whenIdle(Function<void()>&& callback)
{
    if (!entryScope) {
        callback();
        return;
    }

    entryScope->addDidPopListener(WTFMove(callback));
}

void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        heap.deleteAllCodeBlocks(effort);
    });
}

void VM::deleteAllCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        m_codeCache->clear();
        m_regExpCache->deleteAllCode();
        heap.deleteAllCodeBlocks(effort);
        heap.deleteAllUnlinkedCodeBlocks(effort);
        heap.reportAbandonedObjectGraph();
    });
}

void VM::shrinkFootprintWhenIdle()
{
    whenIdle([=] () {
        sanitizeStackForVM(this);
        deleteAllCode(DeleteAllCodeIfNotCollecting);
        heap.collectNow(Synchronousness::Sync, CollectionScope::Full);
        // FIXME: Consider stopping various automatic threads here.
        // https://bugs.webkit.org/show_bug.cgi?id=185447
        WTF::releaseFastMallocFreeMemory();
    });
}

SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
{
    auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
    if (addResult.isNewEntry)
        addResult.iterator->value = adoptRef(new SourceProviderCache);
    return addResult.iterator->value.get();
}

void VM::clearSourceProviderCaches()
{
    sourceProviderCacheMap.clear();
}

Exception* VM::throwException(ExecState* exec, Exception* exception)
{
    ASSERT(exec == topCallFrame || exec->isGlobalExec() || exec == exec->lexicalGlobalObject()->callFrameAtDebuggerEntry());
    CallFrame* throwOriginFrame = exec->isGlobalExec() ? exec : topJSCallFrame();

    if (Options::breakOnThrow()) {
        CodeBlock* codeBlock = throwOriginFrame ? throwOriginFrame->codeBlock() : nullptr;
        dataLog("Throwing exception in call frame ", RawPointer(throwOriginFrame), " for code block ", codeBlock, "\n");
        CRASH();
    }

    interpreter->notifyDebuggerOfExceptionToBeThrown(*this, throwOriginFrame, exception);

    setException(exception);

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
    m_nativeStackTraceOfLastThrow = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
    m_throwingThread = &Thread::current();
#endif
    return exception;
}

Exception* VM::throwException(ExecState* exec, JSValue thrownValue)
{
    VM& vm = *this;
    Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
    if (!exception)
        exception = Exception::create(*this, thrownValue);

    return throwException(exec, exception);
}

Exception* VM::throwException(ExecState* exec, JSObject* error)
{
    return throwException(exec, JSValue(error));
}

void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
    updateStackLimits();
}

size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
{
    size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
    m_currentSoftReservedZoneSize = softReservedZoneSize;
#if ENABLE(C_LOOP)
    interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
#endif

    updateStackLimits();

    return oldSoftReservedZoneSize;
}

#if OS(WINDOWS)
// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
// where the guard page is a barrier between committed and uncommitted memory.
// When data from the guard page is read or written, the guard page is moved, and memory is committed.
// This is how the system grows the stack.
// When using the C stack on Windows we need to precommit the needed stack space.
// Otherwise we might crash later if we access uncommitted stack memory.
// This can happen if we allocate stack space larger than the page guard size (4K).
// The system does not get the chance to move the guard page, and commit more memory,
// and we crash if uncommitted memory is accessed.
// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
// when needed, see http://support.microsoft.com/kb/100775.
// By touching every page up to the stack limit with a dummy operation,
// we force the system to move the guard page, and commit memory.

static void preCommitStackMemory(void* stackLimit)
{
    const int pageSize = 4096;
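    // &stackLimit is the address of this function's own parameter slot, which
    // lives near the current top of the stack. Walking down from there to
    // stackLimit and doing a dummy read/write on one byte per page forces the
    // system to move the guard page and commit the intervening memory.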
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}
#endif

inline void VM::updateStackLimits()
{
#if OS(WINDOWS)
    void* lastSoftStackLimit = m_softStackLimit;
#endif

    const StackBounds& stack = Thread::current().stack();
    size_t reservedZoneSize = Options::reservedZoneSize();
    // We should have already ensured that Options::reservedZoneSize() >= minimumReserveZoneSize at
    // options initialization time, and the option value should not have been changed thereafter.
    // We don't have the ability to assert here that it hasn't changed, but we can at least assert
    // that the value is sane.
    RELEASE_ASSERT(reservedZoneSize >= minimumReservedZoneSize);

    if (m_stackPointerAtVMEntry) {
        ASSERT(stack.isGrowingDownward());
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
        m_softStackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
        m_stackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
    } else {
        m_softStackLimit = stack.recursionLimit(m_currentSoftReservedZoneSize);
        m_stackLimit = stack.recursionLimit(reservedZoneSize);
    }

#if OS(WINDOWS)
    // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
    // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
    // generated code which can allocate stack space that the C++ compiler does not know
    // about. As such, we have to precommit that stack memory manually.
    //
    // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
    // used exclusively by C++ code, and the C++ compiler will automatically commit the
    // needed stack pages.
    if (lastSoftStackLimit != m_softStackLimit)
        preCommitStackMemory(m_softStackLimit);
#endif
}

#if ENABLE(DFG_JIT)
void VM::gatherScratchBufferRoots(ConservativeRoots& conservativeRoots)
{
    auto lock = holdLock(m_scratchBufferLock);
    for (auto* scratchBuffer : m_scratchBuffers) {
        if (scratchBuffer->activeLength()) {
            void* bufferStart = scratchBuffer->dataBuffer();
            conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
        }
    }
}
#endif

void logSanitizeStack(VM* vm)
{
    if (Options::verboseSanitizeStack() && vm->topCallFrame) {
        int dummy;
        auto& stackBounds = Thread::current().stack();
        dataLog(
            "Sanitizing stack for VM = ", RawPointer(vm), " with top call frame at ", RawPointer(vm->topCallFrame),
            ", current stack pointer at ", RawPointer(&dummy), ", in ",
            pointerDump(vm->topCallFrame->codeBlock()), ", last code origin = ",
            vm->topCallFrame->codeOrigin(), ", last stack top = ", RawPointer(vm->lastStackTop()), ", in stack range [", RawPointer(stackBounds.origin()), ", ", RawPointer(stackBounds.end()), "]\n");
    }
}

#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
char* VM::acquireRegExpPatternContexBuffer()
{
    m_regExpPatternContextLock.lock();
    ASSERT(m_regExpPatternContextLock.isLocked());
    if (!m_regExpPatternContexBuffer)
        m_regExpPatternContexBuffer = makeUniqueArray<char>(VM::patternContextBufferSize);
    return m_regExpPatternContexBuffer.get();
}

void VM::releaseRegExpPatternContexBuffer()
{
    ASSERT(m_regExpPatternContextLock.isLocked());

    m_regExpPatternContextLock.unlock();
}
#endif

#if ENABLE(REGEXP_TRACING)
void VM::addRegExpToTrace(RegExp* regExp)
{
    gcProtect(regExp);
    m_rtTraceList->add(regExp);
}

void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype ctor and not used.
    RTTraceList::iterator iter = ++m_rtTraceList->begin();

    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                            8 Bit            16 Bit          match()    Matches    Average\n");
        dataLogF(" <Match only / Match>                       JIT Addr          JIT Address      calls      found    String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");

        unsigned reCount = 0;

        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
            gcUnprotect(*iter);
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }

    m_rtTraceList->clear();
}
#else
void VM::dumpRegExpTrace()
{
}
#endif

WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
{
    auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
    if (result.isNewEntry)
        result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
    return result.iterator->value.get();
}

void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
{
    ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
}

void VM::addImpureProperty(const String& propertyName)
{
    if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
        watchpointSet->fireAll(*this, "Impure property added");
}

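// The profilers are reference counted: only the 0 -> 1 enable transition and the
// 1 -> 0 disable transition actually change profiler state, and only those
// transitions require recompiling existing code.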
template<typename Func>
static bool enableProfilerWithRespectToCount(unsigned& counter, const Func& doEnableWork)
{
    bool needsToRecompile = false;
    if (!counter) {
        doEnableWork();
        needsToRecompile = true;
    }
    counter++;

    return needsToRecompile;
}

template<typename Func>
static bool disableProfilerWithRespectToCount(unsigned& counter, const Func& doDisableWork)
{
    RELEASE_ASSERT(counter > 0);
    bool needsToRecompile = false;
    counter--;
    if (!counter) {
        doDisableWork();
        needsToRecompile = true;
    }

    return needsToRecompile;
}

bool VM::enableTypeProfiler()
{
    auto enableTypeProfiler = [this] () {
        this->m_typeProfiler = std::make_unique<TypeProfiler>();
        this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>(*this);
    };

    return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
}

bool VM::disableTypeProfiler()
{
    auto disableTypeProfiler = [this] () {
        this->m_typeProfiler.reset(nullptr);
        this->m_typeProfilerLog.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
}

bool VM::enableControlFlowProfiler()
{
    auto enableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
    };

    return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
}

bool VM::disableControlFlowProfiler()
{
    auto disableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
}

void VM::dumpTypeProfilerData()
{
    if (!typeProfiler())
        return;

    typeProfilerLog()->processLogEntries(*this, "VM Dump Types"_s);
    typeProfiler()->dumpTypeProfilerData(*this);
}

void VM::queueMicrotask(JSGlobalObject& globalObject, Ref<Microtask>&& task)
{
    m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, &globalObject, WTFMove(task)));
}

void VM::drainMicrotasks()
{
    while (!m_microtaskQueue.isEmpty()) {
        m_microtaskQueue.takeFirst()->run();
        if (m_onEachMicrotaskTick)
            m_onEachMicrotaskTick(*this);
    }
}

void QueuedTask::run()
{
    m_microtask->run(m_globalObject->globalExec());
}

void sanitizeStackForVM(VM* vm)
{
    logSanitizeStack(vm);
    if (vm->topCallFrame) {
        auto& stackBounds = Thread::current().stack();
        ASSERT(vm->currentThreadIsHoldingAPILock());
        ASSERT_UNUSED(stackBounds, stackBounds.contains(vm->lastStackTop()));
    }
#if ENABLE(C_LOOP)
    vm->interpreter->cloopStack().sanitizeStack();
#else
    sanitizeStackForVMImpl(vm);
#endif
}

size_t VM::committedStackByteCount()
{
#if !ENABLE(C_LOOP)
    // When using the C stack, we don't know how many stack pages are actually
    // committed. So, we use the current stack usage as an estimate.
    ASSERT(Thread::current().stack().isGrowingDownward());
    uint8_t* current = bitwise_cast<uint8_t*>(currentStackPointer());
    uint8_t* high = bitwise_cast<uint8_t*>(Thread::current().stack().origin());
    return high - current;
#else
    return CLoopStack::committedByteCount();
#endif
}

#if ENABLE(C_LOOP)
bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
{
    return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
}

bool VM::isSafeToRecurseSoftCLoop() const
{
    return interpreter->cloopStack().isSafeToRecurse();
}
#endif // ENABLE(C_LOOP)

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
{
    if (!Options::validateExceptionChecks())
        return;

    if (UNLIKELY(m_needExceptionCheck)) {
        auto throwDepth = m_simulatedThrowPointRecursionDepth;
        auto& throwLocation = m_simulatedThrowPointLocation;

        dataLog(
            "ERROR: Unchecked JS exception:\n"
            "    This scope can throw a JS exception: ", throwLocation, "\n"
            "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
            "    But the exception was unchecked as of this scope: ", location, "\n"
            "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
            "\n");

        StringPrintStream out;
        std::unique_ptr<StackTrace> currentTrace = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());

        if (Options::dumpSimulatedThrows()) {
            out.println("The simulated exception was thrown at:");
            m_nativeStackTraceOfLastSimulatedThrow->dump(out, "    ");
            out.println();
        }
        out.println("Unchecked exception detected at:");
        currentTrace->dump(out, "    ");
        out.println();

        dataLog(out.toCString());
        RELEASE_ASSERT(!m_needExceptionCheck);
    }
}
#endif

#if USE(CF)
void VM::setRunLoop(CFRunLoopRef runLoop)
{
    ASSERT(runLoop);
    m_runLoop = runLoop;
    JSRunLoopTimer::Manager::shared().didChangeRunLoop(*this, runLoop);
}
#endif // USE(CF)

ScratchBuffer* VM::scratchBufferForSize(size_t size)
{
    if (!size)
        return nullptr;

    auto locker = holdLock(m_scratchBufferLock);

    if (size > m_sizeOfLastScratchBuffer) {
        // Protect against an N^2 memory usage pathology by ensuring
        // that at worst, we get a geometric series, meaning that the
        // total memory usage is somewhere around
        // max(scratch buffer size) * 4.
        m_sizeOfLastScratchBuffer = size * 2;

        ScratchBuffer* newBuffer = ScratchBuffer::create(m_sizeOfLastScratchBuffer);
        RELEASE_ASSERT(newBuffer);
        m_scratchBuffers.append(newBuffer);
    }

    ScratchBuffer* result = m_scratchBuffers.last();
    return result;
}

void VM::clearScratchBuffers()
{
    auto lock = holdLock(m_scratchBufferLock);
    for (auto* scratchBuffer : m_scratchBuffers)
        scratchBuffer->setActiveLength(0);
}

void VM::ensureShadowChicken()
{
    if (m_shadowChicken)
        return;
    m_shadowChicken = std::make_unique<ShadowChicken>();
}

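// The store-store fence in the slow paths below ensures the subspace is fully
// constructed before its pointer is published into m_##name, so a concurrent
// thread that observes a non-null pointer on the fast path never sees a
// partially initialized subspace.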
#define DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
    IsoSubspace* VM::name##Slow() \
    { \
        ASSERT(!m_##name); \
        auto space = std::make_unique<IsoSubspace> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
        WTF::storeStoreFence(); \
        m_##name = WTFMove(space); \
        return m_##name.get(); \
    }


DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(boundFunctionSpace, cellHeapCellType.get(), JSBoundFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(callbackFunctionSpace, destructibleObjectHeapCellType.get(), JSCallbackFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(customGetterSetterFunctionSpace, cellHeapCellType.get(), JSCustomGetterSetterFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(errorInstanceSpace, destructibleObjectHeapCellType.get(), ErrorInstance)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(nativeStdFunctionSpace, cellHeapCellType.get(), JSNativeStdFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(proxyRevokeSpace, destructibleObjectHeapCellType.get(), ProxyRevoke)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakMapSpace, destructibleObjectHeapCellType.get(), JSWeakMap)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakSetSpace, destructibleObjectHeapCellType.get(), JSWeakSet)
#if JSC_OBJC_API_ENABLED
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(objCCallbackFunctionSpace, destructibleObjectHeapCellType.get(), ObjCCallbackFunction)
#endif
#if ENABLE(WEBASSEMBLY)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyCodeBlockSpace, webAssemblyCodeBlockHeapCellType.get(), JSWebAssemblyCodeBlock)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyFunctionSpace, webAssemblyFunctionHeapCellType.get(), WebAssemblyFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyWrapperFunctionSpace, cellHeapCellType.get(), WebAssemblyWrapperFunction)
#endif

#undef DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW

#define DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
    IsoSubspace* VM::name##Slow() \
    { \
        ASSERT(!m_##name); \
        auto space = std::make_unique<SpaceAndSet> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
        WTF::storeStoreFence(); \
        m_##name = WTFMove(space); \
        return &m_##name->space; \
    }

DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(inferredValueSpace, destructibleCellHeapCellType.get(), InferredValue)
DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(evalExecutableSpace, destructibleCellHeapCellType.get(), EvalExecutable)
DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(moduleProgramExecutableSpace, destructibleCellHeapCellType.get(), ModuleProgramExecutable)

#undef DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW

Structure* VM::setIteratorStructureSlow()
{
    ASSERT(!m_setIteratorStructure);
    m_setIteratorStructure.set(*this, JSSetIterator::createStructure(*this, 0, jsNull()));
    return m_setIteratorStructure.get();
}

Structure* VM::mapIteratorStructureSlow()
{
    ASSERT(!m_mapIteratorStructure);
    m_mapIteratorStructure.set(*this, JSMapIterator::createStructure(*this, 0, jsNull()));
    return m_mapIteratorStructure.get();
}

JSCell* VM::sentinelSetBucketSlow()
{
    ASSERT(!m_sentinelSetBucket);
    auto* sentinel = JSSet::BucketType::createSentinel(*this);
    m_sentinelSetBucket.set(*this, sentinel);
    return sentinel;
}

JSCell* VM::sentinelMapBucketSlow()
{
    ASSERT(!m_sentinelMapBucket);
    auto* sentinel = JSMap::BucketType::createSentinel(*this);
    m_sentinelMapBucket.set(*this, sentinel);
    return sentinel;
}

JSGlobalObject* VM::vmEntryGlobalObject(const CallFrame* callFrame) const
{
    if (callFrame && callFrame->isGlobalExec()) {
        ASSERT(callFrame->callee().isCell() && callFrame->callee().asCell()->isObject());
        ASSERT(callFrame == callFrame->lexicalGlobalObject()->globalExec());
        return callFrame->lexicalGlobalObject();
    }
    ASSERT(entryScope);
    return entryScope->globalObject();
}

void VM::setCrashOnVMCreation(bool shouldCrash)
{
    vmCreationShouldCrash = shouldCrash;
}

} // namespace JSC