/*
 * Copyright (C) 2011-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#include "BytecodeStructs.h"
#include "CodeBlock.h"
#include "CodeSpecializationKind.h"
#include "DirectArguments.h"
#include "ExceptionHelpers.h"
#include "FunctionCodeBlock.h"
#include "JSImmutableButterfly.h"
#include "ScopedArguments.h"
#include "SlowPathReturnType.h"
#include "StackAlignment.h"
#include "VMInlines.h"
#include <wtf/StdLibExtras.h>

namespace JSC {

// The purpose of this namespace is to include slow paths that are shared
// between the interpreter and baseline JIT. They are written to be agnostic
// with respect to the slow-path calling convention, but they do rely on the
// JS code being executed more-or-less directly from bytecode (so the call
// frame layout is unmodified, making it potentially awkward to use these
// from any optimizing JIT, like the DFG).

namespace CommonSlowPaths {

ALWAYS_INLINE int numberOfExtraSlots(int argumentCountIncludingThis)
{
    int frameSize = argumentCountIncludingThis + CallFrame::headerSizeInRegisters;
    int alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), frameSize);
    return alignedFrameSize - frameSize;
}

ALWAYS_INLINE int numberOfStackPaddingSlots(CodeBlock* codeBlock, int argumentCountIncludingThis)
{
    if (argumentCountIncludingThis >= codeBlock->numParameters())
        return 0;
    int alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), argumentCountIncludingThis + CallFrame::headerSizeInRegisters);
    int alignedFrameSizeForParameters = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), codeBlock->numParameters() + CallFrame::headerSizeInRegisters);
    return alignedFrameSizeForParameters - alignedFrameSize;
}

ALWAYS_INLINE int numberOfStackPaddingSlotsWithExtraSlots(CodeBlock* codeBlock, int argumentCountIncludingThis)
{
    if (argumentCountIncludingThis >= codeBlock->numParameters())
        return 0;
    return numberOfStackPaddingSlots(codeBlock, argumentCountIncludingThis) + numberOfExtraSlots(argumentCountIncludingThis);
}
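
// A worked example of the padding arithmetic above. The concrete numbers are only a sketch:
// CallFrame::headerSizeInRegisters and stackAlignmentRegisters() are platform-dependent, and the
// values 5 and 2 below (as well as codeBlock->numParameters() == 6) are assumed for illustration.
//
//     numberOfExtraSlots(4);                                 // frameSize = 4 + 5 = 9, aligned up to 10 => 1 extra slot
//     numberOfStackPaddingSlots(codeBlock, 2);               // aligned caller frame = 8, aligned frame for 6 parameters = 12 => 4 slots
//     numberOfStackPaddingSlotsWithExtraSlots(codeBlock, 2); // 4 + 1 = 5 slots
//
// arityCheckFor() below uses the last of these to decide how much the frame must grow (and whether
// the stack can accommodate the growth) before entering a callee that declares more parameters
// than were actually passed.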

ALWAYS_INLINE CodeBlock* codeBlockFromCallFrameCallee(CallFrame* callFrame, CodeSpecializationKind kind)
{
    JSFunction* callee = jsCast<JSFunction*>(callFrame->jsCallee());
    ASSERT(!callee->isHostFunction());
    return callee->jsExecutable()->codeBlockFor(kind);
}

ALWAYS_INLINE int arityCheckFor(VM& vm, CallFrame* callFrame, CodeSpecializationKind kind)
{
    CodeBlock* newCodeBlock = codeBlockFromCallFrameCallee(callFrame, kind);
    ASSERT(callFrame->argumentCountIncludingThis() < static_cast<unsigned>(newCodeBlock->numParameters()));
    int padding = numberOfStackPaddingSlotsWithExtraSlots(newCodeBlock, callFrame->argumentCountIncludingThis());

    Register* newStack = callFrame->registers() - WTF::roundUpToMultipleOf(stackAlignmentRegisters(), padding);

    if (UNLIKELY(!vm.ensureStackCapacityFor(newStack)))
        return -1;
    return padding;
}

inline bool opInByVal(JSGlobalObject* globalObject, JSValue baseVal, JSValue propName, ArrayProfile* arrayProfile = nullptr)
{
    VM& vm = getVM(globalObject);
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (!baseVal.isObject()) {
        throwException(globalObject, scope, createInvalidInParameterError(globalObject, baseVal));
        return false;
    }

    JSObject* baseObj = asObject(baseVal);
    if (arrayProfile)
        arrayProfile->observeStructure(baseObj->structure(vm));

    uint32_t i;
    if (propName.getUInt32(i)) {
        if (arrayProfile)
            arrayProfile->observeIndexedRead(vm, baseObj, i);
        RELEASE_AND_RETURN(scope, baseObj->hasProperty(globalObject, i));
    }

    auto property = propName.toPropertyKey(globalObject);
    RETURN_IF_EXCEPTION(scope, false);
    RELEASE_AND_RETURN(scope, baseObj->hasProperty(globalObject, property));
}
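
// opInByVal() implements the JavaScript `in` operator (it backs slow_path_in_by_val, declared
// below). Roughly, at the JS level (a sketch, not exhaustive):
//
//     "foo" in 42;          // throws a TypeError: the right-hand side is not an object
//     0 in [1, 2, 3];       // integer-like keys take the indexed hasProperty() path above
//     "length" in [1, 2];   // everything else goes through toPropertyKey() + hasProperty()
//
// The optional ArrayProfile lets the baseline JIT record which structures and indexing shapes
// this site actually sees.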

inline void tryCachePutToScopeGlobal(
    JSGlobalObject* globalObject, CodeBlock* codeBlock, OpPutToScope& bytecode, JSObject* scope,
    PutPropertySlot& slot, const Identifier& ident)
{
    // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
    auto& metadata = bytecode.metadata(codeBlock);
    ResolveType resolveType = metadata.m_getPutInfo.resolveType();

    switch (resolveType) {
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        if (scope->isGlobalObject()) {
            ResolveType newResolveType = needsVarInjectionChecks(resolveType) ? GlobalPropertyWithVarInjectionChecks : GlobalProperty;
            resolveType = newResolveType; // Allow the caching mechanism below to kick in.
            ConcurrentJSLocker locker(codeBlock->m_lock);
            metadata.m_getPutInfo = GetPutInfo(metadata.m_getPutInfo.resolveMode(), newResolveType, metadata.m_getPutInfo.initializationMode());
            break;
        }
        FALLTHROUGH;
    }
    case GlobalProperty:
    case GlobalPropertyWithVarInjectionChecks: {
        // The Global Lexical Binding Epoch has changed. Update op_put_to_scope from GlobalProperty to GlobalLexicalVar.
        if (scope->isGlobalLexicalEnvironment()) {
            JSGlobalLexicalEnvironment* globalLexicalEnvironment = jsCast<JSGlobalLexicalEnvironment*>(scope);
            ResolveType newResolveType = needsVarInjectionChecks(resolveType) ? GlobalLexicalVarWithVarInjectionChecks : GlobalLexicalVar;
            SymbolTableEntry entry = globalLexicalEnvironment->symbolTable()->get(ident.impl());
            ASSERT(!entry.isNull());
            ConcurrentJSLocker locker(codeBlock->m_lock);
            metadata.m_getPutInfo = GetPutInfo(metadata.m_getPutInfo.resolveMode(), newResolveType, metadata.m_getPutInfo.initializationMode());
            metadata.m_watchpointSet = entry.watchpointSet();
            metadata.m_operand = reinterpret_cast<uintptr_t>(globalLexicalEnvironment->variableAt(entry.scopeOffset()).slot());
            return;
        }
        break;
    }
    default:
        return;
    }

    if (resolveType == GlobalProperty || resolveType == GlobalPropertyWithVarInjectionChecks) {
        VM& vm = getVM(globalObject);
        JSGlobalObject* globalObject = codeBlock->globalObject();
        ASSERT(globalObject == scope || globalObject->varInjectionWatchpoint()->hasBeenInvalidated());
        if (!slot.isCacheablePut()
            || slot.base() != scope
            || scope != globalObject
            || !scope->structure(vm)->propertyAccessesAreCacheable())
            return;

        if (slot.type() == PutPropertySlot::NewProperty) {
            // Don't cache if we've done a transition. We want to detect the first replace so that we
            // can invalidate the watchpoint.
            return;
        }

        scope->structure(vm)->didCachePropertyReplacement(vm, slot.cachedOffset());

        ConcurrentJSLocker locker(codeBlock->m_lock);
        metadata.m_structure.set(vm, codeBlock, scope->structure(vm));
        metadata.m_operand = slot.cachedOffset();
    }
}

inline void tryCacheGetFromScopeGlobal(
    JSGlobalObject* globalObject, CodeBlock* codeBlock, VM& vm, OpGetFromScope& bytecode, JSObject* scope, PropertySlot& slot, const Identifier& ident)
{
    auto& metadata = bytecode.metadata(codeBlock);
    ResolveType resolveType = metadata.m_getPutInfo.resolveType();

    switch (resolveType) {
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        if (scope->isGlobalObject()) {
            ResolveType newResolveType = needsVarInjectionChecks(resolveType) ? GlobalPropertyWithVarInjectionChecks : GlobalProperty;
            resolveType = newResolveType; // Allow the caching mechanism below to kick in.
            ConcurrentJSLocker locker(codeBlock->m_lock);
            metadata.m_getPutInfo = GetPutInfo(metadata.m_getPutInfo.resolveMode(), newResolveType, metadata.m_getPutInfo.initializationMode());
            break;
        }
        FALLTHROUGH;
    }
    case GlobalProperty:
    case GlobalPropertyWithVarInjectionChecks: {
        // The Global Lexical Binding Epoch has changed. Update op_get_from_scope from GlobalProperty to GlobalLexicalVar.
        if (scope->isGlobalLexicalEnvironment()) {
            JSGlobalLexicalEnvironment* globalLexicalEnvironment = jsCast<JSGlobalLexicalEnvironment*>(scope);
            ResolveType newResolveType = needsVarInjectionChecks(resolveType) ? GlobalLexicalVarWithVarInjectionChecks : GlobalLexicalVar;
            SymbolTableEntry entry = globalLexicalEnvironment->symbolTable()->get(ident.impl());
            ASSERT(!entry.isNull());
            ConcurrentJSLocker locker(codeBlock->m_lock);
            metadata.m_getPutInfo = GetPutInfo(metadata.m_getPutInfo.resolveMode(), newResolveType, metadata.m_getPutInfo.initializationMode());
            metadata.m_watchpointSet = entry.watchpointSet();
            metadata.m_operand = reinterpret_cast<uintptr_t>(globalLexicalEnvironment->variableAt(entry.scopeOffset()).slot());
            return;
        }
        break;
    }
    default:
        return;
    }

    // Covers implicit globals. Since they don't exist until they first execute, we didn't know how to cache them at compile time.
    if (resolveType == GlobalProperty || resolveType == GlobalPropertyWithVarInjectionChecks) {
        ASSERT(scope == globalObject || globalObject->varInjectionWatchpoint()->hasBeenInvalidated());
        if (slot.isCacheableValue() && slot.slotBase() == scope && scope == globalObject && scope->structure(vm)->propertyAccessesAreCacheable()) {
            Structure* structure = scope->structure(vm);
            {
                ConcurrentJSLocker locker(codeBlock->m_lock);
                metadata.m_structure.set(vm, codeBlock, structure);
                metadata.m_operand = slot.cachedOffset();
            }
            structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
        }
    }
}
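
// Both helpers above serve the same purpose: once the scope a variable actually resolves to is
// known at run time, they rewrite the bytecode metadata so later executions can take a fast path.
// UnresolvedProperty(WithVarInjectionChecks) is promoted to GlobalProperty or GlobalLexicalVar as
// appropriate, and for cacheable global-object accesses the Structure and property offset are
// recorded. The updates happen under the CodeBlock's ConcurrentJSLocker so that concurrent users
// of the metadata (e.g. compiler threads) see a consistent snapshot.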

inline bool canAccessArgumentIndexQuickly(JSObject& object, uint32_t index)
{
    switch (object.type()) {
    case DirectArgumentsType: {
        DirectArguments* directArguments = jsCast<DirectArguments*>(&object);
        if (directArguments->isMappedArgumentInDFG(index))
            return true;
        break;
    }
    case ScopedArgumentsType: {
        ScopedArguments* scopedArguments = jsCast<ScopedArguments*>(&object);
        if (scopedArguments->isMappedArgumentInDFG(index))
            return true;
        break;
    }
    default:
        break;
    }
    return false;
}

static ALWAYS_INLINE void putDirectWithReify(VM& vm, JSGlobalObject* globalObject, JSObject* baseObject, PropertyName propertyName, JSValue value, PutPropertySlot& slot, Structure** result = nullptr)
{
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (baseObject->inherits<JSFunction>(vm)) {
        jsCast<JSFunction*>(baseObject)->reifyLazyPropertyIfNeeded(vm, globalObject, propertyName);
        RETURN_IF_EXCEPTION(scope, void());
    }
    if (result)
        *result = baseObject->structure(vm);
    scope.release();
    baseObject->putDirect(vm, propertyName, value, slot);
}

static ALWAYS_INLINE void putDirectAccessorWithReify(VM& vm, JSGlobalObject* globalObject, JSObject* baseObject, PropertyName propertyName, GetterSetter* accessor, unsigned attribute)
{
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (baseObject->inherits<JSFunction>(vm)) {
        jsCast<JSFunction*>(baseObject)->reifyLazyPropertyIfNeeded(vm, globalObject, propertyName);
        RETURN_IF_EXCEPTION(scope, void());
    }
    scope.release();
    baseObject->putDirectAccessor(globalObject, propertyName, accessor, attribute);
}

inline JSArray* allocateNewArrayBuffer(VM& vm, Structure* structure, JSImmutableButterfly* immutableButterfly)
{
    JSGlobalObject* globalObject = structure->globalObject();
    Structure* originalStructure = globalObject->originalArrayStructureForIndexingType(immutableButterfly->indexingMode());
    ASSERT(originalStructure->indexingMode() == immutableButterfly->indexingMode());
    ASSERT(isCopyOnWrite(immutableButterfly->indexingMode()));
    ASSERT(!structure->outOfLineCapacity());

    JSArray* result = JSArray::createWithButterfly(vm, nullptr, originalStructure, immutableButterfly->toButterfly());
    // FIXME: This works but it's slow. If we cared enough about the perf when having a bad time then we could fix it.
    if (UNLIKELY(originalStructure != structure)) {
        ASSERT(hasSlowPutArrayStorage(structure->indexingMode()));
        ASSERT(globalObject->isHavingABadTime());

        result->switchToSlowPutArrayStorage(vm);
        ASSERT(result->butterfly() != immutableButterfly->toButterfly());
        ASSERT(!result->butterfly()->arrayStorage()->m_sparseMap.get());
        ASSERT(result->structureID() == structure->id());
    }

    return result;
}
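
// allocateNewArrayBuffer() backs slow_path_new_array_buffer (declared below): the literal's
// elements live in a shared, copy-on-write JSImmutableButterfly, so the new array can point at
// that butterfly instead of copying it. Only when the global object is "having a bad time" (and
// the requested structure therefore uses SlowPutArrayStorage) is the array converted after the
// fact, which is the slow case the FIXME above refers to.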

} // namespace CommonSlowPaths

class CallFrame;
struct Instruction;

#define SLOW_PATH

#define SLOW_PATH_DECL(name) \
extern "C" SlowPathReturnType SLOW_PATH name(CallFrame* callFrame, const Instruction* pc)

#define SLOW_PATH_HIDDEN_DECL(name) \
SLOW_PATH_DECL(name) WTF_INTERNAL
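
// For reference, a declaration such as SLOW_PATH_HIDDEN_DECL(slow_path_not) expands (with
// SLOW_PATH currently defined as empty) to:
//
//     extern "C" SlowPathReturnType slow_path_not(CallFrame* callFrame, const Instruction* pc) WTF_INTERNAL;
//
// i.e. a C-linkage entry point that takes the current CallFrame and the bytecode Instruction and
// returns a SlowPathReturnType, which packs the two pointer-sized values these slow paths hand
// back to their caller.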

SLOW_PATH_HIDDEN_DECL(slow_path_call_arityCheck);
SLOW_PATH_HIDDEN_DECL(slow_path_construct_arityCheck);
SLOW_PATH_HIDDEN_DECL(slow_path_create_direct_arguments);
SLOW_PATH_HIDDEN_DECL(slow_path_create_scoped_arguments);
SLOW_PATH_HIDDEN_DECL(slow_path_create_cloned_arguments);
SLOW_PATH_HIDDEN_DECL(slow_path_create_this);
SLOW_PATH_HIDDEN_DECL(slow_path_enter);
SLOW_PATH_HIDDEN_DECL(slow_path_get_callee);
SLOW_PATH_HIDDEN_DECL(slow_path_to_this);
SLOW_PATH_HIDDEN_DECL(slow_path_throw_tdz_error);
SLOW_PATH_HIDDEN_DECL(slow_path_check_tdz);
SLOW_PATH_HIDDEN_DECL(slow_path_throw_strict_mode_readonly_property_write_error);
SLOW_PATH_HIDDEN_DECL(slow_path_not);
SLOW_PATH_HIDDEN_DECL(slow_path_eq);
SLOW_PATH_HIDDEN_DECL(slow_path_neq);
SLOW_PATH_HIDDEN_DECL(slow_path_stricteq);
SLOW_PATH_HIDDEN_DECL(slow_path_nstricteq);
SLOW_PATH_HIDDEN_DECL(slow_path_less);
SLOW_PATH_HIDDEN_DECL(slow_path_lesseq);
SLOW_PATH_HIDDEN_DECL(slow_path_greater);
SLOW_PATH_HIDDEN_DECL(slow_path_greatereq);
SLOW_PATH_HIDDEN_DECL(slow_path_inc);
SLOW_PATH_HIDDEN_DECL(slow_path_dec);
SLOW_PATH_HIDDEN_DECL(slow_path_to_number);
SLOW_PATH_HIDDEN_DECL(slow_path_to_numeric);
SLOW_PATH_HIDDEN_DECL(slow_path_to_string);
SLOW_PATH_HIDDEN_DECL(slow_path_to_object);
SLOW_PATH_HIDDEN_DECL(slow_path_negate);
SLOW_PATH_HIDDEN_DECL(slow_path_add);
SLOW_PATH_HIDDEN_DECL(slow_path_mul);
SLOW_PATH_HIDDEN_DECL(slow_path_sub);
SLOW_PATH_HIDDEN_DECL(slow_path_div);
SLOW_PATH_HIDDEN_DECL(slow_path_mod);
SLOW_PATH_HIDDEN_DECL(slow_path_pow);
SLOW_PATH_HIDDEN_DECL(slow_path_lshift);
SLOW_PATH_HIDDEN_DECL(slow_path_rshift);
SLOW_PATH_HIDDEN_DECL(slow_path_urshift);
SLOW_PATH_HIDDEN_DECL(slow_path_unsigned);
SLOW_PATH_HIDDEN_DECL(slow_path_bitnot);
SLOW_PATH_HIDDEN_DECL(slow_path_bitand);
SLOW_PATH_HIDDEN_DECL(slow_path_bitor);
SLOW_PATH_HIDDEN_DECL(slow_path_bitxor);
SLOW_PATH_HIDDEN_DECL(slow_path_typeof);
SLOW_PATH_HIDDEN_DECL(slow_path_is_object);
SLOW_PATH_HIDDEN_DECL(slow_path_is_object_or_null);
SLOW_PATH_HIDDEN_DECL(slow_path_is_function);
SLOW_PATH_HIDDEN_DECL(slow_path_in_by_id);
SLOW_PATH_HIDDEN_DECL(slow_path_in_by_val);
SLOW_PATH_HIDDEN_DECL(slow_path_del_by_val);
SLOW_PATH_HIDDEN_DECL(slow_path_strcat);
SLOW_PATH_HIDDEN_DECL(slow_path_to_primitive);
SLOW_PATH_HIDDEN_DECL(slow_path_get_enumerable_length);
SLOW_PATH_HIDDEN_DECL(slow_path_has_generic_property);
SLOW_PATH_HIDDEN_DECL(slow_path_has_structure_property);
SLOW_PATH_HIDDEN_DECL(slow_path_has_indexed_property);
SLOW_PATH_HIDDEN_DECL(slow_path_get_direct_pname);
SLOW_PATH_HIDDEN_DECL(slow_path_get_property_enumerator);
SLOW_PATH_HIDDEN_DECL(slow_path_enumerator_structure_pname);
SLOW_PATH_HIDDEN_DECL(slow_path_enumerator_generic_pname);
SLOW_PATH_HIDDEN_DECL(slow_path_to_index_string);
SLOW_PATH_HIDDEN_DECL(slow_path_profile_type_clear_log);
SLOW_PATH_HIDDEN_DECL(slow_path_unreachable);
SLOW_PATH_HIDDEN_DECL(slow_path_create_lexical_environment);
SLOW_PATH_HIDDEN_DECL(slow_path_push_with_scope);
SLOW_PATH_HIDDEN_DECL(slow_path_resolve_scope);
SLOW_PATH_HIDDEN_DECL(slow_path_is_var_scope);
SLOW_PATH_HIDDEN_DECL(slow_path_resolve_scope_for_hoisting_func_decl_in_eval);
SLOW_PATH_HIDDEN_DECL(slow_path_create_promise);
SLOW_PATH_HIDDEN_DECL(slow_path_create_generator);
SLOW_PATH_HIDDEN_DECL(slow_path_create_async_generator);
SLOW_PATH_HIDDEN_DECL(slow_path_create_rest);
SLOW_PATH_HIDDEN_DECL(slow_path_get_by_id_with_this);
SLOW_PATH_HIDDEN_DECL(slow_path_get_by_val_with_this);
SLOW_PATH_HIDDEN_DECL(slow_path_put_by_id_with_this);
SLOW_PATH_HIDDEN_DECL(slow_path_put_by_val_with_this);
SLOW_PATH_HIDDEN_DECL(slow_path_define_data_property);
SLOW_PATH_HIDDEN_DECL(slow_path_define_accessor_property);
SLOW_PATH_HIDDEN_DECL(slow_path_throw_static_error);
SLOW_PATH_HIDDEN_DECL(slow_path_new_promise);
SLOW_PATH_HIDDEN_DECL(slow_path_new_generator);
SLOW_PATH_HIDDEN_DECL(slow_path_new_array_with_spread);
SLOW_PATH_HIDDEN_DECL(slow_path_new_array_buffer);
SLOW_PATH_HIDDEN_DECL(slow_path_spread);

using SlowPathFunction = SlowPathReturnType(SLOW_PATH *)(CallFrame*, const Instruction*);

} // namespace JSC