1/*
2 * Copyright (C) 2011-2019 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#include "config.h"
27#include "Repatch.h"
28
29#if ENABLE(JIT)
30
31#include "BinarySwitch.h"
32#include "CCallHelpers.h"
33#include "CallFrameShuffler.h"
34#include "DFGOperations.h"
35#include "DFGSpeculativeJIT.h"
36#include "DOMJITGetterSetter.h"
37#include "DirectArguments.h"
38#include "ExecutableBaseInlines.h"
39#include "FTLThunks.h"
40#include "FullCodeOrigin.h"
41#include "FunctionCodeBlock.h"
42#include "GCAwareJITStubRoutine.h"
43#include "GetterSetter.h"
44#include "GetterSetterAccessCase.h"
45#include "ICStats.h"
46#include "InlineAccess.h"
47#include "InstanceOfAccessCase.h"
48#include "IntrinsicGetterAccessCase.h"
49#include "JIT.h"
50#include "JITInlines.h"
51#include "JSCInlines.h"
52#include "JSModuleNamespaceObject.h"
53#include "JSWebAssembly.h"
54#include "LinkBuffer.h"
55#include "ModuleNamespaceAccessCase.h"
56#include "PolymorphicAccess.h"
57#include "ScopedArguments.h"
58#include "ScratchRegisterAllocator.h"
59#include "StackAlignment.h"
60#include "StructureRareDataInlines.h"
61#include "StructureStubClearingWatchpoint.h"
62#include "StructureStubInfo.h"
63#include "SuperSampler.h"
64#include "ThunkGenerators.h"
65#include <wtf/CommaPrinter.h>
66#include <wtf/ListDump.h>
67#include <wtf/StringPrintStream.h>
68
69namespace JSC {
70
// Recovers the C function currently targeted by a put-by-id IC's slow-path
// call. For baseline/DFG code the call site points straight at the operation;
// for FTL code the call site points at a slow-path-call thunk, so we map the
// thunk back to its real call target through the VM's FTL thunk registry.
static FunctionPtr<CFunctionPtrTag> readPutICCallTarget(CodeBlock* codeBlock, CodeLocationCall<JSInternalPtrTag> call)
{
    FunctionPtr<OperationPtrTag> target = MacroAssembler::readCallTarget<OperationPtrTag>(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        // The address we read is a thunk, not the operation itself; retag it
        // as a thunk pointer and look up the operation it wraps.
        MacroAssemblerCodePtr<JITThunkPtrTag> thunk = MacroAssemblerCodePtr<OperationPtrTag>::createFromExecutableAddress(target.executableAddress()).retagged<JITThunkPtrTag>();
        return codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(thunk).callTarget().retagged<CFunctionPtrTag>();
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return target.retagged<CFunctionPtrTag>();
}
84
// Repatches an IC slow-path call to a new callee. For FTL code blocks the
// call must keep going through a slow-path-call thunk (which preserves the
// register state the FTL expects), so instead of patching the call directly
// we build/fetch a thunk keyed on the new callee and patch the call to that.
void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall<JSInternalPtrTag> call, FunctionPtr<CFunctionPtrTag> newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        // Find the key for the thunk the call currently targets, swap in the
        // new call target, and get (or generate) the matching thunk.
        FunctionPtr<OperationPtrTag> target = MacroAssembler::readCallTarget<OperationPtrTag>(call);
        auto slowPathThunk = MacroAssemblerCodePtr<JITThunkPtrTag>::createFromExecutableAddress(target.retaggedExecutableAddress<JITThunkPtrTag>());
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(slowPathThunk);
        key = key.withCallTarget(newCalleeFunction);
        MacroAssembler::repatchCall(call, FunctionPtr<OperationPtrTag>(thunks.getSlowPathCallThunk(key).retaggedCode<OperationPtrTag>()));
        return;
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    // Non-FTL code: patch the call site directly.
    MacroAssembler::repatchCall(call, newCalleeFunction.retagged<OperationPtrTag>());
}
103
// Outcome of an attempt to (re)fill an inline cache.
enum InlineCacheAction {
    GiveUpOnCache,   // Caching is hopeless here; repatch to the generic slow path.
    RetryCacheLater, // State changed (or inline code was emitted); try again on a later miss.
    AttemptToCache   // The cell looks cacheable; proceed to build an access case.
};
109
// Decides whether a cell's structure permits property caching. May mutate the
// cell: an uncacheable dictionary that has never been flattened is flattened
// here, which can change property offsets — hence the RetryCacheLater.
static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        // A structure that was flattened before and became a dictionary again
        // is churning; don't keep paying the flattening cost.
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}
131
132static bool forceICFailure(ExecState*)
133{
134 return Options::forceICFailure();
135}
136
137ALWAYS_INLINE static void fireWatchpointsAndClearStubIfNeeded(VM& vm, StructureStubInfo& stubInfo, CodeBlock* codeBlock, AccessGenerationResult& result)
138{
139 if (result.shouldResetStubAndFireWatchpoints()) {
140 result.fireWatchpoints(vm);
141 stubInfo.reset(codeBlock);
142 }
143}
144
145inline FunctionPtr<CFunctionPtrTag> appropriateOptimizingGetByIdFunction(GetByIDKind kind)
146{
147 switch (kind) {
148 case GetByIDKind::Normal:
149 return operationGetByIdOptimize;
150 case GetByIDKind::WithThis:
151 return operationGetByIdWithThisOptimize;
152 case GetByIDKind::Try:
153 return operationTryGetByIdOptimize;
154 case GetByIDKind::Direct:
155 return operationGetByIdDirectOptimize;
156 }
157 ASSERT_NOT_REACHED();
158 return operationGetById;
159}
160
161inline FunctionPtr<CFunctionPtrTag> appropriateGetByIdFunction(GetByIDKind kind)
162{
163 switch (kind) {
164 case GetByIDKind::Normal:
165 return operationGetById;
166 case GetByIDKind::WithThis:
167 return operationGetByIdWithThis;
168 case GetByIDKind::Try:
169 return operationTryGetById;
170 case GetByIDKind::Direct:
171 return operationGetByIdDirect;
172 }
173 ASSERT_NOT_REACHED();
174 return operationGetById;
175}
176
// Tries to add an IC case for a get_by_id-style access. Returns GiveUpOnCache
// when the site should fall back to the generic operation permanently, and
// RetryCacheLater when caching succeeded or may succeed on a later miss.
// Handles, in order: the special "length" fast paths, module-namespace loads,
// the patchable inline self-access fast path, and general (possibly
// prototype-chain or proxy-forwarded) access cases.
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    VM& vm = exec->vm();
    AccessGenerationResult result;

    {
        // Hold the CodeBlock's lock while mutating the stub; the GC-safe
        // locker lets the collector run while we wait for it.
        GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

        if (forceICFailure(exec))
            return GiveUpOnCache;

        // FIXME: Cache property access for immediates.
        if (!baseValue.isCell())
            return GiveUpOnCache;
        JSCell* baseCell = baseValue.asCell();

        CodeBlock* codeBlock = exec->codeBlock();

        std::unique_ptr<AccessCase> newCase;

        // "length" on arrays, strings, and arguments objects has dedicated
        // access cases that bypass the property table.
        if (propertyName == vm.propertyNames->length) {
            if (isJSArray(baseCell)) {
                if (stubInfo.cacheType == CacheType::Unset
                    && slot.slotBase() == baseCell
                    && InlineAccess::isCacheableArrayLength(stubInfo, jsCast<JSArray*>(baseCell))) {

                    // First miss on an unfilled stub: try to patch the inline
                    // fast path in place instead of generating a stub.
                    bool generatedCodeInline = InlineAccess::generateArrayLength(stubInfo, jsCast<JSArray*>(baseCell));
                    if (generatedCodeInline) {
                        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                        stubInfo.initArrayLength();
                        return RetryCacheLater;
                    }
                }

                newCase = AccessCase::create(vm, codeBlock, AccessCase::ArrayLength);
            } else if (isJSString(baseCell)) {
                if (stubInfo.cacheType == CacheType::Unset && InlineAccess::isCacheableStringLength(stubInfo)) {
                    bool generatedCodeInline = InlineAccess::generateStringLength(stubInfo);
                    if (generatedCodeInline) {
                        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                        stubInfo.initStringLength();
                        return RetryCacheLater;
                    }
                }

                newCase = AccessCase::create(vm, codeBlock, AccessCase::StringLength);
            }
            else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(vm, baseCell)) {
                // If there were overrides, then we can handle this as a normal property load! Guarding
                // this with such a check enables us to add an IC case for that load if needed.
                if (!arguments->overrodeThings())
                    newCase = AccessCase::create(vm, codeBlock, AccessCase::DirectArgumentsLength);
            } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(vm, baseCell)) {
                // Ditto.
                if (!arguments->overrodeThings())
                    newCase = AccessCase::create(vm, codeBlock, AccessCase::ScopedArgumentsLength);
            }
        }

        // Module namespace objects get a dedicated access case that reads
        // straight out of the module environment.
        if (!propertyName.isSymbol() && baseCell->inherits<JSModuleNamespaceObject>(vm) && !slot.isUnset()) {
            if (auto moduleNamespaceSlot = slot.moduleNamespaceSlot())
                newCase = ModuleNamespaceAccessCase::create(vm, codeBlock, jsCast<JSModuleNamespaceObject*>(baseCell), moduleNamespaceSlot->environment, ScopeOffset(moduleNamespaceSlot->scopeOffset));
        }

        if (!newCase) {
            if (!slot.isCacheable() && !slot.isUnset())
                return GiveUpOnCache;

            ObjectPropertyConditionSet conditionSet;
            Structure* structure = baseCell->structure(vm);

            // A pure forwarding proxy (e.g. a global object's shell) is
            // transparent: cache against its target but remember to emit the
            // extra target load in the generated code.
            bool loadTargetFromProxy = false;
            if (baseCell->type() == PureForwardingProxyType) {
                baseValue = jsCast<JSProxy*>(baseCell)->target();
                baseCell = baseValue.asCell();
                structure = baseCell->structure(vm);
                loadTargetFromProxy = true;
            }

            InlineCacheAction action = actionForCell(vm, baseCell);
            if (action != AttemptToCache)
                return action;

            // Optimize self access.
            if (stubInfo.cacheType == CacheType::Unset
                && slot.isCacheableValue()
                && slot.slotBase() == baseValue
                && !slot.watchpointSet()
                && !structure->needImpurePropertyWatchpoint()
                && !loadTargetFromProxy) {

                // Simple own-property value load: patch the inline fast path.
                bool generatedCodeInline = InlineAccess::generateSelfPropertyAccess(stubInfo, structure, slot.cachedOffset());
                if (generatedCodeInline) {
                    LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
                    structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                    stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
                    return RetryCacheLater;
                }
            }

            std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;

            PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

            // Property lives on the prototype chain (or is absent): validate
            // that the chain is cacheable and build either a poly-proto chain
            // or an ObjectPropertyConditionSet guarding it.
            if (slot.isUnset() || slot.slotBase() != baseValue) {
                if (structure->typeInfo().prohibitsPropertyCaching())
                    return GiveUpOnCache;

                if (structure->isDictionary()) {
                    if (structure->hasBeenFlattenedBefore())
                        return GiveUpOnCache;
                    // Flattening may change offsets; recompute below.
                    structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseCell));
                }

                if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                    return GiveUpOnCache;

                // If a kind is GetByIDKind::Direct, we do not need to investigate prototype chains further.
                // Cacheability just depends on the head structure.
                if (kind != GetByIDKind::Direct) {
                    bool usesPolyProto;
                    prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot, usesPolyProto);
                    if (!prototypeAccessChain) {
                        // It's invalid to access this prototype property.
                        return GiveUpOnCache;
                    }

                    if (!usesPolyProto) {
                        // We use ObjectPropertyConditionSet instead for faster accesses.
                        prototypeAccessChain = nullptr;

                        // FIXME: Maybe this `if` should be inside generateConditionsForPropertyBlah.
                        // https://bugs.webkit.org/show_bug.cgi?id=185215
                        if (slot.isUnset()) {
                            conditionSet = generateConditionsForPropertyMiss(
                                vm, codeBlock, exec, structure, propertyName.impl());
                        } else if (!slot.isCacheableCustom()) {
                            conditionSet = generateConditionsForPrototypePropertyHit(
                                vm, codeBlock, exec, structure, slot.slotBase(),
                                propertyName.impl());
                        } else {
                            conditionSet = generateConditionsForPrototypePropertyHitCustom(
                                vm, codeBlock, exec, structure, slot.slotBase(),
                                propertyName.impl());
                        }

                        if (!conditionSet.isValid())
                            return GiveUpOnCache;
                    }
                }

                // Re-read the offset: flattening above may have moved it.
                offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
            }

            JSFunction* getter = nullptr;
            if (slot.isCacheableGetter())
                getter = jsDynamicCast<JSFunction*>(vm, slot.getterSetter()->getter());

            Optional<DOMAttributeAnnotation> domAttribute;
            if (slot.isCacheableCustom() && slot.domAttribute())
                domAttribute = slot.domAttribute();

            if (kind == GetByIDKind::Try) {
                // try_get_by_id never invokes getters; it loads the value,
                // the getter object itself, or undefined on a miss.
                AccessCase::AccessType type;
                if (slot.isCacheableValue())
                    type = AccessCase::Load;
                else if (slot.isUnset())
                    type = AccessCase::Miss;
                else if (slot.isCacheableGetter())
                    type = AccessCase::GetGetter;
                else
                    RELEASE_ASSERT_NOT_REACHED();

                newCase = ProxyableAccessCase::create(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet(), WTFMove(prototypeAccessChain));
            } else if (!loadTargetFromProxy && getter && IntrinsicGetterAccessCase::canEmitIntrinsicGetter(getter, structure))
                newCase = IntrinsicGetterAccessCase::create(vm, codeBlock, slot.cachedOffset(), structure, conditionSet, getter, WTFMove(prototypeAccessChain));
            else {
                if (slot.isCacheableValue() || slot.isUnset()) {
                    newCase = ProxyableAccessCase::create(vm, codeBlock, slot.isUnset() ? AccessCase::Miss : AccessCase::Load,
                        offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet(), WTFMove(prototypeAccessChain));
                } else {
                    AccessCase::AccessType type;
                    if (slot.isCacheableGetter())
                        type = AccessCase::Getter;
                    else if (slot.attributes() & PropertyAttribute::CustomAccessor)
                        type = AccessCase::CustomAccessorGetter;
                    else
                        type = AccessCase::CustomValueGetter;

                    // get_by_id_with_this cannot supply the correct |this| to
                    // a DOM-attribute custom accessor; bail out.
                    if (kind == GetByIDKind::WithThis && type == AccessCase::CustomAccessorGetter && domAttribute)
                        return GiveUpOnCache;

                    newCase = GetterSetterAccessCase::create(
                        vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
                        slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                        slot.isCacheableCustom() && slot.slotBase() != baseValue ? slot.slotBase() : nullptr,
                        domAttribute, WTFMove(prototypeAccessChain));
                }
            }
        }

        LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(vm), propertyName));

        result = stubInfo.addAccessCase(locker, codeBlock, propertyName, WTFMove(newCase));

        if (result.generatedSomeCode()) {
            LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(vm), propertyName));

            RELEASE_ASSERT(result.code());
            // Point the inline fast path at the freshly generated stub.
            InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
        }
    }

    // Must run after the lock is released: may fire watchpoints and reset the stub.
    fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}
395
396void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
397{
398 SuperSamplerScope superSamplerScope(false);
399
400 if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache) {
401 CodeBlock* codeBlock = exec->codeBlock();
402 ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGetByIdFunction(kind));
403 }
404}
405
406static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
407{
408 if (slot.isStrictMode()) {
409 if (putKind == Direct)
410 return operationPutByIdDirectStrict;
411 return operationPutByIdStrict;
412 }
413 if (putKind == Direct)
414 return operationPutByIdDirectNonStrict;
415 return operationPutByIdNonStrict;
416}
417
418static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
419{
420 if (slot.isStrictMode()) {
421 if (putKind == Direct)
422 return operationPutByIdDirectStrictOptimize;
423 return operationPutByIdStrictOptimize;
424 }
425 if (putKind == Direct)
426 return operationPutByIdDirectNonStrictOptimize;
427 return operationPutByIdNonStrictOptimize;
428}
429
// Tries to add an IC case for a put_by_id. Covers: replacing an existing own
// property (with a patchable inline fast path), adding a new property via a
// cached structure transition, and custom/accessor setters found on the base
// or its prototype chain. `structure` is the base's structure at the time the
// put was originally attempted.
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    VM& vm = exec->vm();
    AccessGenerationResult result;
    {
        // Lock the CodeBlock while the stub is being mutated.
        GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

        if (forceICFailure(exec))
            return GiveUpOnCache;

        CodeBlock* codeBlock = exec->codeBlock();

        if (!baseValue.isCell())
            return GiveUpOnCache;

        if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
            return GiveUpOnCache;

        // FIXME: We should try to do something smarter here...
        if (isCopyOnWrite(structure->indexingMode()))
            return GiveUpOnCache;
        // We can't end up storing to a CoW on the prototype since it shouldn't own properties.
        ASSERT(!isCopyOnWrite(slot.base()->indexingMode()));

        if (!structure->propertyAccessesAreCacheable())
            return GiveUpOnCache;

        std::unique_ptr<AccessCase> newCase;
        JSCell* baseCell = baseValue.asCell();

        if (slot.base() == baseValue && slot.isCacheablePut()) {
            if (slot.type() == PutPropertySlot::ExistingProperty) {
                // This assert helps catch bugs if we accidentally forget to disable caching
                // when we transition then store to an existing property. This is common among
                // paths that reify lazy properties. If we reify a lazy property and forget
                // to disable caching, we may come down this path. The Replace IC does not
                // know how to model these types of structure transitions (or any structure
                // transition for that matter).
                RELEASE_ASSERT(baseValue.asCell()->structure(vm) == structure);

                structure->didCachePropertyReplacement(vm, slot.cachedOffset());

                if (stubInfo.cacheType == CacheType::Unset
                    && InlineAccess::canGenerateSelfPropertyReplace(stubInfo, slot.cachedOffset())
                    && !structure->needImpurePropertyWatchpoint()) {

                    // First miss on a plain replace: patch the inline fast path.
                    bool generatedCodeInline = InlineAccess::generateSelfPropertyReplace(stubInfo, structure, slot.cachedOffset());
                    if (generatedCodeInline) {
                        LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));
                        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingPutByIdFunction(slot, putKind));
                        stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                        return RetryCacheLater;
                    }
                }

                newCase = AccessCase::create(vm, codeBlock, AccessCase::Replace, slot.cachedOffset(), structure);
            } else {
                ASSERT(slot.type() == PutPropertySlot::NewProperty);

                if (!structure->isObject())
                    return GiveUpOnCache;

                if (structure->isDictionary()) {
                    if (structure->hasBeenFlattenedBefore())
                        return GiveUpOnCache;
                    structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseValue));
                }

                // Only cache the add if the transition target structure
                // already exists (no structure allocation from the IC path).
                PropertyOffset offset;
                Structure* newStructure =
                    Structure::addPropertyTransitionToExistingStructureConcurrently(
                        structure, ident.impl(), 0, offset);
                if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                    return GiveUpOnCache;

                ASSERT(newStructure->previousID() == structure);
                ASSERT(!newStructure->isDictionary());
                ASSERT(newStructure->isObject());

                std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
                ObjectPropertyConditionSet conditionSet;
                if (putKind == NotDirect) {
                    // A non-direct put must prove no setter/shadowing property
                    // appears anywhere on the prototype chain.
                    bool usesPolyProto;
                    prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, nullptr, usesPolyProto);
                    if (!prototypeAccessChain) {
                        // It's invalid to access this prototype property.
                        return GiveUpOnCache;
                    }

                    if (!usesPolyProto) {
                        prototypeAccessChain = nullptr;
                        conditionSet =
                            generateConditionsForPropertySetterMiss(
                                vm, codeBlock, exec, newStructure, ident.impl());
                        if (!conditionSet.isValid())
                            return GiveUpOnCache;
                    }

                }

                newCase = AccessCase::create(vm, codeBlock, offset, structure, newStructure, conditionSet, WTFMove(prototypeAccessChain));
            }
        } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
            if (slot.isCacheableCustom()) {
                ObjectPropertyConditionSet conditionSet;
                std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;

                if (slot.base() != baseValue) {
                    // Custom setter found on the prototype chain: guard the chain.
                    bool usesPolyProto;
                    prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot.base(), usesPolyProto);
                    if (!prototypeAccessChain) {
                        // It's invalid to access this prototype property.
                        return GiveUpOnCache;
                    }

                    if (!usesPolyProto) {
                        prototypeAccessChain = nullptr;
                        conditionSet =
                            generateConditionsForPrototypePropertyHitCustom(
                                vm, codeBlock, exec, structure, slot.base(), ident.impl());
                        if (!conditionSet.isValid())
                            return GiveUpOnCache;
                    }
                }

                newCase = GetterSetterAccessCase::create(
                    vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset,
                    conditionSet, WTFMove(prototypeAccessChain), slot.customSetter(), slot.base() != baseValue ? slot.base() : nullptr);
            } else {
                // JS setter (GetterSetter object) on the base or its chain.
                ObjectPropertyConditionSet conditionSet;
                std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
                PropertyOffset offset = slot.cachedOffset();

                if (slot.base() != baseValue) {
                    bool usesPolyProto;
                    prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot.base(), usesPolyProto);
                    if (!prototypeAccessChain) {
                        // It's invalid to access this prototype property.
                        return GiveUpOnCache;
                    }

                    if (!usesPolyProto) {
                        prototypeAccessChain = nullptr;
                        conditionSet =
                            generateConditionsForPrototypePropertyHit(
                                vm, codeBlock, exec, structure, slot.base(), ident.impl());
                        if (!conditionSet.isValid())
                            return GiveUpOnCache;

                        // Defensive check: the offset the slot reported must
                        // agree with the one the condition set proved.
                        PropertyOffset conditionSetOffset = conditionSet.slotBaseCondition().offset();
                        if (UNLIKELY(offset != conditionSetOffset))
                            CRASH_WITH_INFO(offset, conditionSetOffset, slot.base()->type(), baseCell->type(), conditionSet.size());
                    }

                }

                newCase = GetterSetterAccessCase::create(
                    vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet, WTFMove(prototypeAccessChain));
            }
        }

        LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident));

        result = stubInfo.addAccessCase(locker, codeBlock, ident, WTFMove(newCase));

        if (result.generatedSomeCode()) {
            LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident));

            RELEASE_ASSERT(result.code());

            // Point the inline fast path at the freshly generated stub.
            InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
        }
    }

    // Must run after the lock is released: may fire watchpoints and reset the stub.
    fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}
608
609void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
610{
611 SuperSamplerScope superSamplerScope(false);
612
613 if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache) {
614 CodeBlock* codeBlock = exec->codeBlock();
615 ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGenericPutByIdFunction(slot, putKind));
616 }
617}
618
// Tries to add an IC case for an in_by_id check. A hit caches either a
// patched inline self check or an InHit case guarded by prototype-chain
// conditions; a miss caches an InMiss case proving the property is absent
// from the whole chain.
static InlineCacheAction tryCacheInByID(
    ExecState* exec, JSObject* base, const Identifier& ident,
    bool wasFound, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    VM& vm = exec->vm();
    AccessGenerationResult result;

    {
        // Lock the CodeBlock while the stub is being mutated.
        GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, vm.heap);
        if (forceICFailure(exec))
            return GiveUpOnCache;

        // Absence can only be cached when the structure supports caching the
        // fact that a property is missing.
        if (!base->structure(vm)->propertyAccessesAreCacheable() || (!wasFound && !base->structure(vm)->propertyAccessesAreCacheableForAbsence()))
            return GiveUpOnCache;

        if (wasFound) {
            if (!slot.isCacheable())
                return GiveUpOnCache;
        }

        CodeBlock* codeBlock = exec->codeBlock();
        Structure* structure = base->structure(vm);

        std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
        ObjectPropertyConditionSet conditionSet;
        if (wasFound) {
            InlineCacheAction action = actionForCell(vm, base);
            if (action != AttemptToCache)
                return action;

            // Optimize self access.
            if (stubInfo.cacheType == CacheType::Unset
                && slot.isCacheableValue()
                && slot.slotBase() == base
                && !slot.watchpointSet()
                && !structure->needImpurePropertyWatchpoint()) {
                // Own-property hit: patch the inline fast path in place.
                bool generatedCodeInline = InlineAccess::generateSelfInAccess(stubInfo, structure);
                if (generatedCodeInline) {
                    LOG_IC((ICEvent::InByIdSelfPatch, structure->classInfo(), ident));
                    structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInByIdOptimize);
                    stubInfo.initInByIdSelf(codeBlock, structure, slot.cachedOffset());
                    return RetryCacheLater;
                }
            }

            if (slot.slotBase() != base) {
                // Hit on the prototype chain: guard it with either a
                // poly-proto chain or an ObjectPropertyConditionSet.
                bool usesPolyProto;
                prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), base, slot, usesPolyProto);
                if (!prototypeAccessChain) {
                    // It's invalid to access this prototype property.
                    return GiveUpOnCache;
                }
                if (!usesPolyProto) {
                    prototypeAccessChain = nullptr;
                    conditionSet = generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
                }
            }
        } else {
            // Miss: prove the property is absent from the entire chain.
            bool usesPolyProto;
            prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), base, slot, usesPolyProto);
            if (!prototypeAccessChain) {
                // It's invalid to access this prototype property.
                return GiveUpOnCache;
            }

            if (!usesPolyProto) {
                prototypeAccessChain = nullptr;
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, ident.impl());
            }
        }
        if (!conditionSet.isValid())
            return GiveUpOnCache;

        LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident));

        std::unique_ptr<AccessCase> newCase = AccessCase::create(
            vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, wasFound ? slot.cachedOffset() : invalidOffset, structure, conditionSet, WTFMove(prototypeAccessChain));

        result = stubInfo.addAccessCase(locker, codeBlock, ident, WTFMove(newCase));

        if (result.generatedSomeCode()) {
            LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident));

            RELEASE_ASSERT(result.code());
            // Point the inline fast path at the freshly generated stub.
            InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
        }
    }

    // Must run after the lock is released: may fire watchpoints and reset the stub.
    fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}
714
715void repatchInByID(ExecState* exec, JSObject* baseObject, const Identifier& propertyName, bool wasFound, const PropertySlot& slot, StructureStubInfo& stubInfo)
716{
717 SuperSamplerScope superSamplerScope(false);
718
719 if (tryCacheInByID(exec, baseObject, propertyName, wasFound, slot, stubInfo) == GiveUpOnCache) {
720 CodeBlock* codeBlock = exec->codeBlock();
721 ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInById);
722 }
723}
724
// Tries to add an IC case for an instanceof with the default hasInstance
// behavior. Caches a hit/miss against the value's structure when the
// prototype chain is cacheable; otherwise falls back to an InstanceOfGeneric
// case so the stub at least avoids the IC slow path.
static InlineCacheAction tryCacheInstanceOf(
    ExecState* exec, JSValue valueValue, JSValue prototypeValue, StructureStubInfo& stubInfo,
    bool wasFound)
{
    VM& vm = exec->vm();
    CodeBlock* codeBlock = exec->codeBlock();
    AccessGenerationResult result;

    RELEASE_ASSERT(valueValue.isCell()); // shouldConsiderCaching rejects non-cells.

    if (forceICFailure(exec))
        return GiveUpOnCache;

    {
        // Lock the CodeBlock while the stub is being mutated.
        GCSafeConcurrentJSLocker locker(codeBlock->m_lock, vm.heap);

        JSCell* value = valueValue.asCell();
        Structure* structure = value->structure(vm);
        std::unique_ptr<AccessCase> newCase;
        JSObject* prototype = jsDynamicCast<JSObject*>(vm, prototypeValue);
        if (prototype) {
            if (!jsDynamicCast<JSObject*>(vm, value)) {
                // A non-object LHS is never an instance of anything; no
                // prototype-chain conditions are needed for that.
                newCase = InstanceOfAccessCase::create(
                    vm, codeBlock, AccessCase::InstanceOfMiss, structure, ObjectPropertyConditionSet(),
                    prototype);
            } else if (structure->prototypeQueriesAreCacheable()) {
                // FIXME: Teach this to do poly proto.
                // https://bugs.webkit.org/show_bug.cgi?id=185663

                ObjectPropertyConditionSet conditionSet = generateConditionsForInstanceOf(
                    vm, codeBlock, exec, structure, prototype, wasFound);

                if (conditionSet.isValid()) {
                    newCase = InstanceOfAccessCase::create(
                        vm, codeBlock,
                        wasFound ? AccessCase::InstanceOfHit : AccessCase::InstanceOfMiss,
                        structure, conditionSet, prototype);
                }
            }
        }

        // Anything we couldn't prove gets the generic fallback case.
        if (!newCase)
            newCase = AccessCase::create(vm, codeBlock, AccessCase::InstanceOfGeneric);

        LOG_IC((ICEvent::InstanceOfAddAccessCase, structure->classInfo(), Identifier()));

        result = stubInfo.addAccessCase(locker, codeBlock, Identifier(), WTFMove(newCase));

        if (result.generatedSomeCode()) {
            LOG_IC((ICEvent::InstanceOfReplaceWithJump, structure->classInfo(), Identifier()));

            RELEASE_ASSERT(result.code());

            // instanceof ICs have no inline fast path to rewire; instead the
            // patchable jump is pointed at the stub.
            MacroAssembler::repatchJump(
                stubInfo.patchableJump(),
                CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
        }
    }

    // Must run after the lock is released: may fire watchpoints and reset the stub.
    fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, codeBlock, result);

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}
788
789void repatchInstanceOf(
790 ExecState* exec, JSValue valueValue, JSValue prototypeValue, StructureStubInfo& stubInfo,
791 bool wasFound)
792{
793 SuperSamplerScope superSamplerScope(false);
794 if (tryCacheInstanceOf(exec, valueValue, prototypeValue, stubInfo, wasFound) == GiveUpOnCache)
795 ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), operationInstanceOfGeneric);
796}
797
798static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef<JITStubRoutinePtrTag> codeRef)
799{
800 MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel<JITStubRoutinePtrTag>(codeRef.code()));
801}
802
803static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
804{
805 linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator).retagged<JITStubRoutinePtrTag>());
806}
807
808static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
809{
810 MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunk = virtualThunkFor(vm, callLinkInfo);
811 linkSlowFor(vm, callLinkInfo, virtualThunk);
812 callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
813}
814
// Returns the cell that should own stubs generated on behalf of a WebAssembly
// callee. Wasm callers have no CodeBlock, so the owning cell is the callee's
// module instead.
static JSCell* webAssemblyOwner(JSCell* callee)
{
#if ENABLE(WEBASSEMBLY)
    // Each WebAssembly.Instance shares the stubs from their WebAssembly.Module, which are therefore the appropriate owner.
    return jsCast<WebAssemblyToJSCallee*>(callee)->module();
#else
    // Unreachable when WebAssembly is compiled out.
    UNUSED_PARAM(callee);
    RELEASE_ASSERT_NOT_REACHED();
    return nullptr;
#endif // ENABLE(WEBASSEMBLY)
}
826
// Monomorphically links an unlinked call site to a concrete callee: records
// the callee on the CallLinkInfo, patches the inline callee check and the
// near call, registers the incoming call with the callee's CodeBlock, and
// rewires the slow path (to the polymorphic-link thunk when stubs are
// allowed, else to a virtual-call thunk).
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSObject* callee, MacroAssemblerCodePtr<JSEntryPtrTag> codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CallFrame* callerFrame = exec->callerFrame();
    // Our caller must have a cell for a callee. When calling
    // this from Wasm, we ensure the callee is a cell.
    ASSERT(callerFrame->callee().isCell());

    VM& vm = callerFrame->vm();
    CodeBlock* callerCodeBlock = callerFrame->codeBlock();

    // WebAssembly -> JS stubs don't have a valid CodeBlock.
    JSCell* owner = isWebAssemblyToJSCallee(callerFrame->callee().asCell()) ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
    ASSERT(owner);

    ASSERT(!callLinkInfo.isLinked());
    // Record the callee and patch the inline "is this the expected callee?"
    // comparison that guards the fast path.
    callLinkInfo.setCallee(vm, owner, callee);
    MacroAssembler::repatchPointer(callLinkInfo.hotPathBegin(), callee);
    callLinkInfo.setLastSeenCallee(vm, owner, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");

    // Patch the fast-path near call to jump straight to the callee's entry.
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel<JSEntryPtrTag>(codePtr));

    // Let the callee know about this caller so it can unlink on jettison.
    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(callerFrame, &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        // If the callee check fails later, the slow path will upgrade this
        // site to a polymorphic call stub instead of going virtual.
        linkSlowFor(&vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(&vm, callLinkInfo);
}
864
// Links a "direct" call site (callee known at compile time) to calleeCodeBlock's
// entrypoint. Unlike linkFor, there is no inline callee check to patch — only
// the call/jump itself.
void linkDirectFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCodeBlock(*vm, callerCodeBlock, jsCast<FunctionCodeBlock*>(calleeCodeBlock));
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");

    // Direct tail calls enter through a patchable jump that normally diverts to
    // the slow path; once linked, turn it into a nop so execution falls through
    // to the patched near call below.
    if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
        MacroAssembler::repatchJumpToNop(callLinkInfo.patchableJump());
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel<JSEntryPtrTag>(codePtr));

    // Register this site with the callee so it can be unlinked on jettison.
    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec, &callLinkInfo);
}
887
888void linkSlowFor(
889 ExecState* exec, CallLinkInfo& callLinkInfo)
890{
891 CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
892 VM* vm = callerCodeBlock->vm();
893
894 linkSlowFor(vm, callLinkInfo);
895}
896
// Reverts a linked call site back to its unlinked state, re-pointing the slow
// path at codeRef and clearing all cached linking state on the CallLinkInfo.
// NOTE(review): when clearedByJettison() is set, the owning code is going away,
// so the machine code is not touched — only the bookkeeping is cleared.
static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef<JITStubRoutinePtrTag> codeRef)
{
    if (callLinkInfo.isDirect()) {
        callLinkInfo.clearCodeBlock();
        if (!callLinkInfo.clearedByJettison()) {
            // Undo what linkDirectFor did: tail calls re-enable the patchable
            // jump to the slow path; regular calls re-point the near call.
            if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
                MacroAssembler::repatchJump(callLinkInfo.patchableJump(), callLinkInfo.slowPathStart());
            else
                MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), callLinkInfo.slowPathStart());
        }
    } else {
        if (!callLinkInfo.clearedByJettison()) {
            // If a polymorphic stub replaced the inline branch with a jump,
            // restore the original branch-on-callee sequence first.
            MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
                MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
                static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
            linkSlowFor(vm, callLinkInfo, codeRef);
            // Clear the expected-callee pointer so the inline check always fails.
            MacroAssembler::repatchPointer(callLinkInfo.hotPathBegin(), nullptr);
        }
        callLinkInfo.clearCallee();
    }
    callLinkInfo.clearSeen();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}
923
924void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
925{
926 if (Options::dumpDisassembly())
927 dataLog("Unlinking call at ", callLinkInfo.hotPathOther(), "\n");
928
929 revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator).retagged<JITStubRoutinePtrTag>());
930}
931
932void linkVirtualFor(ExecState* exec, CallLinkInfo& callLinkInfo)
933{
934 CallFrame* callerFrame = exec->callerFrame();
935 VM& vm = callerFrame->vm();
936 CodeBlock* callerCodeBlock = callerFrame->codeBlock();
937
938 if (shouldDumpDisassemblyFor(callerCodeBlock))
939 dataLog("Linking virtual call at ", FullCodeOrigin(callerCodeBlock, callerFrame->codeOrigin()), "\n");
940
941 MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunk = virtualThunkFor(&vm, callLinkInfo);
942 revertCall(&vm, callLinkInfo, virtualThunk);
943 callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, vm, nullptr, true));
944 callLinkInfo.setClearedByVirtual();
945}
946
namespace {
// Pairs a stub-generated (near) call instruction with the entrypoint it must
// be linked to once the LinkBuffer for the polymorphic call stub is finalized.
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
};
} // anonymous namespace
953
// Upgrades a call site to a polymorphic call stub covering every callee seen so
// far plus newVariant. The stub dispatches on the callee (or its executable for
// closure calls) via a binary switch, calling each known target directly, and
// falls back to the link-polymorphic-call thunk for unknown callees. Bails out
// to a virtual call when the variant list is unusable or too large, or when
// code allocation fails.
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // A null variant means we have nothing concrete to dispatch on.
    if (!newVariant) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CallFrame* callerFrame = exec->callerFrame();

    // Our caller must have a cell for a callee. When calling
    // this from Wasm, we ensure the callee is a cell.
    ASSERT(callerFrame->callee().isCell());

    VM& vm = callerFrame->vm();
    CodeBlock* callerCodeBlock = callerFrame->codeBlock();
    bool isWebAssembly = isWebAssemblyToJSCallee(callerFrame->callee().asCell());

    // WebAssembly -> JS stubs don't have a valid CodeBlock.
    JSCell* owner = isWebAssembly ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
    ASSERT(owner);

    // Start from the variants already covered by an existing stub, or the
    // single previously-linked callee, then merge in the new variant.
    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSObject* oldCallee = callLinkInfo.callee())
        list = CallVariantList { CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock = nullptr;
        if (variant.executable() && !variant.executable()->isHostFunction()) {
            ExecutableBase* executable = variant.executable();
            codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because arity mismatch,
            // assume that it's better for this whole thing to be a virtual call.
            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (isWebAssembly)
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForWebAssemblyToJS();
    else if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();

    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    // Tail calls with a frame-shuffle plan must rearrange the frame before
    // jumping to the callee; the shuffler also manages scratch registers.
    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        slowPath.append(stubJit.branchIfNotCell(calleeGPR));
#else
        // We would have already checked that the callee is a cell.
#endif

        // FIXME: We could add a fast path for InternalFunction with closure call.
        slowPath.append(stubJit.branchIfNotFunction(calleeGPR));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    // Per-case hit counters, only profiled in lower tiers (and never for wasm).
    UniqueArray<uint32_t> fastCounts;

    if (!isWebAssembly && callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = makeUniqueArray<uint32_t>(callCases.size());

    // Compute the switch key for each case: the executable pointer for closure
    // calls, otherwise the JSFunction/InternalFunction pointer itself.
    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue = 0;
        if (isClosureCall) {
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
            // FIXME: We could add a fast path for InternalFunction with closure call.
            // https://bugs.webkit.org/show_bug.cgi?id=179311
            if (!newCaseValue)
                continue;
        } else {
            if (auto* function = variant.function())
                newCaseValue = bitwise_cast<intptr_t>(function);
            else
                newCaseValue = bitwise_cast<intptr_t>(variant.internalFunction());
        }

        // Debug-only check: duplicate keys would make the binary switch ambiguous.
        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    if (!frameShuffler && callLinkInfo.isTailCall())
        stubJit.emitRestoreCalleeSaves();
    // Emit the dispatch: binary-search over the case keys, then call each
    // case's entrypoint directly (recording the call for later linking).
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
        if (variant.executable()) {
            ASSERT(variant.executable()->hasJITCodeForCall());

            // Prefer a JS->wasm IC entrypoint when one exists for this callee.
            codePtr = jsToWasmICCodePtr(vm, callLinkInfo.specializationKind(), variant.function());
            if (!codePtr)
                codePtr = variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);
        } else {
            ASSERT(variant.internalFunction());
            codePtr = vm.getCTIInternalFunctionTrampolineFor(CodeForCall);
        }

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    // Unmatched callees (and non-function/non-cell values) fall through here.
    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    // The slow-path thunk expects the CallLinkInfo in regT2 and the return
    // location in regT4.
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().untaggedExecutableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(stubJit, owner, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        // Out of executable memory: degrade gracefully to a virtual call.
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
#if CPU(ARM_THUMB2)
        // Tail call special-casing ensures proper linking on ARM Thumb2, where a tail call jumps to an address
        // with a non-decorated bottom bit but a normal call calls an address with a decorated bottom bit.
        bool isTailCall = callToCodePtr.call.isFlagSet(CCallHelpers::Call::Tail);
        void* target = isTailCall ? callToCodePtr.codePtr.dataLocation() : callToCodePtr.codePtr.executableAddress();
        patchBuffer.link(callToCodePtr.call, FunctionPtr<JSEntryPtrTag>(MacroAssemblerCodePtr<JSEntryPtrTag>::createFromExecutableAddress(target)));
#else
        patchBuffer.link(callToCodePtr.call, FunctionPtr<JSEntryPtrTag>(callToCodePtr.codePtr));
#endif
    }
    if (isWebAssembly || JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    auto stubRoutine = adoptRef(*new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer, JITStubRoutinePtrTag,
            "Polymorphic call stub for %s, return point %p, targets %s",
                isWebAssembly ? "WebAssembly" : toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
            toCString(listDump(callCases)).data()),
        vm, owner, exec->callerFrame(), callLinkInfo, callCases,
        WTFMove(fastCounts)));

    // Replace the inline callee check with an unconditional jump into the stub.
    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel<JITStubRoutinePtrTag>(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bits, but still
    // reachable on 32-bits since a non-cell callee will always
    // trigger the slow path
    linkSlowFor(&vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(WTFMove(stubRoutine));

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}
1245
// Resets a get-by-id IC: re-points the slow-path call at the optimizing
// operation (so caching can be attempted afresh) and rewires the inline fast
// path to jump straight to the slow path.
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
    InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
}
1251
1252void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
1253{
1254 V_JITOperation_ESsiJJI unoptimizedFunction = reinterpret_cast<V_JITOperation_ESsiJJI>(readPutICCallTarget(codeBlock, stubInfo.slowPathCallLocation()).executableAddress());
1255 V_JITOperation_ESsiJJI optimizedFunction;
1256 if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
1257 optimizedFunction = operationPutByIdStrictOptimize;
1258 else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
1259 optimizedFunction = operationPutByIdNonStrictOptimize;
1260 else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
1261 optimizedFunction = operationPutByIdDirectStrictOptimize;
1262 else {
1263 ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
1264 optimizedFunction = operationPutByIdDirectNonStrictOptimize;
1265 }
1266
1267 ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), optimizedFunction);
1268 InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
1269}
1270
// Re-points the IC's patchable jump back at the slow path, discarding any
// generated stub as the jump target.
static void resetPatchableJump(StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.patchableJump(), stubInfo.slowPathStartLocation());
}
1275
// Resets an in-by-id IC: re-points the slow-path call at the optimizing
// operation and rewires the inline fast path to jump straight to the slow path.
void resetInByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInByIdOptimize);
    InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
}
1281
// Resets an instanceof IC by restoring its patchable jump to the slow path.
void resetInstanceOf(StructureStubInfo& stubInfo)
{
    resetPatchableJump(stubInfo);
}
1286
1287MacroAssemblerCodePtr<JSEntryPtrTag> jsToWasmICCodePtr(VM& vm, CodeSpecializationKind kind, JSObject* callee)
1288{
1289#if ENABLE(WEBASSEMBLY)
1290 if (!callee)
1291 return nullptr;
1292 if (kind != CodeForCall)
1293 return nullptr;
1294 if (auto* wasmFunction = jsDynamicCast<WebAssemblyFunction*>(vm, callee))
1295 return wasmFunction->jsCallEntrypoint();
1296#else
1297 UNUSED_PARAM(vm);
1298 UNUSED_PARAM(kind);
1299 UNUSED_PARAM(callee);
1300#endif
1301 return nullptr;
1302}
1303
1304} // namespace JSC
1305
1306#endif
1307