/*
 * Copyright (C) 2011-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "ExecutableBaseInlines.h"
#include "FTLThunks.h"
#include "FullCodeOrigin.h"
#include "FunctionCodeBlock.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "GetterSetterAccessCase.h"
#include "ICStats.h"
#include "InlineAccess.h"
#include "InstanceOfAccessCase.h"
#include "IntrinsicGetterAccessCase.h"
#include "JIT.h"
#include "JITInlines.h"
#include "JSCInlines.h"
#include "JSModuleNamespaceObject.h"
#include "JSWebAssembly.h"
#include "JSWebAssemblyModule.h"
#include "LinkBuffer.h"
#include "ModuleNamespaceAccessCase.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"
#include "WebAssemblyFunction.h"
#include "WebAssemblyToJSCallee.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

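// Reads the C function currently installed at a put IC's slow-path call. In FTL code the call
// points at a per-callsite slow-path thunk rather than at the operation itself, so we map the
// thunk back to its SlowPathCallKey to recover the real call target.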
static FunctionPtr<CFunctionPtrTag> readPutICCallTarget(CodeBlock* codeBlock, CodeLocationCall<JSInternalPtrTag> call)
{
    FunctionPtr<OperationPtrTag> target = MacroAssembler::readCallTarget<OperationPtrTag>(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITType::FTLJIT) {
        MacroAssemblerCodePtr<JITThunkPtrTag> thunk = MacroAssemblerCodePtr<OperationPtrTag>::createFromExecutableAddress(target.executableAddress()).retagged<JITThunkPtrTag>();
        return codeBlock->vm().ftlThunks->keyForSlowPathCallThunk(thunk).callTarget().retagged<CFunctionPtrTag>();
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return target.retagged<CFunctionPtrTag>();
}

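// Repatches an IC's slow-path call to point at newCalleeFunction. In FTL code the call must
// keep going through a slow-path thunk, so instead of repatching the call directly we rebuild
// the thunk's key with the new target and repatch the call to the corresponding thunk.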
void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall<JSInternalPtrTag> call, FunctionPtr<CFunctionPtrTag> newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITType::FTLJIT) {
        VM& vm = codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FunctionPtr<OperationPtrTag> target = MacroAssembler::readCallTarget<OperationPtrTag>(call);
        auto slowPathThunk = MacroAssemblerCodePtr<JITThunkPtrTag>::createFromExecutableAddress(target.retaggedExecutableAddress<JITThunkPtrTag>());
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(slowPathThunk);
        key = key.withCallTarget(newCalleeFunction);
        MacroAssembler::repatchCall(call, FunctionPtr<OperationPtrTag>(thunks.getSlowPathCallThunk(vm, key).retaggedCode<OperationPtrTag>()));
        return;
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction.retagged<OperationPtrTag>());
}

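// The possible outcomes of a caching attempt: repatch to the generic slow path and stop
// trying, leave the IC alone and try again on a later execution, or go ahead and cache now.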
enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

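// Decides whether a cell's structure permits property caching at all. An uncacheable
// dictionary gets one chance to be flattened (which can move property offsets, hence the
// retry) before we give up on it.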
static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(JSGlobalObject*)
{
    return Options::forceICFailure();
}

ALWAYS_INLINE static void fireWatchpointsAndClearStubIfNeeded(VM& vm, StructureStubInfo& stubInfo, CodeBlock* codeBlock, AccessGenerationResult& result)
{
    if (result.shouldResetStubAndFireWatchpoints()) {
        result.fireWatchpoints(vm);
        stubInfo.reset(codeBlock);
    }
}

inline FunctionPtr<CFunctionPtrTag> appropriateOptimizingGetByFunction(GetByKind kind)
{
    switch (kind) {
    case GetByKind::Normal:
        return operationGetByIdOptimize;
    case GetByKind::WithThis:
        return operationGetByIdWithThisOptimize;
    case GetByKind::Try:
        return operationTryGetByIdOptimize;
    case GetByKind::Direct:
        return operationGetByIdDirectOptimize;
    case GetByKind::NormalByVal:
        return operationGetByValOptimize;
    }
    RELEASE_ASSERT_NOT_REACHED();
}

inline FunctionPtr<CFunctionPtrTag> appropriateGetByFunction(GetByKind kind)
{
    switch (kind) {
    case GetByKind::Normal:
        return operationGetById;
    case GetByKind::WithThis:
        return operationGetByIdWithThis;
    case GetByKind::Try:
        return operationTryGetById;
    case GetByKind::Direct:
        return operationGetByIdDirect;
    case GetByKind::NormalByVal:
        return operationGetByValGeneric;
    }
    RELEASE_ASSERT_NOT_REACHED();
}

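// Tries to add a case for this get to the IC. Self accesses and array/string length loads may
// be patched inline; everything else becomes an AccessCase that is handed to the
// PolymorphicAccess machinery, which regenerates the stub.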
static InlineCacheAction tryCacheGetBy(JSGlobalObject* globalObject, CodeBlock* codeBlock, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByKind kind)
{
    VM& vm = globalObject->vm();
    AccessGenerationResult result;

    {
        GCSafeConcurrentJSLocker locker(codeBlock->m_lock, globalObject->vm().heap);

        if (forceICFailure(globalObject))
            return GiveUpOnCache;

        // FIXME: Cache property access for immediates.
        if (!baseValue.isCell())
            return GiveUpOnCache;
        JSCell* baseCell = baseValue.asCell();

        std::unique_ptr<AccessCase> newCase;

        if (propertyName == vm.propertyNames->length) {
            if (isJSArray(baseCell)) {
                if (stubInfo.cacheType() == CacheType::Unset
                    && slot.slotBase() == baseCell
                    && InlineAccess::isCacheableArrayLength(stubInfo, jsCast<JSArray*>(baseCell))) {

                    bool generatedCodeInline = InlineAccess::generateArrayLength(stubInfo, jsCast<JSArray*>(baseCell));
                    if (generatedCodeInline) {
                        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByFunction(kind));
                        stubInfo.initArrayLength();
                        return RetryCacheLater;
                    }
                }

                newCase = AccessCase::create(vm, codeBlock, AccessCase::ArrayLength, propertyName);
            } else if (isJSString(baseCell)) {
                if (stubInfo.cacheType() == CacheType::Unset && InlineAccess::isCacheableStringLength(stubInfo)) {
                    bool generatedCodeInline = InlineAccess::generateStringLength(stubInfo);
                    if (generatedCodeInline) {
                        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByFunction(kind));
                        stubInfo.initStringLength();
                        return RetryCacheLater;
                    }
                }

                newCase = AccessCase::create(vm, codeBlock, AccessCase::StringLength, propertyName);
            } else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(vm, baseCell)) {
                // If length was overridden, it has been reified as a normal property, so the
                // generic property-load path below can cache it. Guarding on overrodeThings()
                // lets us add the specialized length case only when it is still valid.
                if (!arguments->overrodeThings())
                    newCase = AccessCase::create(vm, codeBlock, AccessCase::DirectArgumentsLength, propertyName);
            } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(vm, baseCell)) {
                // Ditto.
                if (!arguments->overrodeThings())
                    newCase = AccessCase::create(vm, codeBlock, AccessCase::ScopedArgumentsLength, propertyName);
            }
        }

        if (!propertyName.isSymbol() && baseCell->inherits<JSModuleNamespaceObject>(vm) && !slot.isUnset()) {
            if (auto moduleNamespaceSlot = slot.moduleNamespaceSlot())
                newCase = ModuleNamespaceAccessCase::create(vm, codeBlock, propertyName, jsCast<JSModuleNamespaceObject*>(baseCell), moduleNamespaceSlot->environment, ScopeOffset(moduleNamespaceSlot->scopeOffset));
        }

        if (!newCase) {
            if (!slot.isCacheable() && !slot.isUnset())
                return GiveUpOnCache;

            ObjectPropertyConditionSet conditionSet;
            Structure* structure = baseCell->structure(vm);

            bool loadTargetFromProxy = false;
            if (baseCell->type() == PureForwardingProxyType) {
                baseValue = jsCast<JSProxy*>(baseCell)->target();
                baseCell = baseValue.asCell();
                structure = baseCell->structure(vm);
                loadTargetFromProxy = true;
            }

            InlineCacheAction action = actionForCell(vm, baseCell);
            if (action != AttemptToCache)
                return action;

            // Optimize self access.
            if (stubInfo.cacheType() == CacheType::Unset
                && slot.isCacheableValue()
                && slot.slotBase() == baseValue
                && !slot.watchpointSet()
                && !structure->needImpurePropertyWatchpoint()
                && !loadTargetFromProxy) {

                bool generatedCodeInline = InlineAccess::generateSelfPropertyAccess(stubInfo, structure, slot.cachedOffset());
                if (generatedCodeInline) {
                    LOG_IC((ICEvent::GetBySelfPatch, structure->classInfo(), propertyName, slot.slotBase() == baseValue));
                    structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByFunction(kind));
                    stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset(), propertyName);
                    return RetryCacheLater;
                }
            }

            std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;

            PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

            if (slot.isUnset() || slot.slotBase() != baseValue) {
                if (structure->typeInfo().prohibitsPropertyCaching())
                    return GiveUpOnCache;

                if (structure->isDictionary()) {
                    if (structure->hasBeenFlattenedBefore())
                        return GiveUpOnCache;
                    structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseCell));
                    return RetryCacheLater; // We may have changed property offsets.
                }

                if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                    return GiveUpOnCache;

                // If the kind is GetByKind::Direct, we do not need to investigate the prototype
                // chain further: cacheability depends only on the head structure.
                if (kind != GetByKind::Direct) {
                    auto cacheStatus = preparePrototypeChainForCaching(globalObject, baseCell, slot);
                    if (!cacheStatus)
                        return GiveUpOnCache;

                    if (cacheStatus->flattenedDictionary) {
                        // Property offsets may have changed due to flattening. We'll cache later.
                        return RetryCacheLater;
                    }

                    if (cacheStatus->usesPolyProto) {
                        prototypeAccessChain = PolyProtoAccessChain::create(globalObject, baseCell, slot);
                        if (!prototypeAccessChain)
                            return GiveUpOnCache;
                        RELEASE_ASSERT(slot.isCacheableCustom() || prototypeAccessChain->slotBaseStructure(structure)->get(vm, propertyName) == offset);
                    } else {
                        // We use ObjectPropertyConditionSet instead for faster accesses.
                        prototypeAccessChain = nullptr;

                        // FIXME: Maybe this `if` should be inside generateConditionsForPropertyBlah.
                        // https://bugs.webkit.org/show_bug.cgi?id=185215
                        if (slot.isUnset()) {
                            conditionSet = generateConditionsForPropertyMiss(
                                vm, codeBlock, globalObject, structure, propertyName.impl());
                        } else if (!slot.isCacheableCustom()) {
                            conditionSet = generateConditionsForPrototypePropertyHit(
                                vm, codeBlock, globalObject, structure, slot.slotBase(),
                                propertyName.impl());
                            RELEASE_ASSERT(!conditionSet.isValid() || conditionSet.slotBaseCondition().offset() == offset);
                        } else {
                            conditionSet = generateConditionsForPrototypePropertyHitCustom(
                                vm, codeBlock, globalObject, structure, slot.slotBase(),
                                propertyName.impl(), slot.attributes());
                        }

                        if (!conditionSet.isValid())
                            return GiveUpOnCache;
                    }
                }
            }

            JSFunction* getter = nullptr;
            if (slot.isCacheableGetter())
                getter = jsDynamicCast<JSFunction*>(vm, slot.getterSetter()->getter());

            Optional<DOMAttributeAnnotation> domAttribute;
            if (slot.isCacheableCustom() && slot.domAttribute())
                domAttribute = slot.domAttribute();

            if (kind == GetByKind::Try) {
                AccessCase::AccessType type;
                if (slot.isCacheableValue())
                    type = AccessCase::Load;
                else if (slot.isUnset())
                    type = AccessCase::Miss;
                else if (slot.isCacheableGetter())
                    type = AccessCase::GetGetter;
                else
                    RELEASE_ASSERT_NOT_REACHED();

                newCase = ProxyableAccessCase::create(vm, codeBlock, type, propertyName, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet(), WTFMove(prototypeAccessChain));
            } else if (!loadTargetFromProxy && getter && IntrinsicGetterAccessCase::canEmitIntrinsicGetter(getter, structure))
                newCase = IntrinsicGetterAccessCase::create(vm, codeBlock, propertyName, slot.cachedOffset(), structure, conditionSet, getter, WTFMove(prototypeAccessChain));
            else {
                if (slot.isCacheableValue() || slot.isUnset()) {
                    newCase = ProxyableAccessCase::create(vm, codeBlock, slot.isUnset() ? AccessCase::Miss : AccessCase::Load,
                        propertyName, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet(), WTFMove(prototypeAccessChain));
                } else {
                    AccessCase::AccessType type;
                    if (slot.isCacheableGetter())
                        type = AccessCase::Getter;
                    else if (slot.attributes() & PropertyAttribute::CustomAccessor)
                        type = AccessCase::CustomAccessorGetter;
                    else
                        type = AccessCase::CustomValueGetter;

                    if (kind == GetByKind::WithThis && type == AccessCase::CustomAccessorGetter && domAttribute)
                        return GiveUpOnCache;

                    newCase = GetterSetterAccessCase::create(
                        vm, codeBlock, type, propertyName, offset, structure, conditionSet, loadTargetFromProxy,
                        slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                        slot.isCacheableCustom() && slot.slotBase() != baseValue ? slot.slotBase() : nullptr,
                        domAttribute, WTFMove(prototypeAccessChain));
                }
            }
        }

        LOG_IC((ICEvent::GetByAddAccessCase, baseValue.classInfoOrNull(vm), propertyName, slot.slotBase() == baseValue));

        result = stubInfo.addAccessCase(locker, codeBlock, propertyName, WTFMove(newCase));

        if (result.generatedSomeCode()) {
            LOG_IC((ICEvent::GetByReplaceWithJump, baseValue.classInfoOrNull(vm), propertyName, slot.slotBase() == baseValue));

            RELEASE_ASSERT(result.code());
            InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
        }
    }

    fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, codeBlock, result);

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchGetBy(JSGlobalObject* globalObject, CodeBlock* codeBlock, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByKind kind)
{
    SuperSamplerScope superSamplerScope(false);

    if (tryCacheGetBy(globalObject, codeBlock, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGetByFunction(kind));
}


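// Tries to cache a get_by_val on an indexed base. The access case is chosen purely from the
// base cell's type (arguments, string, typed array flavor, or indexing shape), so there is no
// property name and the case is added under the empty Identifier.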
static InlineCacheAction tryCacheArrayGetByVal(JSGlobalObject* globalObject, CodeBlock* codeBlock, JSValue baseValue, JSValue index, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!index.isInt32())
        return RetryCacheLater;

    VM& vm = globalObject->vm();
    AccessGenerationResult result;

    {
        GCSafeConcurrentJSLocker locker(codeBlock->m_lock, globalObject->vm().heap);

        JSCell* base = baseValue.asCell();

        AccessCase::AccessType accessType;
        if (base->type() == DirectArgumentsType)
            accessType = AccessCase::IndexedDirectArgumentsLoad;
        else if (base->type() == ScopedArgumentsType)
            accessType = AccessCase::IndexedScopedArgumentsLoad;
        else if (base->type() == StringType)
            accessType = AccessCase::IndexedStringLoad;
        else if (isTypedView(base->classInfo(vm)->typedArrayStorageType)) {
            switch (base->classInfo(vm)->typedArrayStorageType) {
            case TypeInt8:
                accessType = AccessCase::IndexedTypedArrayInt8Load;
                break;
            case TypeUint8:
                accessType = AccessCase::IndexedTypedArrayUint8Load;
                break;
            case TypeUint8Clamped:
                accessType = AccessCase::IndexedTypedArrayUint8ClampedLoad;
                break;
            case TypeInt16:
                accessType = AccessCase::IndexedTypedArrayInt16Load;
                break;
            case TypeUint16:
                accessType = AccessCase::IndexedTypedArrayUint16Load;
                break;
            case TypeInt32:
                accessType = AccessCase::IndexedTypedArrayInt32Load;
                break;
            case TypeUint32:
                accessType = AccessCase::IndexedTypedArrayUint32Load;
                break;
            case TypeFloat32:
                accessType = AccessCase::IndexedTypedArrayFloat32Load;
                break;
            case TypeFloat64:
                accessType = AccessCase::IndexedTypedArrayFloat64Load;
                break;
            default:
                RELEASE_ASSERT_NOT_REACHED();
            }
        } else {
            IndexingType indexingShape = base->indexingType() & IndexingShapeMask;
            switch (indexingShape) {
            case Int32Shape:
                accessType = AccessCase::IndexedInt32Load;
                break;
            case DoubleShape:
                accessType = AccessCase::IndexedDoubleLoad;
                break;
            case ContiguousShape:
                accessType = AccessCase::IndexedContiguousLoad;
                break;
            case ArrayStorageShape:
                accessType = AccessCase::IndexedArrayStorageLoad;
                break;
            default:
                return GiveUpOnCache;
            }
        }

        result = stubInfo.addAccessCase(locker, codeBlock, Identifier(), AccessCase::create(vm, codeBlock, accessType, Identifier()));

        if (result.generatedSomeCode()) {
            LOG_IC((ICEvent::GetByReplaceWithJump, baseValue.classInfoOrNull(vm), Identifier()));

            RELEASE_ASSERT(result.code());
            InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
        }
    }

    fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, codeBlock, result);
    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchArrayGetByVal(JSGlobalObject* globalObject, CodeBlock* codeBlock, JSValue base, JSValue index, StructureStubInfo& stubInfo)
{
    if (tryCacheArrayGetByVal(globalObject, codeBlock, base, index, stubInfo) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationGetByValGeneric);
}

static V_JITOperation_GSsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_GSsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

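// Tries to add a case for this put to the IC. Replaces of existing properties may be patched
// inline; puts that add a new property cache the structure transition (validating the
// prototype chain for NotDirect puts); setters and customs become getter/setter access cases.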
static InlineCacheAction tryCachePutByID(JSGlobalObject* globalObject, CodeBlock* codeBlock, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    VM& vm = globalObject->vm();
    AccessGenerationResult result;
    {
        GCSafeConcurrentJSLocker locker(codeBlock->m_lock, globalObject->vm().heap);

        if (forceICFailure(globalObject))
            return GiveUpOnCache;

        if (!baseValue.isCell())
            return GiveUpOnCache;

        if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
            return GiveUpOnCache;

        // FIXME: We should try to do something smarter here...
        if (isCopyOnWrite(structure->indexingMode()))
            return GiveUpOnCache;
        // We can't end up storing to a CoW on the prototype since it shouldn't own properties.
        ASSERT(!isCopyOnWrite(slot.base()->indexingMode()));

        if (!structure->propertyAccessesAreCacheable())
            return GiveUpOnCache;

        std::unique_ptr<AccessCase> newCase;
        JSCell* baseCell = baseValue.asCell();

        if (slot.base() == baseValue && slot.isCacheablePut()) {
            if (slot.type() == PutPropertySlot::ExistingProperty) {
                // This assert helps catch bugs if we accidentally forget to disable caching
                // when we transition then store to an existing property. This is common among
                // paths that reify lazy properties. If we reify a lazy property and forget
                // to disable caching, we may come down this path. The Replace IC does not
                // know how to model these types of structure transitions (or any structure
                // transition for that matter).
                RELEASE_ASSERT(baseValue.asCell()->structure(vm) == structure);

                structure->didCachePropertyReplacement(vm, slot.cachedOffset());

                if (stubInfo.cacheType() == CacheType::Unset
                    && InlineAccess::canGenerateSelfPropertyReplace(stubInfo, slot.cachedOffset())
                    && !structure->needImpurePropertyWatchpoint()) {

                    bool generatedCodeInline = InlineAccess::generateSelfPropertyReplace(stubInfo, structure, slot.cachedOffset());
                    if (generatedCodeInline) {
                        LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident, slot.base() == baseValue));
                        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingPutByIdFunction(slot, putKind));
                        stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                        return RetryCacheLater;
                    }
                }

                newCase = AccessCase::create(vm, codeBlock, AccessCase::Replace, ident, slot.cachedOffset(), structure);
            } else {
                ASSERT(slot.type() == PutPropertySlot::NewProperty);

                if (!structure->isObject())
                    return GiveUpOnCache;

                if (structure->isDictionary()) {
                    if (structure->hasBeenFlattenedBefore())
                        return GiveUpOnCache;
                    structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseValue));
                    return RetryCacheLater;
                }

                PropertyOffset offset;
                Structure* newStructure = Structure::addPropertyTransitionToExistingStructureConcurrently(structure, ident.impl(), static_cast<unsigned>(PropertyAttribute::None), offset);
                if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                    return GiveUpOnCache;

                ASSERT(newStructure->previousID() == structure);
                ASSERT(!newStructure->isDictionary());
                ASSERT(newStructure->isObject());

                std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
                ObjectPropertyConditionSet conditionSet;
                if (putKind == NotDirect) {
                    auto cacheStatus = preparePrototypeChainForCaching(globalObject, baseCell, nullptr);
                    if (!cacheStatus)
                        return GiveUpOnCache;

                    if (cacheStatus->usesPolyProto) {
                        prototypeAccessChain = PolyProtoAccessChain::create(globalObject, baseCell, nullptr);
                        if (!prototypeAccessChain)
                            return GiveUpOnCache;
                    } else {
                        prototypeAccessChain = nullptr;
                        conditionSet = generateConditionsForPropertySetterMiss(
                            vm, codeBlock, globalObject, newStructure, ident.impl());
                        if (!conditionSet.isValid())
                            return GiveUpOnCache;
                    }
                }

                newCase = AccessCase::create(vm, codeBlock, ident, offset, structure, newStructure, conditionSet, WTFMove(prototypeAccessChain));
            }
        } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
            if (slot.isCacheableCustom()) {
                ObjectPropertyConditionSet conditionSet;
                std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;

                if (slot.base() != baseValue) {
                    auto cacheStatus = preparePrototypeChainForCaching(globalObject, baseCell, slot.base());
                    if (!cacheStatus)
                        return GiveUpOnCache;

                    if (cacheStatus->usesPolyProto) {
                        prototypeAccessChain = PolyProtoAccessChain::create(globalObject, baseCell, slot.base());
                        if (!prototypeAccessChain)
                            return GiveUpOnCache;
                    } else {
                        prototypeAccessChain = nullptr;
                        conditionSet = generateConditionsForPrototypePropertyHitCustom(
                            vm, codeBlock, globalObject, structure, slot.base(), ident.impl(), static_cast<unsigned>(PropertyAttribute::None));
                        if (!conditionSet.isValid())
                            return GiveUpOnCache;
                    }
                }

                newCase = GetterSetterAccessCase::create(
                    vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, ident,
                    invalidOffset, conditionSet, WTFMove(prototypeAccessChain), slot.customSetter(), slot.base() != baseValue ? slot.base() : nullptr);
            } else {
                ObjectPropertyConditionSet conditionSet;
                std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
                PropertyOffset offset = slot.cachedOffset();

                if (slot.base() != baseValue) {
                    auto cacheStatus = preparePrototypeChainForCaching(globalObject, baseCell, slot.base());
                    if (!cacheStatus)
                        return GiveUpOnCache;
                    if (cacheStatus->flattenedDictionary)
                        return RetryCacheLater;

                    if (cacheStatus->usesPolyProto) {
                        prototypeAccessChain = PolyProtoAccessChain::create(globalObject, baseCell, slot.base());
                        if (!prototypeAccessChain)
                            return GiveUpOnCache;
                        offset = prototypeAccessChain->slotBaseStructure(baseCell->structure(vm))->get(vm, ident.impl());
                    } else {
                        prototypeAccessChain = nullptr;
                        conditionSet = generateConditionsForPrototypePropertyHit(
                            vm, codeBlock, globalObject, structure, slot.base(), ident.impl());
                        if (!conditionSet.isValid())
                            return GiveUpOnCache;

                        if (!(conditionSet.slotBaseCondition().attributes() & PropertyAttribute::Accessor))
                            return GiveUpOnCache;

                        offset = conditionSet.slotBaseCondition().offset();
                    }
                }

                newCase = GetterSetterAccessCase::create(
                    vm, codeBlock, AccessCase::Setter, structure, ident, offset, conditionSet, WTFMove(prototypeAccessChain));
            }
        }

        LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident, slot.base() == baseValue));

        result = stubInfo.addAccessCase(locker, codeBlock, ident, WTFMove(newCase));

        if (result.generatedSomeCode()) {
            LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident, slot.base() == baseValue));

            RELEASE_ASSERT(result.code());

            InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
        }
    }

    fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, codeBlock, result);

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchPutByID(JSGlobalObject* globalObject, CodeBlock* codeBlock, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    SuperSamplerScope superSamplerScope(false);

    if (tryCachePutByID(globalObject, codeBlock, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGenericPutByIdFunction(slot, putKind));
}

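// Tries to cache an in_by_id check. This mirrors the get_by_id path: self hits may be patched
// inline, while prototype hits and misses are validated with a condition set or a poly-proto
// access chain.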
static InlineCacheAction tryCacheInByID(
    JSGlobalObject* globalObject, CodeBlock* codeBlock, JSObject* base, const Identifier& ident,
    bool wasFound, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    VM& vm = globalObject->vm();
    AccessGenerationResult result;

    {
        GCSafeConcurrentJSLocker locker(codeBlock->m_lock, vm.heap);
        if (forceICFailure(globalObject))
            return GiveUpOnCache;

        if (!base->structure(vm)->propertyAccessesAreCacheable() || (!wasFound && !base->structure(vm)->propertyAccessesAreCacheableForAbsence()))
            return GiveUpOnCache;

        if (wasFound) {
            if (!slot.isCacheable())
                return GiveUpOnCache;
        }

        Structure* structure = base->structure(vm);

        std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
        ObjectPropertyConditionSet conditionSet;
        if (wasFound) {
            InlineCacheAction action = actionForCell(vm, base);
            if (action != AttemptToCache)
                return action;

            // Optimize self access.
            if (stubInfo.cacheType() == CacheType::Unset
                && slot.isCacheableValue()
                && slot.slotBase() == base
                && !slot.watchpointSet()
                && !structure->needImpurePropertyWatchpoint()) {
                bool generatedCodeInline = InlineAccess::generateSelfInAccess(stubInfo, structure);
                if (generatedCodeInline) {
                    LOG_IC((ICEvent::InByIdSelfPatch, structure->classInfo(), ident, slot.slotBase() == base));
                    structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInByIdOptimize);
                    stubInfo.initInByIdSelf(codeBlock, structure, slot.cachedOffset());
                    return RetryCacheLater;
                }
            }

            if (slot.slotBase() != base) {
                auto cacheStatus = preparePrototypeChainForCaching(globalObject, base, slot);
                if (!cacheStatus)
                    return GiveUpOnCache;
                if (cacheStatus->flattenedDictionary)
                    return RetryCacheLater;

                if (cacheStatus->usesPolyProto) {
                    prototypeAccessChain = PolyProtoAccessChain::create(globalObject, base, slot);
                    if (!prototypeAccessChain)
                        return GiveUpOnCache;
                    RELEASE_ASSERT(slot.isCacheableCustom() || prototypeAccessChain->slotBaseStructure(structure)->get(vm, ident.impl()) == slot.cachedOffset());
                } else {
                    prototypeAccessChain = nullptr;
                    conditionSet = generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, globalObject, structure, slot.slotBase(), ident.impl());
                    if (!conditionSet.isValid())
                        return GiveUpOnCache;
                    RELEASE_ASSERT(slot.isCacheableCustom() || conditionSet.slotBaseCondition().offset() == slot.cachedOffset());
                }
            }
        } else {
            auto cacheStatus = preparePrototypeChainForCaching(globalObject, base, nullptr);
            if (!cacheStatus)
                return GiveUpOnCache;

            if (cacheStatus->usesPolyProto) {
                prototypeAccessChain = PolyProtoAccessChain::create(globalObject, base, slot);
                if (!prototypeAccessChain)
                    return GiveUpOnCache;
            } else {
                prototypeAccessChain = nullptr;
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, globalObject, structure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }
        }

        LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident, slot.slotBase() == base));

        std::unique_ptr<AccessCase> newCase = AccessCase::create(
            vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, ident, wasFound ? slot.cachedOffset() : invalidOffset, structure, conditionSet, WTFMove(prototypeAccessChain));

        result = stubInfo.addAccessCase(locker, codeBlock, ident, WTFMove(newCase));

        if (result.generatedSomeCode()) {
            LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident, slot.slotBase() == base));

            RELEASE_ASSERT(result.code());
            InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
        }
    }

    fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, codeBlock, result);

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchInByID(JSGlobalObject* globalObject, CodeBlock* codeBlock, JSObject* baseObject, const Identifier& propertyName, bool wasFound, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    SuperSamplerScope superSamplerScope(false);

    if (tryCacheInByID(globalObject, codeBlock, baseObject, propertyName, wasFound, slot, stubInfo) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInById);
}

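// Tries to cache an instanceof check against a particular prototype. We cache a hit or miss
// on the value's structure when its prototype chain is cacheable; otherwise we install a
// generic case so the stub stops trying to specialize this site.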
static InlineCacheAction tryCacheInstanceOf(
    JSGlobalObject* globalObject, CodeBlock* codeBlock, JSValue valueValue, JSValue prototypeValue, StructureStubInfo& stubInfo,
    bool wasFound)
{
    VM& vm = globalObject->vm();
    AccessGenerationResult result;

    RELEASE_ASSERT(valueValue.isCell()); // shouldConsiderCaching rejects non-cells.

    if (forceICFailure(globalObject))
        return GiveUpOnCache;

    {
        GCSafeConcurrentJSLocker locker(codeBlock->m_lock, vm.heap);

        JSCell* value = valueValue.asCell();
        Structure* structure = value->structure(vm);
        std::unique_ptr<AccessCase> newCase;
        JSObject* prototype = jsDynamicCast<JSObject*>(vm, prototypeValue);
        if (prototype) {
            if (!jsDynamicCast<JSObject*>(vm, value)) {
                newCase = InstanceOfAccessCase::create(
                    vm, codeBlock, AccessCase::InstanceOfMiss, structure, ObjectPropertyConditionSet(),
                    prototype);
            } else if (structure->prototypeQueriesAreCacheable()) {
                // FIXME: Teach this to do poly proto.
                // https://bugs.webkit.org/show_bug.cgi?id=185663
                preparePrototypeChainForCaching(globalObject, value, wasFound ? prototype : nullptr);
                ObjectPropertyConditionSet conditionSet = generateConditionsForInstanceOf(
                    vm, codeBlock, globalObject, structure, prototype, wasFound);

                if (conditionSet.isValid()) {
                    newCase = InstanceOfAccessCase::create(
                        vm, codeBlock,
                        wasFound ? AccessCase::InstanceOfHit : AccessCase::InstanceOfMiss,
                        structure, conditionSet, prototype);
                }
            }
        }

        if (!newCase)
            newCase = AccessCase::create(vm, codeBlock, AccessCase::InstanceOfGeneric, Identifier());

        LOG_IC((ICEvent::InstanceOfAddAccessCase, structure->classInfo(), Identifier()));

        result = stubInfo.addAccessCase(locker, codeBlock, Identifier(), WTFMove(newCase));

        if (result.generatedSomeCode()) {
            LOG_IC((ICEvent::InstanceOfReplaceWithJump, structure->classInfo(), Identifier()));

            RELEASE_ASSERT(result.code());

            MacroAssembler::repatchJump(
                stubInfo.patchableJump(),
                CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
        }
    }

    fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, codeBlock, result);

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchInstanceOf(
    JSGlobalObject* globalObject, CodeBlock* codeBlock, JSValue valueValue, JSValue prototypeValue, StructureStubInfo& stubInfo,
    bool wasFound)
{
    SuperSamplerScope superSamplerScope(false);
    if (tryCacheInstanceOf(globalObject, codeBlock, valueValue, prototypeValue, stubInfo, wasFound) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInstanceOfGeneric);
}

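// The linkSlowFor family repatches a call site's slow-path near call: to an explicit code ref,
// to a CTI thunk, or to a virtual-call thunk specialized for this CallLinkInfo (which we also
// register as the slow stub so it stays alive as long as the call site needs it).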
static void linkSlowFor(VM&, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef<JITStubRoutinePtrTag> codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel<JITStubRoutinePtrTag>(codeRef.code()));
}

static void linkSlowFor(VM& vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm.getCTIStub(generator).retagged<JITStubRoutinePtrTag>());
}

static void linkSlowFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, vm, nullptr, true));
}

static JSCell* webAssemblyOwner(JSCell* callee)
{
#if ENABLE(WEBASSEMBLY)
    // Each WebAssembly.Instance shares the stubs from its WebAssembly.Module, which is therefore the appropriate owner.
    return jsCast<WebAssemblyToJSCallee*>(callee)->module();
#else
    UNUSED_PARAM(callee);
    RELEASE_ASSERT_NOT_REACHED();
    return nullptr;
#endif // ENABLE(WEBASSEMBLY)
}

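// Links a monomorphic call site to its callee: stashes the callee for the inline cell check,
// repatches the fast-path near call to the callee's entrypoint, and points the slow path at
// the polymorphic-call thunk (or the virtual-call thunk when stubs are not allowed) so the
// site can be upgraded if a different callee shows up later.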
void linkFor(
    VM& vm, CallFrame* callFrame, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSObject* callee, MacroAssemblerCodePtr<JSEntryPtrTag> codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CallFrame* callerFrame = callFrame->callerFrame();
    // Our caller must have a cell for a callee. When calling
    // this from Wasm, we ensure the callee is a cell.
    ASSERT(callerFrame->callee().isCell());

    CodeBlock* callerCodeBlock = callerFrame->codeBlock();

    // WebAssembly -> JS stubs don't have a valid CodeBlock.
    JSCell* owner = isWebAssemblyToJSCallee(callerFrame->callee().asCell()) ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
    ASSERT(owner);

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(vm, owner, callee);
    MacroAssembler::repatchPointer(callLinkInfo.hotPathBegin(), callee);
    callLinkInfo.setLastSeenCallee(vm, owner, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");

    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel<JSEntryPtrTag>(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(callerFrame, &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}

void linkDirectFor(
    CallFrame* callFrame, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = callFrame->codeBlock();

    VM& vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCodeBlock(vm, callerCodeBlock, jsCast<FunctionCodeBlock*>(calleeCodeBlock));
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");

    if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
        MacroAssembler::repatchJumpToNop(callLinkInfo.patchableJump());
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel<JSEntryPtrTag>(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(callFrame, &callLinkInfo);
}

void linkSlowFor(CallFrame* callFrame, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = callFrame->callerFrame()->codeBlock();
    VM& vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

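// Returns a call site to its unlinked state. Direct calls clear their code block and restore
// the patchable branch or near call; indirect calls revert the inline cell check and point
// the slow path at codeRef. Calls cleared by jettison are not repatched: the owner's machine
// code is being thrown away, so there is nothing useful to repatch.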
static void revertCall(VM& vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef<JITStubRoutinePtrTag> codeRef)
{
    if (callLinkInfo.isDirect()) {
        callLinkInfo.clearCodeBlock();
        if (!callLinkInfo.clearedByJettison()) {
            if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
                MacroAssembler::repatchJump(callLinkInfo.patchableJump(), callLinkInfo.slowPathStart());
            else
                MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), callLinkInfo.slowPathStart());
        }
    } else {
        if (!callLinkInfo.clearedByJettison()) {
            MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
                MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
                callLinkInfo.calleeGPR(), 0);
            linkSlowFor(vm, callLinkInfo, codeRef);
            MacroAssembler::repatchPointer(callLinkInfo.hotPathBegin(), nullptr);
        }
        callLinkInfo.clearCallee();
    }
    callLinkInfo.clearSeen();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call at ", callLinkInfo.hotPathOther(), "\n");

    revertCall(vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator).retagged<JITStubRoutinePtrTag>());
}

static void linkVirtualFor(VM& vm, CallFrame* callFrame, CallLinkInfo& callLinkInfo)
{
    CallFrame* callerFrame = callFrame->callerFrame();
    CodeBlock* callerCodeBlock = callerFrame->codeBlock();

    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", FullCodeOrigin(callerCodeBlock, callerFrame->codeOrigin()), "\n");

    MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, vm, nullptr, true));
    callLinkInfo.setClearedByVirtual();
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
};
} // anonymous namespace

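// Replaces a call site's fast path with a stub that binary-switches on the callee (or, for
// closure calls, on the callee's executable) and calls the matching entrypoint directly,
// falling back to the slow path for callees it has not seen. In lower tiers the stub also
// counts per-case hits in fastCounts so later compilation can see which callees are hot.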
void linkPolymorphicCall(JSGlobalObject* globalObject, CallFrame* callFrame, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    CallFrame* callerFrame = callFrame->callerFrame();
    VM& vm = globalObject->vm();

    // During linkPolymorphicCall we strongly assume that we never trigger GC: GC jettisons
    // CodeBlocks and mutates CallLinkInfo, which would break the assumptions made before and
    // after this call.
    DeferGCForAWhile deferGCForAWhile(vm.heap);

    if (!newVariant) {
        linkVirtualFor(vm, callFrame, callLinkInfo);
        return;
    }

    // Our caller must have a cell for a callee. When calling
    // this from Wasm, we ensure the callee is a cell.
    ASSERT(callerFrame->callee().isCell());

    CodeBlock* callerCodeBlock = callerFrame->codeBlock();
    bool isWebAssembly = isWebAssemblyToJSCallee(callerFrame->callee().asCell());

    // WebAssembly -> JS stubs don't have a valid CodeBlock.
    JSCell* owner = isWebAssembly ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
    ASSERT(owner);

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSObject* oldCallee = callLinkInfo.callee())
        list = CallVariantList { CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;
    Vector<int64_t> caseValues;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock = nullptr;
        if (variant.executable() && !variant.executable()->isHostFunction()) {
            ExecutableBase* executable = variant.executable();
            codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because
            // of an arity mismatch, assume that it's better for this whole thing to be a virtual call.
            if (!codeBlock || callFrame->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(vm, callFrame, callLinkInfo);
                return;
            }
        }

        int64_t newCaseValue = 0;
        if (isClosureCall) {
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
            // FIXME: We could add a fast path for InternalFunction with closure call.
            // https://bugs.webkit.org/show_bug.cgi?id=179311
            if (!newCaseValue)
                continue;
        } else {
            if (auto* function = variant.function())
                newCaseValue = bitwise_cast<intptr_t>(function);
            else
                newCaseValue = bitwise_cast<intptr_t>(variant.internalFunction());
        }

        if (!ASSERT_DISABLED) {
            if (caseValues.contains(newCaseValue)) {
                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (auto& value : caseValues)
                    dataLog(comma, value);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
        caseValues.append(newCaseValue);
    }
    ASSERT(callCases.size() == caseValues.size());

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (isWebAssembly)
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForWebAssemblyToJS();
    else if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();

    // We use list.size() instead of callCases.size() because the limit applies to the number of CallVariants for now.
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(vm, callFrame, callLinkInfo);
        return;
    }

    Vector<CallToCodePtr> calls(callCases.size());
    UniqueArray<uint32_t> fastCounts;

    if (!isWebAssembly && callerCodeBlock->jitType() != JITCode::topTierJIT()) {
        fastCounts = makeUniqueArray<uint32_t>(callCases.size());
        memset(fastCounts.get(), 0, callCases.size() * sizeof(uint32_t));
    }

    GPRReg calleeGPR = callLinkInfo.calleeGPR();

    CCallHelpers stubJit(callerCodeBlock);

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = makeUnique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }

    GPRReg comparisonValueGPR;
    if (isClosureCall) {
        if (frameShuffler)
            comparisonValueGPR = frameShuffler->acquireGPR();
        else
            comparisonValueGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
    } else
        comparisonValueGPR = calleeGPR;

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);

    if (!frameShuffler && callLinkInfo.isTailCall()) {
        // We strongly assume that calleeGPR is not a callee-save register in the slow path.
| 1215 | ASSERT(!callerCodeBlock->calleeSaveRegisters()->find(calleeGPR)); |
| 1216 | stubJit.emitRestoreCalleeSaves(); |
| 1217 | } |
| 1218 | |
| 1219 | CCallHelpers::JumpList slowPath; |
| 1220 | if (isClosureCall) { |
| 1221 | // Verify that we have a function and stash the executable in scratchGPR. |
| 1222 | #if USE(JSVALUE64) |
| 1223 | if (callLinkInfo.isTailCall()) |
| 1224 | slowPath.append(stubJit.branchIfNotCell(calleeGPR, DoNotHaveTagRegisters)); |
| 1225 | else |
| 1226 | slowPath.append(stubJit.branchIfNotCell(calleeGPR)); |
| 1227 | #else |
| 1228 | // We would have already checked that the callee is a cell. |
| 1229 | #endif |
| 1230 | // FIXME: We could add a fast path for InternalFunction with closure call. |
| 1231 | slowPath.append(stubJit.branchIfNotFunction(calleeGPR)); |
| 1232 | |
| 1233 | stubJit.loadPtr( |
| 1234 | CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()), |
| 1235 | comparisonValueGPR); |
| 1236 | } |
| 1237 | |
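    // Emit a binary search over the sorted case values; each call to advance()
    // positions the assembler at the code for the next case.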
| 1238 | BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr); |
| 1239 | CCallHelpers::JumpList done; |
| 1240 | while (binarySwitch.advance(stubJit)) { |
| 1241 | size_t caseIndex = binarySwitch.caseIndex(); |
| 1242 | |
| 1243 | CallVariant variant = callCases[caseIndex].variant(); |
| 1244 | |
| 1245 | MacroAssemblerCodePtr<JSEntryPtrTag> codePtr; |
| 1246 | if (variant.executable()) { |
| 1247 | ASSERT(variant.executable()->hasJITCodeForCall()); |
| 1248 | |
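            // A WebAssemblyFunction callee may provide a specialized JS-to-Wasm IC
            // entrypoint; prefer it over the executable's generated call code.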
| 1249 | codePtr = jsToWasmICCodePtr(vm, callLinkInfo.specializationKind(), variant.function()); |
| 1250 | if (!codePtr) |
| 1251 | codePtr = variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired); |
| 1252 | } else { |
| 1253 | ASSERT(variant.internalFunction()); |
| 1254 | codePtr = vm.getCTIInternalFunctionTrampolineFor(CodeForCall); |
| 1255 | } |
| 1256 | |
| 1257 | if (fastCounts) { |
| 1258 | stubJit.add32( |
| 1259 | CCallHelpers::TrustedImm32(1), |
| 1260 | CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t))); |
| 1261 | } |
| 1262 | if (frameShuffler) { |
| 1263 | CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall(); |
| 1264 | calls[caseIndex].call = stubJit.nearTailCall(); |
| 1265 | } else if (callLinkInfo.isTailCall()) { |
| 1266 | stubJit.prepareForTailCallSlow(); |
| 1267 | calls[caseIndex].call = stubJit.nearTailCall(); |
| 1268 | } else |
| 1269 | calls[caseIndex].call = stubJit.nearCall(); |
| 1270 | calls[caseIndex].codePtr = codePtr; |
| 1271 | done.append(stubJit.jump()); |
| 1272 | } |
| 1273 | |
| 1274 | slowPath.link(&stubJit); |
| 1275 | binarySwitch.fallThrough().link(&stubJit); |
| 1276 | |
| 1277 | if (frameShuffler) { |
| 1278 | frameShuffler->releaseGPR(calleeGPR); |
| 1279 | frameShuffler->releaseGPR(comparisonValueGPR); |
| 1280 | frameShuffler->releaseGPR(fastCountsBaseGPR); |
| 1281 | #if USE(JSVALUE32_64) |
| 1282 | frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0)); |
| 1283 | #else |
| 1284 | frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0)); |
| 1285 | #endif |
| 1286 | frameShuffler->prepareForSlowPath(); |
| 1287 | } else { |
| 1288 | stubJit.move(calleeGPR, GPRInfo::regT0); |
| 1289 | #if USE(JSVALUE32_64) |
| 1290 | stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1); |
| 1291 | #endif |
| 1292 | } |
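
    // Materialize the state the slow path thunk expects: the callee in regT0
    // (with its tag in regT1 on 32-bit), the global object in regT3, the
    // CallLinkInfo in regT2, and the return address in regT4, which is restored
    // before the jump.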
| 1293 | stubJit.move(CCallHelpers::TrustedImmPtr(globalObject), GPRInfo::regT3); |
| 1294 | stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2); |
| 1295 | stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().untaggedExecutableAddress()), GPRInfo::regT4); |
| 1296 | |
| 1297 | stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4); |
| 1298 | AssemblyHelpers::Jump slow = stubJit.jump(); |
| 1299 | |
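    // If executable memory for the stub cannot be allocated, fall back to a
    // virtual call rather than failing outright.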
| 1300 | LinkBuffer patchBuffer(stubJit, owner, JITCompilationCanFail); |
| 1301 | if (patchBuffer.didFailToAllocate()) { |
| 1302 | linkVirtualFor(vm, callFrame, callLinkInfo); |
| 1303 | return; |
| 1304 | } |
| 1305 | |
| 1306 | RELEASE_ASSERT(callCases.size() == calls.size()); |
| 1307 | for (CallToCodePtr callToCodePtr : calls) { |
| 1308 | #if CPU(ARM_THUMB2) |
        // Tail call special-casing ensures proper linking on ARM Thumb2: a tail call
        // jumps to an address with a non-decorated bottom bit, whereas a normal call
        // targets an address with the decorated (Thumb) bottom bit set.
| 1311 | bool isTailCall = callToCodePtr.call.isFlagSet(CCallHelpers::Call::Tail); |
| 1312 | void* target = isTailCall ? callToCodePtr.codePtr.dataLocation() : callToCodePtr.codePtr.executableAddress(); |
| 1313 | patchBuffer.link(callToCodePtr.call, FunctionPtr<JSEntryPtrTag>(MacroAssemblerCodePtr<JSEntryPtrTag>::createFromExecutableAddress(target))); |
| 1314 | #else |
| 1315 | patchBuffer.link(callToCodePtr.call, FunctionPtr<JSEntryPtrTag>(callToCodePtr.codePtr)); |
| 1316 | #endif |
| 1317 | } |
| 1318 | if (isWebAssembly || JITCode::isOptimizingJIT(callerCodeBlock->jitType())) |
| 1319 | patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0)); |
| 1320 | else |
| 1321 | patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0)); |
| 1322 | patchBuffer.link(slow, CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(linkPolymorphicCallThunkGenerator).code())); |
| 1323 | |
| 1324 | auto stubRoutine = adoptRef(*new PolymorphicCallStubRoutine( |
| 1325 | FINALIZE_CODE_FOR( |
| 1326 | callerCodeBlock, patchBuffer, JITStubRoutinePtrTag, |
            "Polymorphic call stub for %s, return point %p, targets %s",
| 1328 | isWebAssembly ? "WebAssembly" : toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(), |
| 1329 | toCString(listDump(callCases)).data()), |
| 1330 | vm, owner, callFrame->callerFrame(), callLinkInfo, callCases, |
| 1331 | WTFMove(fastCounts))); |
| 1332 | |
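    // Repatch the fast path of the call IC so it jumps straight into the new stub.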
| 1333 | MacroAssembler::replaceWithJump( |
| 1334 | MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()), |
| 1335 | CodeLocationLabel<JITStubRoutinePtrTag>(stubRoutine->code().code())); |
    // The original slow path is unreachable on 64-bit platforms, but still
    // reachable on 32-bit platforms, since a non-cell callee will always
    // trigger the slow path.
| 1339 | linkSlowFor(vm, callLinkInfo); |
| 1340 | |
    // If there had been a previous stub routine, that one will die as soon as the GC runs and
    // sees that it's no longer on the stack.
| 1343 | callLinkInfo.setStub(WTFMove(stubRoutine)); |
| 1344 | |
| 1345 | // The call link info no longer has a call cache apart from the jump to the polymorphic call |
| 1346 | // stub. |
| 1347 | if (callLinkInfo.isOnList()) |
| 1348 | callLinkInfo.remove(); |
| 1349 | } |
| 1350 | |
| 1351 | void resetGetBy(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByKind kind) |
| 1352 | { |
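    // Point the slow path call back at the optimizing operation and rewire the
    // inline access to jump straight to the slow path, returning the IC to its
    // unlinked state.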
| 1353 | ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByFunction(kind)); |
| 1354 | InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation()); |
| 1355 | } |
| 1356 | |
| 1357 | void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo) |
| 1358 | { |
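    // Recover which put-by-id flavor (strict/non-strict, direct/non-direct) this
    // IC was using from the currently installed slow path call, then reinstall the
    // matching Optimize variant so the IC can be rebuilt from scratch.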
| 1359 | V_JITOperation_GSsiJJI unoptimizedFunction = reinterpret_cast<V_JITOperation_GSsiJJI>(readPutICCallTarget(codeBlock, stubInfo.slowPathCallLocation()).executableAddress()); |
| 1360 | V_JITOperation_GSsiJJI optimizedFunction; |
| 1361 | if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize) |
| 1362 | optimizedFunction = operationPutByIdStrictOptimize; |
| 1363 | else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize) |
| 1364 | optimizedFunction = operationPutByIdNonStrictOptimize; |
| 1365 | else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize) |
| 1366 | optimizedFunction = operationPutByIdDirectStrictOptimize; |
| 1367 | else { |
| 1368 | ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize); |
| 1369 | optimizedFunction = operationPutByIdDirectNonStrictOptimize; |
| 1370 | } |
| 1371 | |
| 1372 | ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), optimizedFunction); |
| 1373 | InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation()); |
| 1374 | } |
| 1375 | |
| 1376 | static void resetPatchableJump(StructureStubInfo& stubInfo) |
| 1377 | { |
| 1378 | MacroAssembler::repatchJump(stubInfo.patchableJump(), stubInfo.slowPathStartLocation()); |
| 1379 | } |
| 1380 | |
| 1381 | void resetInByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo) |
| 1382 | { |
| 1383 | ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInByIdOptimize); |
| 1384 | InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation()); |
| 1385 | } |
| 1386 | |
| 1387 | void resetInstanceOf(StructureStubInfo& stubInfo) |
| 1388 | { |
| 1389 | resetPatchableJump(stubInfo); |
| 1390 | } |
| 1391 | |
| 1392 | MacroAssemblerCodePtr<JSEntryPtrTag> jsToWasmICCodePtr(VM& vm, CodeSpecializationKind kind, JSObject* callee) |
| 1393 | { |
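    // When the callee is a WebAssemblyFunction being called (not constructed),
    // return its specialized JS-to-Wasm IC entrypoint; a null return tells the
    // caller to fall back to the ordinary JS entrypoint.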
| 1394 | #if ENABLE(WEBASSEMBLY) |
| 1395 | if (!callee) |
| 1396 | return nullptr; |
| 1397 | if (kind != CodeForCall) |
| 1398 | return nullptr; |
| 1399 | if (auto* wasmFunction = jsDynamicCast<WebAssemblyFunction*>(vm, callee)) |
| 1400 | return wasmFunction->jsCallEntrypoint(); |
| 1401 | #else |
| 1402 | UNUSED_PARAM(vm); |
| 1403 | UNUSED_PARAM(kind); |
| 1404 | UNUSED_PARAM(callee); |
| 1405 | #endif |
| 1406 | return nullptr; |
| 1407 | } |
| 1408 | |
| 1409 | } // namespace JSC |
| 1410 | |
#endif // ENABLE(JIT)
| 1412 | |