/*
 * Copyright (C) 2011-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "ExecutableBaseInlines.h"
#include "FTLThunks.h"
#include "FullCodeOrigin.h"
#include "FunctionCodeBlock.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "GetterSetterAccessCase.h"
#include "ICStats.h"
#include "InlineAccess.h"
#include "InstanceOfAccessCase.h"
#include "IntrinsicGetterAccessCase.h"
#include "JIT.h"
#include "JITInlines.h"
#include "JSCInlines.h"
#include "JSModuleNamespaceObject.h"
#include "JSWebAssembly.h"
#include "LinkBuffer.h"
#include "ModuleNamespaceAccessCase.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

static FunctionPtr<CFunctionPtrTag> readPutICCallTarget(CodeBlock* codeBlock, CodeLocationCall<JSInternalPtrTag> call)
{
    FunctionPtr<OperationPtrTag> target = MacroAssembler::readCallTarget<OperationPtrTag>(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITType::FTLJIT) {
        MacroAssemblerCodePtr<JITThunkPtrTag> thunk = MacroAssemblerCodePtr<OperationPtrTag>::createFromExecutableAddress(target.executableAddress()).retagged<JITThunkPtrTag>();
        return codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(thunk).callTarget().retagged<CFunctionPtrTag>();
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return target.retagged<CFunctionPtrTag>();
}

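// Repatches an IC slow-path call to a new C function. Baseline and DFG code have the call
// instruction patched directly. FTL code reaches slow paths through per-call-site thunks, so
// there we look up the thunk's SlowPathCallKey, swap in the new call target, and repatch the
// call to point at the corresponding thunk instead.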
void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall<JSInternalPtrTag> call, FunctionPtr<CFunctionPtrTag> newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITType::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FunctionPtr<OperationPtrTag> target = MacroAssembler::readCallTarget<OperationPtrTag>(call);
        auto slowPathThunk = MacroAssemblerCodePtr<JITThunkPtrTag>::createFromExecutableAddress(target.retaggedExecutableAddress<JITThunkPtrTag>());
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(slowPathThunk);
        key = key.withCallTarget(newCalleeFunction);
        MacroAssembler::repatchCall(call, FunctionPtr<OperationPtrTag>(thunks.getSlowPathCallThunk(key).retaggedCode<OperationPtrTag>()));
        return;
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction.retagged<OperationPtrTag>());
}

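// Outcome of a caching attempt:
// - GiveUpOnCache: stop trying to cache at this site and fall back to the generic operation.
// - RetryCacheLater: the site was patched (or the world changed underneath us); take the slow
//   path now and attempt caching again on a later execution.
// - AttemptToCache: the cell looks cacheable, so proceed with building an access case.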
enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
    return Options::forceICFailure();
}

ALWAYS_INLINE static void fireWatchpointsAndClearStubIfNeeded(VM& vm, StructureStubInfo& stubInfo, CodeBlock* codeBlock, AccessGenerationResult& result)
{
    if (result.shouldResetStubAndFireWatchpoints()) {
        result.fireWatchpoints(vm);
        stubInfo.reset(codeBlock);
    }
}

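// The *Optimize operations re-enter this repatching machinery on every slow-path hit so the IC
// can keep accumulating access cases; the plain operations are what gets patched in once we have
// given up on caching for a given site.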
inline FunctionPtr<CFunctionPtrTag> appropriateOptimizingGetByIdFunction(GetByIDKind kind)
{
    switch (kind) {
    case GetByIDKind::Normal:
        return operationGetByIdOptimize;
    case GetByIDKind::WithThis:
        return operationGetByIdWithThisOptimize;
    case GetByIDKind::Try:
        return operationTryGetByIdOptimize;
    case GetByIDKind::Direct:
        return operationGetByIdDirectOptimize;
    }
    ASSERT_NOT_REACHED();
    return operationGetById;
}

inline FunctionPtr<CFunctionPtrTag> appropriateGetByIdFunction(GetByIDKind kind)
{
    switch (kind) {
    case GetByIDKind::Normal:
        return operationGetById;
    case GetByIDKind::WithThis:
        return operationGetByIdWithThis;
    case GetByIDKind::Try:
        return operationTryGetById;
    case GetByIDKind::Direct:
        return operationGetByIdDirect;
    }
    ASSERT_NOT_REACHED();
    return operationGetById;
}

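// Attempts to cache a get_by_id. Fast cases (array length, string length, plain self access) are
// patched directly into the inline access site when possible; everything else becomes an
// AccessCase that is handed to the StructureStubInfo, which regenerates the polymorphic stub.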
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    VM& vm = exec->vm();
    AccessGenerationResult result;

    {
        GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

        if (forceICFailure(exec))
            return GiveUpOnCache;

        // FIXME: Cache property access for immediates.
        if (!baseValue.isCell())
            return GiveUpOnCache;
        JSCell* baseCell = baseValue.asCell();

        CodeBlock* codeBlock = exec->codeBlock();

        std::unique_ptr<AccessCase> newCase;

        if (propertyName == vm.propertyNames->length) {
            if (isJSArray(baseCell)) {
                if (stubInfo.cacheType == CacheType::Unset
                    && slot.slotBase() == baseCell
                    && InlineAccess::isCacheableArrayLength(stubInfo, jsCast<JSArray*>(baseCell))) {

                    bool generatedCodeInline = InlineAccess::generateArrayLength(stubInfo, jsCast<JSArray*>(baseCell));
                    if (generatedCodeInline) {
                        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                        stubInfo.initArrayLength();
                        return RetryCacheLater;
                    }
                }

                newCase = AccessCase::create(vm, codeBlock, AccessCase::ArrayLength);
            } else if (isJSString(baseCell)) {
                if (stubInfo.cacheType == CacheType::Unset && InlineAccess::isCacheableStringLength(stubInfo)) {
                    bool generatedCodeInline = InlineAccess::generateStringLength(stubInfo);
                    if (generatedCodeInline) {
                        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                        stubInfo.initStringLength();
                        return RetryCacheLater;
                    }
                }

                newCase = AccessCase::create(vm, codeBlock, AccessCase::StringLength);
            } else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(vm, baseCell)) {
                // If there were overrides, then we can handle this as a normal property load! Guarding
                // this with such a check enables us to add an IC case for that load if needed.
                if (!arguments->overrodeThings())
                    newCase = AccessCase::create(vm, codeBlock, AccessCase::DirectArgumentsLength);
            } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(vm, baseCell)) {
                // Ditto.
                if (!arguments->overrodeThings())
                    newCase = AccessCase::create(vm, codeBlock, AccessCase::ScopedArgumentsLength);
            }
        }

        if (!propertyName.isSymbol() && baseCell->inherits<JSModuleNamespaceObject>(vm) && !slot.isUnset()) {
            if (auto moduleNamespaceSlot = slot.moduleNamespaceSlot())
                newCase = ModuleNamespaceAccessCase::create(vm, codeBlock, jsCast<JSModuleNamespaceObject*>(baseCell), moduleNamespaceSlot->environment, ScopeOffset(moduleNamespaceSlot->scopeOffset));
        }

        if (!newCase) {
            if (!slot.isCacheable() && !slot.isUnset())
                return GiveUpOnCache;

            ObjectPropertyConditionSet conditionSet;
            Structure* structure = baseCell->structure(vm);

            bool loadTargetFromProxy = false;
            if (baseCell->type() == PureForwardingProxyType) {
                baseValue = jsCast<JSProxy*>(baseCell)->target();
                baseCell = baseValue.asCell();
                structure = baseCell->structure(vm);
                loadTargetFromProxy = true;
            }

            InlineCacheAction action = actionForCell(vm, baseCell);
            if (action != AttemptToCache)
                return action;

            // Optimize self access.
            if (stubInfo.cacheType == CacheType::Unset
                && slot.isCacheableValue()
                && slot.slotBase() == baseValue
                && !slot.watchpointSet()
                && !structure->needImpurePropertyWatchpoint()
                && !loadTargetFromProxy) {

                bool generatedCodeInline = InlineAccess::generateSelfPropertyAccess(stubInfo, structure, slot.cachedOffset());
                if (generatedCodeInline) {
                    LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName, slot.slotBase() == baseValue));
                    structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                    stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
                    return RetryCacheLater;
                }
            }

            std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;

            PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

            if (slot.isUnset() || slot.slotBase() != baseValue) {
                if (structure->typeInfo().prohibitsPropertyCaching())
                    return GiveUpOnCache;

                if (structure->isDictionary()) {
                    if (structure->hasBeenFlattenedBefore())
                        return GiveUpOnCache;
                    structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseCell));
                }

                if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                    return GiveUpOnCache;
                // If the kind is GetByIDKind::Direct, we do not need to investigate prototype chains further.
                // Cacheability just depends on the head structure.
                if (kind != GetByIDKind::Direct) {
                    bool usesPolyProto;
                    prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot, usesPolyProto);
                    if (!prototypeAccessChain) {
                        // It's invalid to access this prototype property.
                        return GiveUpOnCache;
                    }

                    if (!usesPolyProto) {
                        // We use ObjectPropertyConditionSet instead for faster accesses.
                        prototypeAccessChain = nullptr;

                        // FIXME: Maybe this `if` should be inside generateConditionsForPropertyBlah.
                        // https://bugs.webkit.org/show_bug.cgi?id=185215
                        if (slot.isUnset()) {
                            conditionSet = generateConditionsForPropertyMiss(
                                vm, codeBlock, exec, structure, propertyName.impl());
                        } else if (!slot.isCacheableCustom()) {
                            conditionSet = generateConditionsForPrototypePropertyHit(
                                vm, codeBlock, exec, structure, slot.slotBase(),
                                propertyName.impl());
                        } else {
                            conditionSet = generateConditionsForPrototypePropertyHitCustom(
                                vm, codeBlock, exec, structure, slot.slotBase(),
                                propertyName.impl());
                        }

                        if (!conditionSet.isValid())
                            return GiveUpOnCache;
                    }
                }

                offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
            }

            JSFunction* getter = nullptr;
            if (slot.isCacheableGetter())
                getter = jsDynamicCast<JSFunction*>(vm, slot.getterSetter()->getter());

            Optional<DOMAttributeAnnotation> domAttribute;
            if (slot.isCacheableCustom() && slot.domAttribute())
                domAttribute = slot.domAttribute();

            if (kind == GetByIDKind::Try) {
                AccessCase::AccessType type;
                if (slot.isCacheableValue())
                    type = AccessCase::Load;
                else if (slot.isUnset())
                    type = AccessCase::Miss;
                else if (slot.isCacheableGetter())
                    type = AccessCase::GetGetter;
                else
                    RELEASE_ASSERT_NOT_REACHED();

                newCase = ProxyableAccessCase::create(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet(), WTFMove(prototypeAccessChain));
            } else if (!loadTargetFromProxy && getter && IntrinsicGetterAccessCase::canEmitIntrinsicGetter(getter, structure))
                newCase = IntrinsicGetterAccessCase::create(vm, codeBlock, slot.cachedOffset(), structure, conditionSet, getter, WTFMove(prototypeAccessChain));
            else {
                if (slot.isCacheableValue() || slot.isUnset()) {
                    newCase = ProxyableAccessCase::create(vm, codeBlock, slot.isUnset() ? AccessCase::Miss : AccessCase::Load,
                        offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet(), WTFMove(prototypeAccessChain));
                } else {
                    AccessCase::AccessType type;
                    if (slot.isCacheableGetter())
                        type = AccessCase::Getter;
                    else if (slot.attributes() & PropertyAttribute::CustomAccessor)
                        type = AccessCase::CustomAccessorGetter;
                    else
                        type = AccessCase::CustomValueGetter;

                    if (kind == GetByIDKind::WithThis && type == AccessCase::CustomAccessorGetter && domAttribute)
                        return GiveUpOnCache;

                    newCase = GetterSetterAccessCase::create(
                        vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
                        slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                        slot.isCacheableCustom() && slot.slotBase() != baseValue ? slot.slotBase() : nullptr,
                        domAttribute, WTFMove(prototypeAccessChain));
                }
            }
        }

        LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(vm), propertyName, slot.slotBase() == baseValue));

        result = stubInfo.addAccessCase(locker, codeBlock, propertyName, WTFMove(newCase));

        if (result.generatedSomeCode()) {
            LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(vm), propertyName, slot.slotBase() == baseValue));

            RELEASE_ASSERT(result.code());
            InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
        }
    }

    fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    SuperSamplerScope superSamplerScope(false);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache) {
        CodeBlock* codeBlock = exec->codeBlock();
        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGetByIdFunction(kind));
    }
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

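// Attempts to cache a put_by_id. Replacing an existing property can be patched inline; adding a
// property is cached as a structure transition (plus any prototype conditions for non-direct
// puts); custom setters and accessors become getter/setter access cases.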
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    VM& vm = exec->vm();
    AccessGenerationResult result;
    {
        GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

        if (forceICFailure(exec))
            return GiveUpOnCache;

        CodeBlock* codeBlock = exec->codeBlock();

        if (!baseValue.isCell())
            return GiveUpOnCache;

        if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
            return GiveUpOnCache;

        // FIXME: We should try to do something smarter here...
        if (isCopyOnWrite(structure->indexingMode()))
            return GiveUpOnCache;
        // We can't end up storing to a CoW on the prototype since it shouldn't own properties.
        ASSERT(!isCopyOnWrite(slot.base()->indexingMode()));

        if (!structure->propertyAccessesAreCacheable())
            return GiveUpOnCache;

        std::unique_ptr<AccessCase> newCase;
        JSCell* baseCell = baseValue.asCell();

        if (slot.base() == baseValue && slot.isCacheablePut()) {
            if (slot.type() == PutPropertySlot::ExistingProperty) {
                // This assert helps catch bugs if we accidentally forget to disable caching
                // when we transition then store to an existing property. This is common among
                // paths that reify lazy properties. If we reify a lazy property and forget
                // to disable caching, we may come down this path. The Replace IC does not
                // know how to model these types of structure transitions (or any structure
                // transition for that matter).
                RELEASE_ASSERT(baseValue.asCell()->structure(vm) == structure);

                structure->didCachePropertyReplacement(vm, slot.cachedOffset());

                if (stubInfo.cacheType == CacheType::Unset
                    && InlineAccess::canGenerateSelfPropertyReplace(stubInfo, slot.cachedOffset())
                    && !structure->needImpurePropertyWatchpoint()) {

                    bool generatedCodeInline = InlineAccess::generateSelfPropertyReplace(stubInfo, structure, slot.cachedOffset());
                    if (generatedCodeInline) {
                        LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident, slot.base() == baseValue));
                        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingPutByIdFunction(slot, putKind));
                        stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                        return RetryCacheLater;
                    }
                }

                newCase = AccessCase::create(vm, codeBlock, AccessCase::Replace, slot.cachedOffset(), structure);
            } else {
                ASSERT(slot.type() == PutPropertySlot::NewProperty);

                if (!structure->isObject())
                    return GiveUpOnCache;

                if (structure->isDictionary()) {
                    if (structure->hasBeenFlattenedBefore())
                        return GiveUpOnCache;
                    structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseValue));
                }

                PropertyOffset offset;
                Structure* newStructure =
                    Structure::addPropertyTransitionToExistingStructureConcurrently(
                        structure, ident.impl(), 0, offset);
                if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                    return GiveUpOnCache;

                ASSERT(newStructure->previousID() == structure);
                ASSERT(!newStructure->isDictionary());
                ASSERT(newStructure->isObject());

                std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
                ObjectPropertyConditionSet conditionSet;
                if (putKind == NotDirect) {
                    bool usesPolyProto;
                    prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, nullptr, usesPolyProto);
                    if (!prototypeAccessChain) {
                        // It's invalid to access this prototype property.
                        return GiveUpOnCache;
                    }

                    if (!usesPolyProto) {
                        prototypeAccessChain = nullptr;
                        conditionSet =
                            generateConditionsForPropertySetterMiss(
                                vm, codeBlock, exec, newStructure, ident.impl());
                        if (!conditionSet.isValid())
                            return GiveUpOnCache;
                    }
                }

                newCase = AccessCase::create(vm, codeBlock, offset, structure, newStructure, conditionSet, WTFMove(prototypeAccessChain));
            }
        } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
            if (slot.isCacheableCustom()) {
                ObjectPropertyConditionSet conditionSet;
                std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;

                if (slot.base() != baseValue) {
                    bool usesPolyProto;
                    prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot.base(), usesPolyProto);
                    if (!prototypeAccessChain) {
                        // It's invalid to access this prototype property.
                        return GiveUpOnCache;
                    }

                    if (!usesPolyProto) {
                        prototypeAccessChain = nullptr;
                        conditionSet =
                            generateConditionsForPrototypePropertyHitCustom(
                                vm, codeBlock, exec, structure, slot.base(), ident.impl());
                        if (!conditionSet.isValid())
                            return GiveUpOnCache;
                    }
                }

                newCase = GetterSetterAccessCase::create(
                    vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset,
                    conditionSet, WTFMove(prototypeAccessChain), slot.customSetter(), slot.base() != baseValue ? slot.base() : nullptr);
            } else {
                ObjectPropertyConditionSet conditionSet;
                std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
                PropertyOffset offset = slot.cachedOffset();

                if (slot.base() != baseValue) {
                    bool usesPolyProto;
                    prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), baseCell, slot.base(), usesPolyProto);
                    if (!prototypeAccessChain) {
                        // It's invalid to access this prototype property.
                        return GiveUpOnCache;
                    }

                    if (!usesPolyProto) {
                        prototypeAccessChain = nullptr;
                        conditionSet =
                            generateConditionsForPrototypePropertyHit(
                                vm, codeBlock, exec, structure, slot.base(), ident.impl());
                        if (!conditionSet.isValid())
                            return GiveUpOnCache;

                        if (!(conditionSet.slotBaseCondition().attributes() & PropertyAttribute::Accessor))
                            return GiveUpOnCache;

                        offset = conditionSet.slotBaseCondition().offset();
                    }
                }

                newCase = GetterSetterAccessCase::create(
                    vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet, WTFMove(prototypeAccessChain));
            }
        }

        LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident, slot.base() == baseValue));

        result = stubInfo.addAccessCase(locker, codeBlock, ident, WTFMove(newCase));

        if (result.generatedSomeCode()) {
            LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident, slot.base() == baseValue));

            RELEASE_ASSERT(result.code());

            InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
        }
    }

    fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    SuperSamplerScope superSamplerScope(false);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache) {
        CodeBlock* codeBlock = exec->codeBlock();
        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGenericPutByIdFunction(slot, putKind));
    }
}

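// Attempts to cache an in_by_id. A hit on the base object itself can be patched inline; hits on
// the prototype chain and misses are cached as InHit/InMiss access cases guarded by property
// conditions (or a poly-proto access chain).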
static InlineCacheAction tryCacheInByID(
    ExecState* exec, JSObject* base, const Identifier& ident,
    bool wasFound, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    VM& vm = exec->vm();
    AccessGenerationResult result;

    {
        GCSafeConcurrentJSLocker locker(exec->codeBlock()->m_lock, vm.heap);
        if (forceICFailure(exec))
            return GiveUpOnCache;

        if (!base->structure(vm)->propertyAccessesAreCacheable() || (!wasFound && !base->structure(vm)->propertyAccessesAreCacheableForAbsence()))
            return GiveUpOnCache;

        if (wasFound) {
            if (!slot.isCacheable())
                return GiveUpOnCache;
        }

        CodeBlock* codeBlock = exec->codeBlock();
        Structure* structure = base->structure(vm);

        std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain;
        ObjectPropertyConditionSet conditionSet;
        if (wasFound) {
            InlineCacheAction action = actionForCell(vm, base);
            if (action != AttemptToCache)
                return action;

            // Optimize self access.
            if (stubInfo.cacheType == CacheType::Unset
                && slot.isCacheableValue()
                && slot.slotBase() == base
                && !slot.watchpointSet()
                && !structure->needImpurePropertyWatchpoint()) {
                bool generatedCodeInline = InlineAccess::generateSelfInAccess(stubInfo, structure);
                if (generatedCodeInline) {
                    LOG_IC((ICEvent::InByIdSelfPatch, structure->classInfo(), ident, slot.slotBase() == base));
                    structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInByIdOptimize);
                    stubInfo.initInByIdSelf(codeBlock, structure, slot.cachedOffset());
                    return RetryCacheLater;
                }
            }

            if (slot.slotBase() != base) {
                bool usesPolyProto;
                prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), base, slot, usesPolyProto);
                if (!prototypeAccessChain) {
                    // It's invalid to access this prototype property.
                    return GiveUpOnCache;
                }
                if (!usesPolyProto) {
                    prototypeAccessChain = nullptr;
                    conditionSet = generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
                }
            }
        } else {
            bool usesPolyProto;
            prototypeAccessChain = PolyProtoAccessChain::create(exec->lexicalGlobalObject(), base, slot, usesPolyProto);
            if (!prototypeAccessChain) {
                // It's invalid to access this prototype property.
                return GiveUpOnCache;
            }

            if (!usesPolyProto) {
                prototypeAccessChain = nullptr;
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, ident.impl());
            }
        }
        if (!conditionSet.isValid())
            return GiveUpOnCache;

        LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident, slot.slotBase() == base));

        std::unique_ptr<AccessCase> newCase = AccessCase::create(
            vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, wasFound ? slot.cachedOffset() : invalidOffset, structure, conditionSet, WTFMove(prototypeAccessChain));

        result = stubInfo.addAccessCase(locker, codeBlock, ident, WTFMove(newCase));

        if (result.generatedSomeCode()) {
            LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident, slot.slotBase() == base));

            RELEASE_ASSERT(result.code());
            InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
        }
    }

    fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, exec->codeBlock(), result);

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchInByID(ExecState* exec, JSObject* baseObject, const Identifier& propertyName, bool wasFound, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    SuperSamplerScope superSamplerScope(false);

    if (tryCacheInByID(exec, baseObject, propertyName, wasFound, slot, stubInfo) == GiveUpOnCache) {
        CodeBlock* codeBlock = exec->codeBlock();
        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInById);
    }
}

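// Attempts to cache an instanceof check against a particular prototype. Cells whose structures
// have cacheable prototype queries get InstanceOfHit/InstanceOfMiss cases guarded by generated
// conditions; anything else falls back to an InstanceOfGeneric case.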
static InlineCacheAction tryCacheInstanceOf(
    ExecState* exec, JSValue valueValue, JSValue prototypeValue, StructureStubInfo& stubInfo,
    bool wasFound)
{
    VM& vm = exec->vm();
    CodeBlock* codeBlock = exec->codeBlock();
    AccessGenerationResult result;

    RELEASE_ASSERT(valueValue.isCell()); // shouldConsiderCaching rejects non-cells.

    if (forceICFailure(exec))
        return GiveUpOnCache;

    {
        GCSafeConcurrentJSLocker locker(codeBlock->m_lock, vm.heap);

        JSCell* value = valueValue.asCell();
        Structure* structure = value->structure(vm);
        std::unique_ptr<AccessCase> newCase;
        JSObject* prototype = jsDynamicCast<JSObject*>(vm, prototypeValue);
        if (prototype) {
            if (!jsDynamicCast<JSObject*>(vm, value)) {
                newCase = InstanceOfAccessCase::create(
                    vm, codeBlock, AccessCase::InstanceOfMiss, structure, ObjectPropertyConditionSet(),
                    prototype);
            } else if (structure->prototypeQueriesAreCacheable()) {
                // FIXME: Teach this to do poly proto.
                // https://bugs.webkit.org/show_bug.cgi?id=185663

                ObjectPropertyConditionSet conditionSet = generateConditionsForInstanceOf(
                    vm, codeBlock, exec, structure, prototype, wasFound);

                if (conditionSet.isValid()) {
                    newCase = InstanceOfAccessCase::create(
                        vm, codeBlock,
                        wasFound ? AccessCase::InstanceOfHit : AccessCase::InstanceOfMiss,
                        structure, conditionSet, prototype);
                }
            }
        }

        if (!newCase)
            newCase = AccessCase::create(vm, codeBlock, AccessCase::InstanceOfGeneric);

        LOG_IC((ICEvent::InstanceOfAddAccessCase, structure->classInfo(), Identifier()));

        result = stubInfo.addAccessCase(locker, codeBlock, Identifier(), WTFMove(newCase));

        if (result.generatedSomeCode()) {
            LOG_IC((ICEvent::InstanceOfReplaceWithJump, structure->classInfo(), Identifier()));

            RELEASE_ASSERT(result.code());

            MacroAssembler::repatchJump(
                stubInfo.patchableJump(),
                CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
        }
    }

    fireWatchpointsAndClearStubIfNeeded(vm, stubInfo, codeBlock, result);

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchInstanceOf(
    ExecState* exec, JSValue valueValue, JSValue prototypeValue, StructureStubInfo& stubInfo,
    bool wasFound)
{
    SuperSamplerScope superSamplerScope(false);
    if (tryCacheInstanceOf(exec, valueValue, prototypeValue, stubInfo, wasFound) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), operationInstanceOfGeneric);
}

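// "Linking slow" repatches the near call at the call site's return location so that an unlinked
// or megamorphic call goes through a thunk (the generic link thunk or a virtual call thunk)
// instead of a specific callee.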
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef<JITStubRoutinePtrTag> codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel<JITStubRoutinePtrTag>(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator).retagged<JITStubRoutinePtrTag>());
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

static JSCell* webAssemblyOwner(JSCell* callee)
{
#if ENABLE(WEBASSEMBLY)
    // Each WebAssembly.Instance shares the stubs from its WebAssembly.Module, which is therefore the appropriate owner.
    return jsCast<WebAssemblyToJSCallee*>(callee)->module();
#else
    UNUSED_PARAM(callee);
    RELEASE_ASSERT_NOT_REACHED();
    return nullptr;
#endif // ENABLE(WEBASSEMBLY)
}

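// Links a monomorphic call: records the callee on the CallLinkInfo, patches the pointer that the
// inline branchPtrWithPatch compares against, and repatches the near call to the callee's
// entrypoint. The slow path is then pointed at either the polymorphic-link thunk or a virtual
// call thunk.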
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSObject* callee, MacroAssemblerCodePtr<JSEntryPtrTag> codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CallFrame* callerFrame = exec->callerFrame();
    // Our caller must have a cell for a callee. When calling
    // this from Wasm, we ensure the callee is a cell.
    ASSERT(callerFrame->callee().isCell());

    VM& vm = callerFrame->vm();
    CodeBlock* callerCodeBlock = callerFrame->codeBlock();

    // WebAssembly -> JS stubs don't have a valid CodeBlock.
    JSCell* owner = isWebAssemblyToJSCallee(callerFrame->callee().asCell()) ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
    ASSERT(owner);

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(vm, owner, callee);
    MacroAssembler::repatchPointer(callLinkInfo.hotPathBegin(), callee);
    callLinkInfo.setLastSeenCallee(vm, owner, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");

    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel<JSEntryPtrTag>(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(callerFrame, &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(&vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(&vm, callLinkInfo);
}

void linkDirectFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCodeBlock(*vm, callerCodeBlock, jsCast<FunctionCodeBlock*>(calleeCodeBlock));
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");

    if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
        MacroAssembler::repatchJumpToNop(callLinkInfo.patchableJump());
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel<JSEntryPtrTag>(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec, &callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

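// Reverts a call site to its unlinked state: direct calls are pointed back at their slow path
// start, while regular calls have the inline pointer check restored, the slow call relinked to
// the given thunk, and the cached callee cleared.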
static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef<JITStubRoutinePtrTag> codeRef)
{
    if (callLinkInfo.isDirect()) {
        callLinkInfo.clearCodeBlock();
        if (!callLinkInfo.clearedByJettison()) {
            if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
                MacroAssembler::repatchJump(callLinkInfo.patchableJump(), callLinkInfo.slowPathStart());
            else
                MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), callLinkInfo.slowPathStart());
        }
    } else {
        if (!callLinkInfo.clearedByJettison()) {
            MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
                MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
                static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
            linkSlowFor(vm, callLinkInfo, codeRef);
            MacroAssembler::repatchPointer(callLinkInfo.hotPathBegin(), nullptr);
        }
        callLinkInfo.clearCallee();
    }
    callLinkInfo.clearSeen();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call at ", callLinkInfo.hotPathOther(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator).retagged<JITStubRoutinePtrTag>());
}

void linkVirtualFor(ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CallFrame* callerFrame = exec->callerFrame();
    VM& vm = callerFrame->vm();
    CodeBlock* callerCodeBlock = callerFrame->codeBlock();

    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", FullCodeOrigin(callerCodeBlock, callerFrame->codeOrigin()), "\n");

    MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunk = virtualThunkFor(&vm, callLinkInfo);
    revertCall(&vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, vm, nullptr, true));
    callLinkInfo.setClearedByVirtual();
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
};
} // anonymous namespace

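// Builds (or rebuilds) a polymorphic call stub: the known call variants are dispatched through a
// BinarySwitch keyed on the callee (or on its executable for closure calls), per-case fast-path
// hit counts are recorded for lower tiers, and anything that misses the switch jumps to the
// polymorphic-link thunk so the stub can be regenerated with the new variant.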
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    CallFrame* callerFrame = exec->callerFrame();
    VM& vm = callerFrame->vm();

    // During execution of linkPolymorphicCall, we strongly assume that we never do GC.
    // GC jettisons CodeBlocks, changes CallLinkInfo etc. and would break the assumptions made
    // before and after this call.
    DeferGCForAWhile deferGCForAWhile(vm.heap);

    if (!newVariant) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    // Our caller must have a cell for a callee. When calling
    // this from Wasm, we ensure the callee is a cell.
    ASSERT(callerFrame->callee().isCell());

    CodeBlock* callerCodeBlock = callerFrame->codeBlock();
    bool isWebAssembly = isWebAssemblyToJSCallee(callerFrame->callee().asCell());

    // WebAssembly -> JS stubs don't have a valid CodeBlock.
    JSCell* owner = isWebAssembly ? webAssemblyOwner(callerFrame->callee().asCell()) : callerCodeBlock;
    ASSERT(owner);

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSObject* oldCallee = callLinkInfo.callee())
        list = CallVariantList { CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock = nullptr;
        if (variant.executable() && !variant.executable()->isHostFunction()) {
            ExecutableBase* executable = variant.executable();
            codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because of an
            // arity mismatch, assume that it's better for this whole thing to be a virtual call.
            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (isWebAssembly)
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForWebAssemblyToJS();
    else if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();

    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        slowPath.append(stubJit.branchIfNotCell(calleeGPR));
#else
        // We would have already checked that the callee is a cell.
#endif

        // FIXME: We could add a fast path for InternalFunction with closure call.
        slowPath.append(stubJit.branchIfNotFunction(calleeGPR));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    UniqueArray<uint32_t> fastCounts;

    if (!isWebAssembly && callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = makeUniqueArray<uint32_t>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue = 0;
        if (isClosureCall) {
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
            // FIXME: We could add a fast path for InternalFunction with closure call.
            // https://bugs.webkit.org/show_bug.cgi?id=179311
            if (!newCaseValue)
                continue;
        } else {
            if (auto* function = variant.function())
                newCaseValue = bitwise_cast<intptr_t>(function);
            else
                newCaseValue = bitwise_cast<intptr_t>(variant.internalFunction());
        }

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    if (!frameShuffler && callLinkInfo.isTailCall())
        stubJit.emitRestoreCalleeSaves();
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
        if (variant.executable()) {
            ASSERT(variant.executable()->hasJITCodeForCall());

            codePtr = jsToWasmICCodePtr(vm, callLinkInfo.specializationKind(), variant.function());
            if (!codePtr)
                codePtr = variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);
        } else {
            ASSERT(variant.internalFunction());
            codePtr = vm.getCTIInternalFunctionTrampolineFor(CodeForCall);
        }

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().untaggedExecutableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(stubJit, owner, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
#if CPU(ARM_THUMB2)
        // Tail call special-casing ensures proper linking on ARM Thumb2, where a tail call jumps to an address
        // with a non-decorated bottom bit but a normal call calls an address with a decorated bottom bit.
        bool isTailCall = callToCodePtr.call.isFlagSet(CCallHelpers::Call::Tail);
        void* target = isTailCall ? callToCodePtr.codePtr.dataLocation() : callToCodePtr.codePtr.executableAddress();
        patchBuffer.link(callToCodePtr.call, FunctionPtr<JSEntryPtrTag>(MacroAssemblerCodePtr<JSEntryPtrTag>::createFromExecutableAddress(target)));
#else
        patchBuffer.link(callToCodePtr.call, FunctionPtr<JSEntryPtrTag>(callToCodePtr.codePtr));
#endif
    }
    if (isWebAssembly || JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    auto stubRoutine = adoptRef(*new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer, JITStubRoutinePtrTag,
            "Polymorphic call stub for %s, return point %p, targets %s",
            isWebAssembly ? "WebAssembly" : toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
            toCString(listDump(callCases)).data()),
        vm, owner, exec->callerFrame(), callLinkInfo, callCases,
        WTFMove(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel<JITStubRoutinePtrTag>(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bit platforms, but it is still reachable on
    // 32-bit platforms, since a non-cell callee will always trigger the slow path.
    linkSlowFor(&vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(WTFMove(stubRoutine));

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

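// The reset* functions below return an IC to its unoptimized state: where applicable, the
// slow-path call is pointed back at the corresponding *Optimize operation so caching can be
// attempted again, and the inline access (or patchable jump, for instanceof) is rewired to jump
// straight to the slow path.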
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
    InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
}

void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = reinterpret_cast<V_JITOperation_ESsiJJI>(readPutICCallTarget(codeBlock, stubInfo.slowPathCallLocation()).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }

    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), optimizedFunction);
    InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
}

static void resetPatchableJump(StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.patchableJump(), stubInfo.slowPathStartLocation());
}

void resetInByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), operationInByIdOptimize);
    InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
}

void resetInstanceOf(StructureStubInfo& stubInfo)
{
    resetPatchableJump(stubInfo);
}

MacroAssemblerCodePtr<JSEntryPtrTag> jsToWasmICCodePtr(VM& vm, CodeSpecializationKind kind, JSObject* callee)
{
#if ENABLE(WEBASSEMBLY)
    if (!callee)
        return nullptr;
    if (kind != CodeForCall)
        return nullptr;
    if (auto* wasmFunction = jsDynamicCast<WebAssemblyFunction*>(vm, callee))
        return wasmFunction->jsCallEntrypoint();
#else
    UNUSED_PARAM(vm);
    UNUSED_PARAM(kind);
    UNUSED_PARAM(callee);
#endif
    return nullptr;
}

} // namespace JSC

#endif