/*
 * Copyright (C) 2012-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "GetByIdStatus.h"

#include "BytecodeStructs.h"
#include "CodeBlock.h"
#include "ComplexGetStatus.h"
#include "GetterSetterAccessCase.h"
#include "ICStatusUtils.h"
#include "InterpreterInlines.h"
#include "IntrinsicGetterAccessCase.h"
#include "JSCInlines.h"
#include "JSScope.h"
#include "LLIntData.h"
#include "LowLevelInterpreter.h"
#include "ModuleNamespaceAccessCase.h"
#include "PolymorphicAccess.h"
#include "StructureStubInfo.h"
#include <wtf/ListDump.h>

namespace JSC {
namespace DOMJIT {
class GetterSetter;
}

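// Appends a variant via the shared IC status helper; returns false if the new variant
// cannot be combined with the ones we already have.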
bool GetByIdStatus::appendVariant(const GetByIdVariant& variant)
{
    return appendICStatusVariant(m_variants, variant);
}

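// Derives a status from the LLInt's own caching metadata. Only a default-mode get_by_id
// or a get_by_id_direct that caches a plain, non-custom property yields a Simple variant;
// everything else reports NoInformation.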
GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, UniquedStringImpl* uid)
{
    VM& vm = *profiledBlock->vm();

    auto instruction = profiledBlock->instructions().at(bytecodeIndex);

    StructureID structureID;
    switch (instruction->opcodeID()) {
    case op_get_by_id: {
        auto& metadata = instruction->as<OpGetById>().metadata(profiledBlock);
        // FIXME: We should not just bail if we see a get_by_id_proto_load.
        // https://bugs.webkit.org/show_bug.cgi?id=158039
        if (metadata.m_mode != GetByIdMode::Default)
            return GetByIdStatus(NoInformation, false);
        structureID = metadata.m_modeMetadata.defaultMode.structureID;
        break;
    }
    case op_get_by_id_direct:
        structureID = instruction->as<OpGetByIdDirect>().metadata(profiledBlock).m_structureID;
        break;
    case op_try_get_by_id: {
        // FIXME: We should not just bail if we see a try_get_by_id.
        // https://bugs.webkit.org/show_bug.cgi?id=158039
        return GetByIdStatus(NoInformation, false);
    }

    default: {
        ASSERT_NOT_REACHED();
        return GetByIdStatus(NoInformation, false);
    }
    }

    if (!structureID)
        return GetByIdStatus(NoInformation, false);

    Structure* structure = vm.heap.structureIDTable().get(structureID);

    if (structure->takesSlowPathInDFGForImpureProperty())
        return GetByIdStatus(NoInformation, false);

    unsigned attributes;
    PropertyOffset offset = structure->getConcurrently(uid, attributes);
    if (!isValidOffset(offset))
        return GetByIdStatus(NoInformation, false);
    if (attributes & PropertyAttribute::CustomAccessorOrValue)
        return GetByIdStatus(NoInformation, false);

    return GetByIdStatus(Simple, false, GetByIdVariant(StructureSet(structure), offset));
}

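// Status for a baseline code block: prefer the JIT stub info when the DFG is enabled, and
// fall back to the LLInt metadata when the stub tells us nothing. If we have already exited
// at this site, demote the result to its slow version.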
GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, ICStatusMap& map, unsigned bytecodeIndex, UniquedStringImpl* uid, ExitFlag didExit, CallLinkStatus::ExitSiteData callExitSiteData)
{
    ConcurrentJSLocker locker(profiledBlock->m_lock);

    GetByIdStatus result;

#if ENABLE(DFG_JIT)
    result = computeForStubInfoWithoutExitSiteFeedback(
        locker, profiledBlock, map.get(CodeOrigin(bytecodeIndex)).stubInfo, uid,
        callExitSiteData);

    if (didExit)
        return result.slowVersion();
#else
    UNUSED_PARAM(map);
    UNUSED_PARAM(didExit);
    UNUSED_PARAM(callExitSiteData);
#endif

    if (!result)
        return computeFromLLInt(profiledBlock, bytecodeIndex, uid);

    return result;
}

#if ENABLE(DFG_JIT)
GetByIdStatus GetByIdStatus::computeForStubInfo(const ConcurrentJSLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo, CodeOrigin codeOrigin, UniquedStringImpl* uid)
{
    unsigned bytecodeIndex = codeOrigin.bytecodeIndex();
    GetByIdStatus result = GetByIdStatus::computeForStubInfoWithoutExitSiteFeedback(
        locker, profiledBlock, stubInfo, uid,
        CallLinkStatus::computeExitSiteData(profiledBlock, bytecodeIndex));

    if (!result.takesSlowPath() && hasBadCacheExitSite(profiledBlock, bytecodeIndex))
        return result.slowVersion();
    return result;
}
#endif // ENABLE(DFG_JIT)

#if ENABLE(JIT)
GetByIdStatus::GetByIdStatus(const ModuleNamespaceAccessCase& accessCase)
    : m_moduleNamespaceObject(accessCase.moduleNamespaceObject())
    , m_moduleEnvironment(accessCase.moduleEnvironment())
    , m_scopeOffset(accessCase.scopeOffset())
    , m_state(ModuleNamespace)
    , m_wasSeenInJIT(true)
{
}

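// Turns the stub's cache into variants. A self cache becomes a single Simple variant; a
// polymorphic stub is walked case by case, and anything we cannot express as a
// GetByIdVariant (proxies, poly-proto, missing structures, unsupported access kinds, or
// variants that will not merge) collapses the whole status to a slow-path result.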
GetByIdStatus GetByIdStatus::computeForStubInfoWithoutExitSiteFeedback(
    const ConcurrentJSLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo, UniquedStringImpl* uid,
    CallLinkStatus::ExitSiteData callExitSiteData)
{
    StubInfoSummary summary = StructureStubInfo::summary(stubInfo);
    if (!isInlineable(summary))
        return GetByIdStatus(summary);

    // Finally figure out if we can derive an access strategy.
    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
    switch (stubInfo->cacheType) {
    case CacheType::Unset:
        return GetByIdStatus(NoInformation);

    case CacheType::GetByIdSelf: {
        Structure* structure = stubInfo->u.byIdSelf.baseObjectStructure.get();
        if (structure->takesSlowPathInDFGForImpureProperty())
            return GetByIdStatus(JSC::slowVersion(summary));
        unsigned attributes;
        GetByIdVariant variant;
        variant.m_offset = structure->getConcurrently(uid, attributes);
        if (!isValidOffset(variant.m_offset))
            return GetByIdStatus(JSC::slowVersion(summary));
        if (attributes & PropertyAttribute::CustomAccessorOrValue)
            return GetByIdStatus(JSC::slowVersion(summary));

        variant.m_structureSet.add(structure);
        bool didAppend = result.appendVariant(variant);
        ASSERT_UNUSED(didAppend, didAppend);
        return result;
    }

    case CacheType::Stub: {
        PolymorphicAccess* list = stubInfo->u.stub;
        if (list->size() == 1) {
            const AccessCase& access = list->at(0);
            switch (access.type()) {
            case AccessCase::ModuleNamespaceLoad:
                return GetByIdStatus(access.as<ModuleNamespaceAccessCase>());
            default:
                break;
            }
        }

        for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) {
            const AccessCase& access = list->at(listIndex);
            if (access.viaProxy())
                return GetByIdStatus(JSC::slowVersion(summary));

            if (access.usesPolyProto())
                return GetByIdStatus(JSC::slowVersion(summary));

            Structure* structure = access.structure();
            if (!structure) {
                // The null structure cases arise due to array.length and string.length. We have no way
                // of creating a GetByIdVariant for those, and we don't really have to since the DFG
                // handles those cases in FixupPhase using value profiling. That's a bit awkward - we
                // shouldn't have to use value profiling to discover something that the AccessCase
                // could have told us. But, it works well enough. So, our only concern here is to not
                // crash on null structure.
                return GetByIdStatus(JSC::slowVersion(summary));
            }

            ComplexGetStatus complexGetStatus = ComplexGetStatus::computeFor(
                structure, access.conditionSet(), uid);

            switch (complexGetStatus.kind()) {
            case ComplexGetStatus::ShouldSkip:
                continue;

            case ComplexGetStatus::TakesSlowPath:
                return GetByIdStatus(JSC::slowVersion(summary));

            case ComplexGetStatus::Inlineable: {
                std::unique_ptr<CallLinkStatus> callLinkStatus;
                JSFunction* intrinsicFunction = nullptr;
                FunctionPtr<OperationPtrTag> customAccessorGetter;
                Optional<DOMAttributeAnnotation> domAttribute;

                switch (access.type()) {
                case AccessCase::Load:
                case AccessCase::GetGetter:
                case AccessCase::Miss: {
                    break;
                }
                case AccessCase::IntrinsicGetter: {
                    intrinsicFunction = access.as<IntrinsicGetterAccessCase>().intrinsicFunction();
                    break;
                }
                case AccessCase::Getter: {
                    callLinkStatus = std::make_unique<CallLinkStatus>();
                    if (CallLinkInfo* callLinkInfo = access.as<GetterSetterAccessCase>().callLinkInfo()) {
                        *callLinkStatus = CallLinkStatus::computeFor(
                            locker, profiledBlock, *callLinkInfo, callExitSiteData);
                    }
                    break;
                }
                case AccessCase::CustomAccessorGetter: {
                    customAccessorGetter = access.as<GetterSetterAccessCase>().customAccessor();
                    domAttribute = access.as<GetterSetterAccessCase>().domAttribute();
                    if (!domAttribute)
                        return GetByIdStatus(JSC::slowVersion(summary));
                    result.m_state = Custom;
                    break;
                }
                default: {
                    // FIXME: It would be totally sweet to support more of these at some point in the
                    // future. https://bugs.webkit.org/show_bug.cgi?id=133052
                    return GetByIdStatus(JSC::slowVersion(summary));
                } }

                ASSERT((AccessCase::Miss == access.type()) == (access.offset() == invalidOffset));
                GetByIdVariant variant(
                    StructureSet(structure), complexGetStatus.offset(),
                    complexGetStatus.conditionSet(), WTFMove(callLinkStatus),
                    intrinsicFunction,
                    customAccessorGetter,
                    domAttribute);

                if (!result.appendVariant(variant))
                    return GetByIdStatus(JSC::slowVersion(summary));

                if (domAttribute) {
                    // Give up when custom accesses are not merged into one.
                    if (result.numVariants() != 1)
                        return GetByIdStatus(JSC::slowVersion(summary));
                } else {
                    // Give up when custom access and simple access are mixed.
                    if (result.m_state == Custom)
                        return GetByIdStatus(JSC::slowVersion(summary));
                }
                break;
            } }
        }

        return result;
    }

    default:
        return GetByIdStatus(JSC::slowVersion(summary));
    }

    RELEASE_ASSERT_NOT_REACHED();
    return GetByIdStatus();
}

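// Computes a status by consulting each IC status context for this code origin; any hit is
// "blessed" with baseline and exit-site information before being returned. If no context
// knows anything, fall back to the baseline computation.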
GetByIdStatus GetByIdStatus::computeFor(
    CodeBlock* profiledBlock, ICStatusMap& baselineMap,
    ICStatusContextStack& icContextStack, CodeOrigin codeOrigin, UniquedStringImpl* uid)
{
    unsigned bytecodeIndex = codeOrigin.bytecodeIndex();
    CallLinkStatus::ExitSiteData callExitSiteData = CallLinkStatus::computeExitSiteData(profiledBlock, bytecodeIndex);
    ExitFlag didExit = hasBadCacheExitSite(profiledBlock, bytecodeIndex);

    for (ICStatusContext* context : icContextStack) {
        ICStatus status = context->get(codeOrigin);

        auto bless = [&] (const GetByIdStatus& result) -> GetByIdStatus {
            if (!context->isInlined(codeOrigin)) {
                // Merge with baseline result, which also happens to contain exit data for both
                // inlined and not-inlined.
                GetByIdStatus baselineResult = computeFor(
                    profiledBlock, baselineMap, bytecodeIndex, uid, didExit,
                    callExitSiteData);
                baselineResult.merge(result);
                return baselineResult;
            }
            if (didExit.isSet(ExitFromInlined))
                return result.slowVersion();
            return result;
        };

        if (status.stubInfo) {
            GetByIdStatus result;
            {
                ConcurrentJSLocker locker(context->optimizedCodeBlock->m_lock);
                result = computeForStubInfoWithoutExitSiteFeedback(
                    locker, context->optimizedCodeBlock, status.stubInfo, uid, callExitSiteData);
            }
            if (result.isSet())
                return bless(result);
        }

        if (status.getStatus)
            return bless(*status.getStatus);
    }

    return computeFor(profiledBlock, baselineMap, bytecodeIndex, uid, didExit, callExitSiteData);
}

GetByIdStatus GetByIdStatus::computeFor(const StructureSet& set, UniquedStringImpl* uid)
{
    // For now we only handle the super simple self access case. We could handle the
    // prototype case in the future.
    //
    // Note that this code is also used for GetByIdDirect since this function only looks
    // into direct properties. When supporting prototype chains, we should split this for
    // GetById and GetByIdDirect.

    if (set.isEmpty())
        return GetByIdStatus();

    if (parseIndex(*uid))
        return GetByIdStatus(TakesSlowPath);

    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = false;
    for (unsigned i = 0; i < set.size(); ++i) {
        Structure* structure = set[i];
        if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType)
            return GetByIdStatus(TakesSlowPath);

        if (!structure->propertyAccessesAreCacheable())
            return GetByIdStatus(TakesSlowPath);

        unsigned attributes;
        PropertyOffset offset = structure->getConcurrently(uid, attributes);
        if (!isValidOffset(offset))
            return GetByIdStatus(TakesSlowPath); // It's probably a prototype lookup. Give up on life for now, even though we could totally be way smarter about it.
        if (attributes & PropertyAttribute::Accessor)
            return GetByIdStatus(MakesCalls); // We could be smarter here, like strength-reducing this to a Call.
        if (attributes & PropertyAttribute::CustomAccessorOrValue)
            return GetByIdStatus(TakesSlowPath);

        if (!result.appendVariant(GetByIdVariant(structure, offset)))
            return GetByIdStatus(TakesSlowPath);
    }

    return result;
}
#endif // ENABLE(JIT)

bool GetByIdStatus::makesCalls() const
{
    switch (m_state) {
    case NoInformation:
    case TakesSlowPath:
    case Custom:
    case ModuleNamespace:
        return false;
    case Simple:
        for (unsigned i = m_variants.size(); i--;) {
            if (m_variants[i].callLinkStatus())
                return true;
        }
        return false;
    case MakesCalls:
        return true;
    }
    RELEASE_ASSERT_NOT_REACHED();

    return false;
}

GetByIdStatus GetByIdStatus::slowVersion() const
{
    return GetByIdStatus(makesCalls() ? MakesCalls : TakesSlowPath, wasSeenInJIT());
}

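// Merges another status into this one. States that cannot be combined, or variants that
// cannot be appended, force the result to TakesSlowPath (or MakesCalls if either side
// already makes calls).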
void GetByIdStatus::merge(const GetByIdStatus& other)
{
    if (other.m_state == NoInformation)
        return;

    auto mergeSlow = [&] () {
        *this = GetByIdStatus((makesCalls() || other.makesCalls()) ? MakesCalls : TakesSlowPath);
    };

    switch (m_state) {
    case NoInformation:
        *this = other;
        return;

    case Simple:
    case Custom:
        if (m_state != other.m_state)
            return mergeSlow();

        for (const GetByIdVariant& otherVariant : other.m_variants) {
            if (!appendVariant(otherVariant))
                return mergeSlow();
        }
        return;

    case ModuleNamespace:
        if (other.m_state != ModuleNamespace)
            return mergeSlow();

        if (m_moduleNamespaceObject != other.m_moduleNamespaceObject)
            return mergeSlow();

        if (m_moduleEnvironment != other.m_moduleEnvironment)
            return mergeSlow();

        if (m_scopeOffset != other.m_scopeOffset)
            return mergeSlow();

        return;

    case TakesSlowPath:
    case MakesCalls:
        return mergeSlow();
    }

    RELEASE_ASSERT_NOT_REACHED();
}

void GetByIdStatus::filter(const StructureSet& set)
{
    if (m_state != Simple)
        return;
    filterICStatusVariants(m_variants, set);
    if (m_variants.isEmpty())
        m_state = NoInformation;
}

void GetByIdStatus::markIfCheap(SlotVisitor& visitor)
{
    for (GetByIdVariant& variant : m_variants)
        variant.markIfCheap(visitor);
}

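// Returns false if any cell this status references is no longer marked (i.e. has died).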
bool GetByIdStatus::finalize(VM& vm)
{
    for (GetByIdVariant& variant : m_variants) {
        if (!variant.finalize(vm))
            return false;
    }
    if (m_moduleNamespaceObject && !vm.heap.isMarked(m_moduleNamespaceObject))
        return false;
    if (m_moduleEnvironment && !vm.heap.isMarked(m_moduleEnvironment))
        return false;
    return true;
}

void GetByIdStatus::dump(PrintStream& out) const
{
    out.print("(");
    switch (m_state) {
    case NoInformation:
        out.print("NoInformation");
        break;
    case Simple:
        out.print("Simple");
        break;
    case Custom:
        out.print("Custom");
        break;
    case ModuleNamespace:
        out.print("ModuleNamespace");
        break;
    case TakesSlowPath:
        out.print("TakesSlowPath");
        break;
    case MakesCalls:
        out.print("MakesCalls");
        break;
    }
    out.print(", ", listDump(m_variants), ", seenInJIT = ", m_wasSeenInJIT, ")");
}

} // namespace JSC