/*
 * Copyright (C) 2014-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#if ENABLE(JIT)

#include "AccessCase.h"
#include "JITStubRoutine.h"
#include "JSFunctionInlines.h"
#include "MacroAssembler.h"
#include "ScratchRegisterAllocator.h"
#include <wtf/Vector.h>

namespace JSC {
namespace DOMJIT {
class GetterSetter;
}

class CodeBlock;
class PolymorphicAccess;
class StructureStubInfo;
class WatchpointsOnStructureStubInfo;
class ScratchRegisterAllocator;

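// Describes the outcome of one attempt to add cases to, or regenerate, a
// polymorphic inline cache: whether code was generated, whether the caller
// should give up on further optimization, or whether the stub must be reset
// and some watchpoints fired.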
class AccessGenerationResult {
public:
    enum Kind {
        MadeNoChanges,
        GaveUp,
        Buffered,
        GeneratedNewCode,
        GeneratedFinalCode, // Generated so much code that we never want to generate code again.
        ResetStubAndFireWatchpoints // We discovered something that makes us want to start over fresh with this stub. Currently, this happens when we detect poly proto.
    };
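
    // A hedged sketch of how a caller might dispatch on the result kind. The
    // helper functions below are illustrative only, not real APIs; the actual
    // repatching logic lives in the code that owns the StructureStubInfo.
    //
    //     AccessGenerationResult result = access->addCase(...);
    //     if (result.shouldResetStubAndFireWatchpoints())
    //         resetStubAndFire(result);          // Illustrative.
    //     else if (result.generatedSomeCode())
    //         repatchTo(result.code());          // Illustrative.
    //     else if (result.shouldGiveUpNow())
    //         stopOptimizingThisAccess();        // Illustrative.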

    AccessGenerationResult() = default;
    AccessGenerationResult(AccessGenerationResult&&) = default;
    AccessGenerationResult& operator=(AccessGenerationResult&&) = default;

    AccessGenerationResult(Kind kind)
        : m_kind(kind)
    {
        RELEASE_ASSERT(kind != GeneratedNewCode);
        RELEASE_ASSERT(kind != GeneratedFinalCode);
    }

    AccessGenerationResult(Kind kind, MacroAssemblerCodePtr<JITStubRoutinePtrTag> code)
        : m_kind(kind)
        , m_code(code)
    {
        RELEASE_ASSERT(kind == GeneratedNewCode || kind == GeneratedFinalCode);
        RELEASE_ASSERT(code);
    }

    bool operator==(const AccessGenerationResult& other) const
    {
        return m_kind == other.m_kind && m_code == other.m_code;
    }

    bool operator!=(const AccessGenerationResult& other) const
    {
        return !(*this == other);
    }

    explicit operator bool() const
    {
        return *this != AccessGenerationResult();
    }

    Kind kind() const { return m_kind; }

    const MacroAssemblerCodePtr<JITStubRoutinePtrTag>& code() const { return m_code; }

    bool madeNoChanges() const { return m_kind == MadeNoChanges; }
    bool gaveUp() const { return m_kind == GaveUp; }
    bool buffered() const { return m_kind == Buffered; }
    bool generatedNewCode() const { return m_kind == GeneratedNewCode; }
    bool generatedFinalCode() const { return m_kind == GeneratedFinalCode; }
    bool shouldResetStubAndFireWatchpoints() const { return m_kind == ResetStubAndFireWatchpoints; }

    // If we gave up on this attempt to generate code, or if we generated the "final" code, then we
    // should give up after this.
    bool shouldGiveUpNow() const { return gaveUp() || generatedFinalCode(); }

    bool generatedSomeCode() const { return generatedNewCode() || generatedFinalCode(); }

    void dump(PrintStream&) const;

    void addWatchpointToFire(InlineWatchpointSet& set, StringFireDetail detail)
    {
        m_watchpointsToFire.append(std::pair<InlineWatchpointSet&, StringFireDetail>(set, detail));
    }
    void fireWatchpoints(VM& vm)
    {
        ASSERT(m_kind == ResetStubAndFireWatchpoints);
        for (auto& pair : m_watchpointsToFire)
            pair.first.invalidate(vm, pair.second);
    }
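
    // Expected flow (a sketch; nothing here enforces it beyond the assertion
    // above): whoever computes a ResetStubAndFireWatchpoints result registers
    // the affected sets via addWatchpointToFire(), and the owner of the stub
    // resets it and then calls fireWatchpoints(vm).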

private:
    Kind m_kind { MadeNoChanges }; // Initialized so a default-constructed result is well-defined and compares false.
    MacroAssemblerCodePtr<JITStubRoutinePtrTag> m_code;
    Vector<std::pair<InlineWatchpointSet&, StringFireDetail>> m_watchpointsToFire;
};

class PolymorphicAccess {
    WTF_MAKE_NONCOPYABLE(PolymorphicAccess);
    WTF_MAKE_FAST_ALLOCATED;
public:
    PolymorphicAccess();
    ~PolymorphicAccess();

    // If this fails (returns GaveUp), it leaves the old stub intact, but you should not call this
    // method again for this PolymorphicAccess instance.
    AccessGenerationResult addCases(
        const GCSafeConcurrentJSLocker&, VM&, CodeBlock*, StructureStubInfo&, const Identifier&, Vector<std::unique_ptr<AccessCase>, 2>);

    AccessGenerationResult addCase(
        const GCSafeConcurrentJSLocker&, VM&, CodeBlock*, StructureStubInfo&, const Identifier&, std::unique_ptr<AccessCase>);

    AccessGenerationResult regenerate(const GCSafeConcurrentJSLocker&, VM&, CodeBlock*, StructureStubInfo&, const Identifier&);
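
    // Typical lifecycle, as a hedged sketch (the real driver is
    // StructureStubInfo; shouldRegenerateNow() is illustrative, not a real API):
    //
    //     auto result = access->addCases(locker, vm, codeBlock, stubInfo, ident, WTFMove(cases));
    //     if (result.buffered() && shouldRegenerateNow(stubInfo))
    //         result = access->regenerate(locker, vm, codeBlock, stubInfo, ident);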

    bool isEmpty() const { return m_list.isEmpty(); }
    unsigned size() const { return m_list.size(); }
    const AccessCase& at(unsigned i) const { return *m_list[i]; }
    const AccessCase& operator[](unsigned i) const { return *m_list[i]; }

    // If this returns false, then we are requesting a reset of the owning StructureStubInfo.
    bool visitWeak(VM&) const;

    // This returns true if it has marked everything it will ever mark. That can be used as an
    // optimization to avoid calling this method again during the fixpoint.
    bool propagateTransitions(SlotVisitor&) const;

    void aboutToDie();

    void dump(PrintStream& out) const;
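    // Answers whether pc lies within the stub routine currently installed for this access.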
    bool containsPC(void* pc) const
    {
        if (!m_stubRoutine)
            return false;

        uintptr_t pcAsInt = bitwise_cast<uintptr_t>(pc);
        return m_stubRoutine->startAddress() <= pcAsInt && pcAsInt <= m_stubRoutine->endAddress();
    }

private:
    friend class AccessCase;
    friend class CodeBlock;
    friend struct AccessGenerationState;

    typedef Vector<std::unique_ptr<AccessCase>, 2> ListType;

    void commit(
        const GCSafeConcurrentJSLocker&, VM&, std::unique_ptr<WatchpointsOnStructureStubInfo>&, CodeBlock*, StructureStubInfo&,
        const Identifier&, AccessCase&);

    ListType m_list;
    RefPtr<JITStubRoutine> m_stubRoutine;
    std::unique_ptr<WatchpointsOnStructureStubInfo> m_watchpoints;
    std::unique_ptr<Vector<WriteBarrier<JSCell>>> m_weakReferences;
};

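// Scratch state threaded through code generation while a stub is being
// (re)generated: the assembler, register assignments, jump lists for the
// success/fail exits, and bookkeeping for spills, watchpoints, and exceptions.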
struct AccessGenerationState {
    AccessGenerationState(VM& vm, JSGlobalObject* globalObject)
        : m_vm(vm)
        , m_globalObject(globalObject)
        , m_calculatedRegistersForCallAndExceptionHandling(false)
        , m_needsToRestoreRegistersIfException(false)
        , m_calculatedCallSiteIndex(false)
    {
    }
    VM& m_vm;
    JSGlobalObject* m_globalObject;
    CCallHelpers* jit { nullptr };
    ScratchRegisterAllocator* allocator { nullptr };
    ScratchRegisterAllocator::PreservedState preservedReusedRegisterState;
    PolymorphicAccess* access { nullptr };
    StructureStubInfo* stubInfo { nullptr };
    MacroAssembler::JumpList success;
    MacroAssembler::JumpList failAndRepatch;
    MacroAssembler::JumpList failAndIgnore;
    GPRReg baseGPR { InvalidGPRReg };
    GPRReg thisGPR { InvalidGPRReg };
    JSValueRegs valueRegs;
    GPRReg scratchGPR { InvalidGPRReg };
    const Identifier* ident { nullptr };
    std::unique_ptr<WatchpointsOnStructureStubInfo> watchpoints;
    Vector<WriteBarrier<JSCell>> weakReferences;

    Watchpoint* addWatchpoint(const ObjectPropertyCondition& = ObjectPropertyCondition());

    void restoreScratch();
    void succeed();

    struct SpillState {
        SpillState() = default;
        SpillState(RegisterSet&& regs, unsigned usedStackBytes)
            : spilledRegisters(WTFMove(regs))
            , numberOfStackBytesUsedForRegisterPreservation(usedStackBytes)
        {
        }

        RegisterSet spilledRegisters { };
        unsigned numberOfStackBytesUsedForRegisterPreservation { std::numeric_limits<unsigned>::max() };

        bool isEmpty() const { return numberOfStackBytesUsedForRegisterPreservation == std::numeric_limits<unsigned>::max(); }
    };

    const RegisterSet& calculateLiveRegistersForCallAndExceptionHandling();

    SpillState preserveLiveRegistersToStackForCall(const RegisterSet& = { });

    void restoreLiveRegistersFromStackForCallWithThrownException(const SpillState&);
    void restoreLiveRegistersFromStackForCall(const SpillState&, const RegisterSet& dontRestore = { });
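
    // A hedged sketch of how a generated call that can throw is typically
    // bracketed by the spill/restore pair above:
    //
    //     auto spillState = state.preserveLiveRegistersToStackForCall();
    //     // ... emit the slow call ...
    //     state.restoreLiveRegistersFromStackForCall(spillState);
    //
    // The WithThrownException variant restores state on the path where the
    // call threw, before unwinding continues.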

    const RegisterSet& liveRegistersForCall();

    CallSiteIndex callSiteIndexForExceptionHandlingOrOriginal();
    CallSiteIndex callSiteIndexForExceptionHandling()
    {
        RELEASE_ASSERT(m_calculatedRegistersForCallAndExceptionHandling);
        RELEASE_ASSERT(m_needsToRestoreRegistersIfException);
        RELEASE_ASSERT(m_calculatedCallSiteIndex);
        return m_callSiteIndex;
    }

    const HandlerInfo& originalExceptionHandler();

    bool needsToRestoreRegistersIfException() const { return m_needsToRestoreRegistersIfException; }
    CallSiteIndex originalCallSiteIndex() const;

    void emitExplicitExceptionHandler();

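    // Every JS getter/setter call emitted for this stub is expected to agree
    // on its spill state; the assertions below check that invariant when the
    // state is set more than once.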
    void setSpillStateForJSGetterSetter(SpillState& spillState)
    {
        if (!m_spillStateForJSGetterSetter.isEmpty()) {
            ASSERT(m_spillStateForJSGetterSetter.numberOfStackBytesUsedForRegisterPreservation == spillState.numberOfStackBytesUsedForRegisterPreservation);
            ASSERT(m_spillStateForJSGetterSetter.spilledRegisters == spillState.spilledRegisters);
        }
        m_spillStateForJSGetterSetter = spillState;
    }
    SpillState spillStateForJSGetterSetter() const { return m_spillStateForJSGetterSetter; }

private:
    const RegisterSet& liveRegistersToPreserveAtExceptionHandlingCallSite();

    RegisterSet m_liveRegistersToPreserveAtExceptionHandlingCallSite;
    RegisterSet m_liveRegistersForCall;
    CallSiteIndex m_callSiteIndex { CallSiteIndex(std::numeric_limits<unsigned>::max()) };
    SpillState m_spillStateForJSGetterSetter;
    bool m_calculatedRegistersForCallAndExceptionHandling : 1;
    bool m_needsToRestoreRegistersIfException : 1;
    bool m_calculatedCallSiteIndex : 1;
};

} // namespace JSC

namespace WTF {

void printInternal(PrintStream&, JSC::AccessGenerationResult::Kind);
void printInternal(PrintStream&, JSC::AccessCase::AccessType);
void printInternal(PrintStream&, JSC::AccessCase::State);

} // namespace WTF

#endif // ENABLE(JIT)