/*
 * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#include "CodeBlock.h"
#include "CodeBlockHash.h"
#include "CodeOrigin.h"
#include "ValueRecovery.h"
#include "WriteBarrier.h"
#include <wtf/PrintStream.h>
#include <wtf/StdLibExtras.h>
#include <wtf/Vector.h>

namespace JSC {

struct InlineCallFrame;
class ExecState;
class JSFunction;

struct InlineCallFrame {
    enum Kind {
        Call,
        Construct,
        TailCall,
        CallVarargs,
        ConstructVarargs,
        TailCallVarargs,

        // For these, the stackOffset incorporates the argument count plus the true return PC
        // slot.
        GetterCall,
        SetterCall
    };

    static CallMode callModeFor(Kind kind)
    {
        switch (kind) {
        case Call:
        case CallVarargs:
        case GetterCall:
        case SetterCall:
            return CallMode::Regular;
        case TailCall:
        case TailCallVarargs:
            return CallMode::Tail;
        case Construct:
        case ConstructVarargs:
            return CallMode::Construct;
        }
        RELEASE_ASSERT_NOT_REACHED();
    }

    static Kind kindFor(CallMode callMode)
    {
        switch (callMode) {
        case CallMode::Regular:
            return Call;
        case CallMode::Construct:
            return Construct;
        case CallMode::Tail:
            return TailCall;
        }
        RELEASE_ASSERT_NOT_REACHED();
    }

    static Kind varargsKindFor(CallMode callMode)
    {
        switch (callMode) {
        case CallMode::Regular:
            return CallVarargs;
        case CallMode::Construct:
            return ConstructVarargs;
        case CallMode::Tail:
            return TailCallVarargs;
        }
        RELEASE_ASSERT_NOT_REACHED();
    }

    static CodeSpecializationKind specializationKindFor(Kind kind)
    {
        switch (kind) {
        case Call:
        case CallVarargs:
        case TailCall:
        case TailCallVarargs:
        case GetterCall:
        case SetterCall:
            return CodeForCall;
        case Construct:
        case ConstructVarargs:
            return CodeForConstruct;
        }
        RELEASE_ASSERT_NOT_REACHED();
    }
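
    // Illustrative sanity checks (not part of the original header): callModeFor()
    // inverts both kindFor() and varargsKindFor(), and the varargs kinds keep the
    // same specialization as their non-varargs counterparts. For example:
    //
    //     ASSERT(callModeFor(kindFor(CallMode::Tail)) == CallMode::Tail);
    //     ASSERT(callModeFor(varargsKindFor(CallMode::Tail)) == CallMode::Tail);
    //     ASSERT(specializationKindFor(varargsKindFor(CallMode::Construct)) == CodeForConstruct);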

    static bool isVarargs(Kind kind)
    {
        switch (kind) {
        case CallVarargs:
        case TailCallVarargs:
        case ConstructVarargs:
            return true;
        default:
            return false;
        }
    }

    static bool isTail(Kind kind)
    {
        switch (kind) {
        case TailCall:
        case TailCallVarargs:
            return true;
        default:
            return false;
        }
    }

    bool isTail() const
    {
        return isTail(static_cast<Kind>(kind));
    }

    static CodeOrigin* computeCallerSkippingTailCalls(InlineCallFrame* inlineCallFrame, Kind* callerCallKind = nullptr)
    {
        CodeOrigin* codeOrigin;
        bool tailCallee;
        int callKind;
        do {
            tailCallee = inlineCallFrame->isTail();
            callKind = inlineCallFrame->kind;
            codeOrigin = &inlineCallFrame->directCaller;
            inlineCallFrame = codeOrigin->inlineCallFrame();
        } while (inlineCallFrame && tailCallee);

        if (tailCallee)
            return nullptr;

        if (callerCallKind)
            *callerCallKind = static_cast<Kind>(callKind);

        return codeOrigin;
    }

    CodeOrigin* getCallerSkippingTailCalls(Kind* callerCallKind = nullptr)
    {
        return computeCallerSkippingTailCalls(this, callerCallKind);
    }

    InlineCallFrame* getCallerInlineFrameSkippingTailCalls()
    {
        CodeOrigin* caller = getCallerSkippingTailCalls();
        return caller ? caller->inlineCallFrame() : nullptr;
    }
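
    // Usage sketch (illustrative; `frame` is a hypothetical InlineCallFrame*):
    //
    //     InlineCallFrame::Kind callerKind;
    //     if (CodeOrigin* caller = frame->getCallerSkippingTailCalls(&callerKind)) {
    //         // `caller` reached this inline stack through a genuine (non-tail)
    //         // call of kind `callerKind`.
    //     }
    //
    // A null result means every frame up the inline stack tail-called, so the
    // true caller is not part of this compilation unit's inline stack.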

    Vector<ValueRecovery> argumentsWithFixup; // Includes 'this' and arity fixups.
    WriteBarrier<CodeBlock> baselineCodeBlock;
    CodeOrigin directCaller;

    unsigned argumentCountIncludingThis { 0 }; // Does not include fixups.
    signed stackOffset : 28;
    unsigned kind : 3; // Real type is Kind.
    bool isClosureCall : 1; // If false then we know that callee/scope are constants and the DFG won't treat them as variables, i.e. they have to be recovered manually.
    VirtualRegister argumentCountRegister; // Only set when we inline a varargs call.

    ValueRecovery calleeRecovery;

    // There is really no good notion of a "default" set of values for
    // InlineCallFrame's fields. This constructor is here just to reduce confusion
    // in case we forget to initialize explicitly.
    InlineCallFrame()
        : stackOffset(0)
        , kind(Call)
        , isClosureCall(false)
    {
    }

    bool isVarargs() const
    {
        return isVarargs(static_cast<Kind>(kind));
    }

    CodeSpecializationKind specializationKind() const { return specializationKindFor(static_cast<Kind>(kind)); }

    JSFunction* calleeConstant() const;

    // Get the callee given a machine call frame to which this InlineCallFrame belongs.
    JSFunction* calleeForCallFrame(ExecState*) const;

    CString inferredName() const;
    CodeBlockHash hash() const;
    CString hashAsStringIfPossible() const;

    void setStackOffset(signed offset)
    {
        stackOffset = offset;
        RELEASE_ASSERT(static_cast<signed>(stackOffset) == offset);
    }

    ptrdiff_t callerFrameOffset() const { return stackOffset * sizeof(Register) + CallFrame::callerFrameOffset(); }
    ptrdiff_t returnPCOffset() const { return stackOffset * sizeof(Register) + CallFrame::returnPCOffset(); }
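
    // Worked example (illustrative): with 8-byte Registers and stackOffset == 16,
    // callerFrameOffset() is 16 * 8 + CallFrame::callerFrameOffset(); i.e. the
    // inlined frame's header slots sit at a fixed byte displacement from the
    // machine frame, determined entirely by stackOffset.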

    bool isStrictMode() const { return baselineCodeBlock->isStrictMode(); }

    void dumpBriefFunctionInformation(PrintStream&) const;
    void dump(PrintStream&) const;
    void dumpInContext(PrintStream&, DumpContext*) const;

    MAKE_PRINT_METHOD(InlineCallFrame, dumpBriefFunctionInformation, briefFunctionInformation);
};

inline CodeBlock* baselineCodeBlockForInlineCallFrame(InlineCallFrame* inlineCallFrame)
{
    RELEASE_ASSERT(inlineCallFrame);
    return inlineCallFrame->baselineCodeBlock.get();
}

inline CodeBlock* baselineCodeBlockForOriginAndBaselineCodeBlock(const CodeOrigin& codeOrigin, CodeBlock* baselineCodeBlock)
{
    ASSERT(baselineCodeBlock->jitType() == JITType::BaselineJIT);
    auto* inlineCallFrame = codeOrigin.inlineCallFrame();
    if (inlineCallFrame)
        return baselineCodeBlockForInlineCallFrame(inlineCallFrame);
    return baselineCodeBlock;
}
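
// Usage sketch (illustrative; `origin` and `topBaseline` are hypothetical):
//
//     CodeBlock* block = baselineCodeBlockForOriginAndBaselineCodeBlock(origin, topBaseline);
//
// If `origin` points into an inlined frame, `block` is that inlinee's baseline
// CodeBlock; otherwise it is `topBaseline` itself.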

// This function is defined here, and not in CodeOrigin.h, because it needs access
// to the directCaller field in InlineCallFrame.
template <typename Function>
inline void CodeOrigin::walkUpInlineStack(const Function& function)
{
    CodeOrigin codeOrigin = *this;
    while (true) {
        function(codeOrigin);
        auto* inlineCallFrame = codeOrigin.inlineCallFrame();
        if (!inlineCallFrame)
            break;
        codeOrigin = inlineCallFrame->directCaller;
    }
}
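
// Usage sketch (illustrative): visit every CodeOrigin from an inlined site out
// to the machine frame's own origin, innermost first:
//
//     codeOrigin.walkUpInlineStack([&] (CodeOrigin origin) {
//         dataLog("    ", origin, "\n");
//     });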

ALWAYS_INLINE VirtualRegister remapOperand(InlineCallFrame* inlineCallFrame, VirtualRegister reg)
{
    if (inlineCallFrame)
        return VirtualRegister(reg.offset() + inlineCallFrame->stackOffset);
    return reg;
}
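
// Usage sketch (illustrative): translate an operand that is relative to an
// inlined frame into the machine frame's coordinate system:
//
//     VirtualRegister machineReg = remapOperand(codeOrigin.inlineCallFrame(), reg);
//
// When there is no InlineCallFrame the operand is already machine-frame-relative
// and is returned unchanged.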

} // namespace JSC

namespace WTF {

void printInternal(PrintStream&, JSC::InlineCallFrame::Kind);

} // namespace WTF