/*
 * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#if ENABLE(CSS_SELECTOR_JIT)

#include "RegisterAllocator.h"
#include <JavaScriptCore/MacroAssembler.h>
#include <limits>

namespace WebCore {

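// StackAllocator keeps track of the stack space the selector JIT reserves through the
// MacroAssembler: every push, pop, and padding adjustment goes through this class so the
// generated code and the compile-time bookkeeping stay in sync. Offsets are measured in
// bytes from the top of the allocator-managed region, and the destructor asserts that
// everything reserved has been released again.
//
// Illustrative sketch of how a caller might save registers around a C function call
// (the register vector and the call-emission step are hypothetical placeholders):
//
//     StackAllocator allocator(assembler);
//     auto savedSlots = allocator.push(registersToSave);   // spill registers to the stack
//     allocator.alignStackPreFunctionCall();               // keep the ABI-required alignment
//     // ... emit the function call ...
//     allocator.unalignStackPostFunctionCall();            // drop the padding, if any
//     allocator.pop(savedSlots, registersToSave);          // restore in reverse order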
class StackAllocator {
public:
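    // A StackReference identifies a saved slot by its byte offset from the top of the
    // allocator-managed region. A default-constructed reference is invalid.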
    class StackReference {
    public:
        StackReference()
            : m_offsetFromTop(std::numeric_limits<unsigned>::max())
        { }
        explicit StackReference(unsigned offset)
            : m_offsetFromTop(offset)
        { }
        operator unsigned() const { return m_offsetFromTop; }
        bool isValid() const { return m_offsetFromTop != std::numeric_limits<unsigned>::max(); }
    private:
        unsigned m_offsetFromTop;
    };

    typedef Vector<StackReference, maximumRegisterCount> StackReferenceVector;

    StackAllocator(JSC::MacroAssembler& assembler)
        : m_assembler(assembler)
        , m_offsetFromTop(0)
        , m_hasFunctionCallPadding(false)
    {
    }

    StackAllocator(const StackAllocator&) = default;

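    // Returns the reference the next single push would produce; nothing is reserved.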
    StackReference stackTop()
    {
        return StackReference(m_offsetFromTop + stackUnitInBytes());
    }

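    // Every byte reserved through the allocator must have been popped or discarded before
    // the allocator is destroyed.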
    ~StackAllocator()
    {
        RELEASE_ASSERT(!m_offsetFromTop);
        RELEASE_ASSERT(!m_hasFunctionCallPadding);
    }

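    // Reserve stack slots by moving the stack pointer without storing anything into them.
    // On ARM64, two slots are packed into each stack unit so the stack pointer keeps its
    // required 16-byte alignment.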
    StackReference allocateUninitialized()
    {
        return allocateUninitialized(1)[0];
    }

    StackReferenceVector allocateUninitialized(unsigned count)
    {
        RELEASE_ASSERT(!m_hasFunctionCallPadding);
        StackReferenceVector stackReferences;
        unsigned oldOffsetFromTop = m_offsetFromTop;
#if CPU(ARM64)
        for (unsigned i = 0; i < count - 1; i += 2) {
            m_offsetFromTop += stackUnitInBytes();
            stackReferences.append(StackReference(m_offsetFromTop - stackUnitInBytes() / 2));
            stackReferences.append(StackReference(m_offsetFromTop));
        }
        if (count % 2) {
            m_offsetFromTop += stackUnitInBytes();
            stackReferences.append(StackReference(m_offsetFromTop));
        }
#else
        for (unsigned i = 0; i < count; ++i) {
            m_offsetFromTop += stackUnitInBytes();
            stackReferences.append(StackReference(m_offsetFromTop));
        }
#endif
        m_assembler.addPtrNoFlags(JSC::MacroAssembler::TrustedImm32(-(m_offsetFromTop - oldOffsetFromTop)), JSC::MacroAssembler::stackPointerRegister);
        return stackReferences;
    }

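    // Save a list of registers to newly reserved slots and return one reference per register.
    // On ARM64, consecutive registers are saved with pushPair so each stack unit holds two.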
    template<size_t inlineCapacity, typename OverflowHandler>
    StackReferenceVector push(const Vector<JSC::MacroAssembler::RegisterID, inlineCapacity, OverflowHandler>& registerIDs)
    {
        RELEASE_ASSERT(!m_hasFunctionCallPadding);

        StackReferenceVector stackReferences;

        if (registerIDs.isEmpty())
            return stackReferences;

#if CPU(ARM64)
        unsigned pushRegisterCount = registerIDs.size();
        for (unsigned i = 0; i < pushRegisterCount - 1; i += 2) {
            m_assembler.pushPair(registerIDs[i + 1], registerIDs[i]);
            m_offsetFromTop += stackUnitInBytes();
            stackReferences.append(StackReference(m_offsetFromTop - stackUnitInBytes() / 2));
            stackReferences.append(StackReference(m_offsetFromTop));
        }
        if (pushRegisterCount % 2)
            stackReferences.append(push(registerIDs[pushRegisterCount - 1]));
#else
        for (auto registerID : registerIDs)
            stackReferences.append(push(registerID));
#endif
        return stackReferences;
    }

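    // Save a single register and return a reference to its slot.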
    StackReference push(JSC::MacroAssembler::RegisterID registerID)
    {
        RELEASE_ASSERT(!m_hasFunctionCallPadding);
        m_assembler.pushToSave(registerID);
        m_offsetFromTop += stackUnitInBytes();
        return StackReference(m_offsetFromTop);
    }

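    // Restore a list of registers from their slots. The references must correspond to the
    // registers and must be the most recently pushed slots; they are released in reverse
    // order of the pushes, which the assertions below enforce.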
    template<size_t inlineCapacity, typename OverflowHandler>
    void pop(const StackReferenceVector& stackReferences, const Vector<JSC::MacroAssembler::RegisterID, inlineCapacity, OverflowHandler>& registerIDs)
    {
        RELEASE_ASSERT(!m_hasFunctionCallPadding);

        unsigned popRegisterCount = registerIDs.size();
        RELEASE_ASSERT(stackReferences.size() == popRegisterCount);
#if CPU(ARM64)
        ASSERT(m_offsetFromTop >= stackUnitInBytes() * ((popRegisterCount + 1) / 2));
        unsigned popRegisterCountOdd = popRegisterCount % 2;
        if (popRegisterCountOdd)
            pop(stackReferences[popRegisterCount - 1], registerIDs[popRegisterCount - 1]);
        for (unsigned i = popRegisterCount - popRegisterCountOdd; i > 0; i -= 2) {
            RELEASE_ASSERT(stackReferences[i - 1] == m_offsetFromTop);
            RELEASE_ASSERT(stackReferences[i - 2] == m_offsetFromTop - stackUnitInBytes() / 2);
            RELEASE_ASSERT(m_offsetFromTop >= stackUnitInBytes());
            m_offsetFromTop -= stackUnitInBytes();
            m_assembler.popPair(registerIDs[i - 1], registerIDs[i - 2]);
        }
#else
        ASSERT(m_offsetFromTop >= stackUnitInBytes() * popRegisterCount);
        for (unsigned i = popRegisterCount; i > 0; --i)
            pop(stackReferences[i - 1], registerIDs[i - 1]);
#endif
    }

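    // Restore a single register. The reference must denote the current top slot: pops must
    // happen in the reverse order of the matching pushes.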
    void pop(StackReference stackReference, JSC::MacroAssembler::RegisterID registerID)
    {
        RELEASE_ASSERT(stackReference == m_offsetFromTop);
        RELEASE_ASSERT(!m_hasFunctionCallPadding);
        RELEASE_ASSERT(m_offsetFromTop >= stackUnitInBytes());
        m_offsetFromTop -= stackUnitInBytes();
        m_assembler.popToRestore(registerID);
    }

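    // Discard the given slot and everything pushed after it by adjusting the stack pointer,
    // without restoring any register.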
    void popAndDiscardUpTo(StackReference stackReference)
    {
        unsigned positionBeforeStackReference = stackReference - stackUnitInBytes();
        RELEASE_ASSERT(positionBeforeStackReference < m_offsetFromTop);

        unsigned stackDelta = m_offsetFromTop - positionBeforeStackReference;
        m_assembler.addPtr(JSC::MacroAssembler::TrustedImm32(stackDelta), JSC::MacroAssembler::stackPointerRegister);
        m_offsetFromTop -= stackDelta;
    }

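    // On x86_64, C function calls expect the stack pointer to be properly 16-byte aligned;
    // these two helpers insert and remove one unit of padding when the bytes pushed so far
    // would break that alignment. On other architectures they are no-ops.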
    void alignStackPreFunctionCall()
    {
#if CPU(X86_64)
        RELEASE_ASSERT(!m_hasFunctionCallPadding);
        unsigned topAlignment = stackUnitInBytes();
        if ((topAlignment + m_offsetFromTop) % 16) {
            m_hasFunctionCallPadding = true;
            m_assembler.addPtrNoFlags(JSC::MacroAssembler::TrustedImm32(-stackUnitInBytes()), JSC::MacroAssembler::stackPointerRegister);
        }
#endif
    }

    void unalignStackPostFunctionCall()
    {
#if CPU(X86_64)
        if (m_hasFunctionCallPadding) {
            m_assembler.addPtrNoFlags(JSC::MacroAssembler::TrustedImm32(stackUnitInBytes()), JSC::MacroAssembler::stackPointerRegister);
            m_hasFunctionCallPadding = false;
        }
#endif
    }

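    // Merge the bookkeeping of allocator copies that diverged on different code paths (for
    // example the two sides of a branch). The copies must have ended up with the same stack
    // state, which is asserted, and they are reset afterwards.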
    void merge(StackAllocator&& stackA, StackAllocator&& stackB)
    {
        RELEASE_ASSERT(stackA.m_offsetFromTop == stackB.m_offsetFromTop);
        RELEASE_ASSERT(stackA.m_hasFunctionCallPadding == stackB.m_hasFunctionCallPadding);
        ASSERT(&stackA.m_assembler == &stackB.m_assembler);
        ASSERT(&m_assembler == &stackB.m_assembler);

        m_offsetFromTop = stackA.m_offsetFromTop;
        m_hasFunctionCallPadding = stackA.m_hasFunctionCallPadding;

        stackA.reset();
        stackB.reset();
    }

    void merge(StackAllocator&& stackA, StackAllocator&& stackB, StackAllocator&& stackC)
    {
        RELEASE_ASSERT(stackA.m_offsetFromTop == stackB.m_offsetFromTop);
        RELEASE_ASSERT(stackA.m_offsetFromTop == stackC.m_offsetFromTop);
        RELEASE_ASSERT(stackA.m_hasFunctionCallPadding == stackB.m_hasFunctionCallPadding);
        RELEASE_ASSERT(stackA.m_hasFunctionCallPadding == stackC.m_hasFunctionCallPadding);
        ASSERT(&stackA.m_assembler == &stackB.m_assembler);
        ASSERT(&stackA.m_assembler == &stackC.m_assembler);
        ASSERT(&m_assembler == &stackB.m_assembler);

        m_offsetFromTop = stackA.m_offsetFromTop;
        m_hasFunctionCallPadding = stackA.m_hasFunctionCallPadding;

        stackA.reset();
        stackB.reset();
        stackC.reset();
    }

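    // Address of a saved slot, expressed relative to the current stack pointer.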
    JSC::MacroAssembler::Address addressOf(StackReference stackReference)
    {
        return JSC::MacroAssembler::Address(JSC::MacroAssembler::stackPointerRegister, offsetToStackReference(stackReference));
    }

    StackAllocator& operator=(const StackAllocator& other)
    {
        RELEASE_ASSERT(&m_assembler == &other.m_assembler);
        m_offsetFromTop = other.m_offsetFromTop;
        m_hasFunctionCallPadding = other.m_hasFunctionCallPadding;
        return *this;
    }

private:
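    // Number of bytes the MacroAssembler reserves for each pushToSave().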
    static unsigned stackUnitInBytes()
    {
        return JSC::MacroAssembler::pushToSaveByteOffset();
    }

    unsigned offsetToStackReference(StackReference stackReference)
    {
        RELEASE_ASSERT(m_offsetFromTop >= stackReference);
        return m_offsetFromTop - stackReference;
    }

    void reset()
    {
        m_offsetFromTop = 0;
        m_hasFunctionCallPadding = false;
    }

    JSC::MacroAssembler& m_assembler;
    unsigned m_offsetFromTop;
    bool m_hasFunctionCallPadding;
};

} // namespace WebCore

#endif // ENABLE(CSS_SELECTOR_JIT)