1/*
2 * Copyright (C) 2014-2017 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#include "config.h"
27#include "ScratchRegisterAllocator.h"
28
29#if ENABLE(JIT)
30
31#include "JSCInlines.h"
32#include "MaxFrameExtentForSlowPathCall.h"
33#include "VM.h"
34
35namespace JSC {
36
37ScratchRegisterAllocator::ScratchRegisterAllocator(const RegisterSet& usedRegisters)
38 : m_usedRegisters(usedRegisters)
39 , m_numberOfReusedRegisters(0)
40{
41}
42
43ScratchRegisterAllocator::~ScratchRegisterAllocator() { }
44
45void ScratchRegisterAllocator::lock(GPRReg reg)
46{
47 if (reg == InvalidGPRReg)
48 return;
49 unsigned index = GPRInfo::toIndex(reg);
50 if (index == GPRInfo::InvalidIndex)
51 return;
52 m_lockedRegisters.setGPRByIndex(index);
53}
54
55void ScratchRegisterAllocator::lock(FPRReg reg)
56{
57 if (reg == InvalidFPRReg)
58 return;
59 unsigned index = FPRInfo::toIndex(reg);
60 if (index == FPRInfo::InvalidIndex)
61 return;
62 m_lockedRegisters.setFPRByIndex(index);
63}
64
65void ScratchRegisterAllocator::lock(JSValueRegs regs)
66{
67 lock(regs.tagGPR());
68 lock(regs.payloadGPR());
69}
70
71template<typename BankInfo>
72typename BankInfo::RegisterType ScratchRegisterAllocator::allocateScratch()
73{
74 // First try to allocate a register that is totally free.
75 for (unsigned i = 0; i < BankInfo::numberOfRegisters; ++i) {
76 typename BankInfo::RegisterType reg = BankInfo::toRegister(i);
77 if (!m_lockedRegisters.get(reg)
78 && !m_usedRegisters.get(reg)
79 && !m_scratchRegisters.get(reg)) {
80 m_scratchRegisters.set(reg);
81 return reg;
82 }
83 }
84
85 // Since that failed, try to allocate a register that is not yet
86 // locked or used for scratch.
87 for (unsigned i = 0; i < BankInfo::numberOfRegisters; ++i) {
88 typename BankInfo::RegisterType reg = BankInfo::toRegister(i);
89 if (!m_lockedRegisters.get(reg) && !m_scratchRegisters.get(reg)) {
90 m_scratchRegisters.set(reg);
91 m_numberOfReusedRegisters++;
92 return reg;
93 }
94 }
95
96 // We failed.
97 CRASH();
98 // Make some silly compilers happy.
99 return static_cast<typename BankInfo::RegisterType>(-1);
100}
101
102GPRReg ScratchRegisterAllocator::allocateScratchGPR() { return allocateScratch<GPRInfo>(); }
103FPRReg ScratchRegisterAllocator::allocateScratchFPR() { return allocateScratch<FPRInfo>(); }
104
105ScratchRegisterAllocator::PreservedState ScratchRegisterAllocator::preserveReusedRegistersByPushing(MacroAssembler& jit, ExtraStackSpace extraStackSpace)
106{
107 if (!didReuseRegisters())
108 return PreservedState(0, extraStackSpace);
109
110 RegisterSet registersToSpill;
111 for (unsigned i = 0; i < FPRInfo::numberOfRegisters; ++i) {
112 FPRReg reg = FPRInfo::toRegister(i);
113 if (m_scratchRegisters.getFPRByIndex(i) && m_usedRegisters.get(reg))
114 registersToSpill.set(reg);
115 }
116 for (unsigned i = 0; i < GPRInfo::numberOfRegisters; ++i) {
117 GPRReg reg = GPRInfo::toRegister(i);
118 if (m_scratchRegisters.getGPRByIndex(i) && m_usedRegisters.get(reg))
119 registersToSpill.set(reg);
120 }
121
122 unsigned extraStackBytesAtTopOfStack = extraStackSpace == ExtraStackSpace::SpaceForCCall ? maxFrameExtentForSlowPathCall : 0;
123 unsigned stackAdjustmentSize = ScratchRegisterAllocator::preserveRegistersToStackForCall(jit, registersToSpill, extraStackBytesAtTopOfStack);
124
125 return PreservedState(stackAdjustmentSize, extraStackSpace);
126}
127
128void ScratchRegisterAllocator::restoreReusedRegistersByPopping(MacroAssembler& jit, const ScratchRegisterAllocator::PreservedState& preservedState)
129{
130 RELEASE_ASSERT(preservedState);
131 if (!didReuseRegisters())
132 return;
133
134 RegisterSet registersToFill;
135 for (unsigned i = GPRInfo::numberOfRegisters; i--;) {
136 GPRReg reg = GPRInfo::toRegister(i);
137 if (m_scratchRegisters.getGPRByIndex(i) && m_usedRegisters.get(reg))
138 registersToFill.set(reg);
139 }
140 for (unsigned i = FPRInfo::numberOfRegisters; i--;) {
141 FPRReg reg = FPRInfo::toRegister(i);
142 if (m_scratchRegisters.getFPRByIndex(i) && m_usedRegisters.get(reg))
143 registersToFill.set(reg);
144 }
145
146 unsigned extraStackBytesAtTopOfStack =
147 preservedState.extraStackSpaceRequirement == ExtraStackSpace::SpaceForCCall ? maxFrameExtentForSlowPathCall : 0;
148 RegisterSet dontRestore; // Empty set. We want to restore everything.
149 ScratchRegisterAllocator::restoreRegistersFromStackForCall(jit, registersToFill, dontRestore,
150 preservedState.numberOfBytesPreserved, extraStackBytesAtTopOfStack);
151}
152
153RegisterSet ScratchRegisterAllocator::usedRegistersForCall() const
154{
155 RegisterSet result = m_usedRegisters;
156 result.exclude(RegisterSet::registersToNotSaveForJSCall());
157 return result;
158}
159
160unsigned ScratchRegisterAllocator::desiredScratchBufferSizeForCall() const
161{
162 return usedRegistersForCall().numberOfSetRegisters() * sizeof(JSValue);
163}
164
// Emits code that saves every register in usedRegistersForCall() into the
// given ScratchBuffer: GPRs first, then FPRs, each in its own EncodedJSValue
// slot. Finally publishes the buffer's active length (presumably so the saved
// values are treated as live until the matching restore clears it — see
// ScratchBuffer). If scratchGPR is InvalidGPRReg on entry, a free GPR (not
// locked and not handed out as scratch) is picked while walking the GPRs.
void ScratchRegisterAllocator::preserveUsedRegistersToScratchBufferForCall(MacroAssembler& jit, ScratchBuffer* scratchBuffer, GPRReg scratchGPR)
{
    RegisterSet usedRegisters = usedRegistersForCall();
    if (!usedRegisters.numberOfSetRegisters())
        return;

    unsigned count = 0;
    for (GPRReg reg = MacroAssembler::firstRegister(); reg <= MacroAssembler::lastRegister(); reg = MacroAssembler::nextRegister(reg)) {
        if (usedRegisters.get(reg)) {
            jit.storePtr(reg, static_cast<EncodedJSValue*>(scratchBuffer->dataBuffer()) + count);
            count++;
        }
        // Picking a used register as scratch is safe here: scratchGPR is not
        // written until the FPR loop below, by which point every used GPR has
        // already been stored to the buffer.
        if (GPRInfo::toIndex(reg) != GPRInfo::InvalidIndex
            && scratchGPR == InvalidGPRReg
            && !m_lockedRegisters.get(reg) && !m_scratchRegisters.get(reg))
            scratchGPR = reg;
    }
    RELEASE_ASSERT(scratchGPR != InvalidGPRReg);
    for (FPRReg reg = MacroAssembler::firstFPRegister(); reg <= MacroAssembler::lastFPRegister(); reg = MacroAssembler::nextFPRegister(reg)) {
        if (usedRegisters.get(reg)) {
            // storeDouble needs the slot address materialized in a GPR.
            jit.move(MacroAssembler::TrustedImmPtr(static_cast<EncodedJSValue*>(scratchBuffer->dataBuffer()) + count), scratchGPR);
            count++;
            jit.storeDouble(reg, scratchGPR);
        }
    }
    RELEASE_ASSERT(count * sizeof(JSValue) == desiredScratchBufferSizeForCall());

    // Record how many bytes of the buffer are now in use.
    jit.move(MacroAssembler::TrustedImmPtr(scratchBuffer->addressOfActiveLength()), scratchGPR);
    jit.storePtr(MacroAssembler::TrustedImmPtr(static_cast<size_t>(count * sizeof(JSValue))), scratchGPR);
}
195
// Emits code that undoes preserveUsedRegistersToScratchBufferForCall():
// clears the buffer's active length, then reloads FPRs (which need scratchGPR
// to hold slot addresses), then reloads GPRs last — so if scratchGPR is one
// of the preserved registers, its original value is reinstated at the end.
// If scratchGPR is InvalidGPRReg, any non-locked, non-scratch GPR is chosen;
// clobbering it is fine because the GPR loop reloads every used GPR anyway.
void ScratchRegisterAllocator::restoreUsedRegistersFromScratchBufferForCall(MacroAssembler& jit, ScratchBuffer* scratchBuffer, GPRReg scratchGPR)
{
    RegisterSet usedRegisters = usedRegistersForCall();
    if (!usedRegisters.numberOfSetRegisters())
        return;

    if (scratchGPR == InvalidGPRReg) {
        // Find a scratch register.
        for (unsigned i = GPRInfo::numberOfRegisters; i--;) {
            if (m_lockedRegisters.getGPRByIndex(i) || m_scratchRegisters.getGPRByIndex(i))
                continue;
            scratchGPR = GPRInfo::toRegister(i);
            break;
        }
    }
    RELEASE_ASSERT(scratchGPR != InvalidGPRReg);

    // Mark the buffer inactive before touching its contents.
    jit.move(MacroAssembler::TrustedImmPtr(scratchBuffer->addressOfActiveLength()), scratchGPR);
    jit.storePtr(MacroAssembler::TrustedImmPtr(nullptr), scratchGPR);

    // Restore double registers first. FPR slots start after the GPR slots,
    // hence count begins at numberOfSetGPRs().
    unsigned count = usedRegisters.numberOfSetGPRs();
    for (FPRReg reg = MacroAssembler::firstFPRegister(); reg <= MacroAssembler::lastFPRegister(); reg = MacroAssembler::nextFPRegister(reg)) {
        if (usedRegisters.get(reg)) {
            jit.move(MacroAssembler::TrustedImmPtr(static_cast<EncodedJSValue*>(scratchBuffer->dataBuffer()) + (count++)), scratchGPR);
            jit.loadDouble(scratchGPR, reg);
        }
    }

    // GPRs occupy the leading slots, in the same ascending register order the
    // preserve pass stored them.
    count = 0;
    for (GPRReg reg = MacroAssembler::firstRegister(); reg <= MacroAssembler::lastRegister(); reg = MacroAssembler::nextRegister(reg)) {
        if (usedRegisters.get(reg))
            jit.loadPtr(static_cast<EncodedJSValue*>(scratchBuffer->dataBuffer()) + (count++), reg);
    }
}
231
232unsigned ScratchRegisterAllocator::preserveRegistersToStackForCall(MacroAssembler& jit, const RegisterSet& usedRegisters, unsigned extraBytesAtTopOfStack)
233{
234 RELEASE_ASSERT(extraBytesAtTopOfStack % sizeof(void*) == 0);
235 if (!usedRegisters.numberOfSetRegisters())
236 return 0;
237
238 unsigned stackOffset = (usedRegisters.numberOfSetRegisters()) * sizeof(EncodedJSValue);
239 stackOffset += extraBytesAtTopOfStack;
240 stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), stackOffset);
241 jit.subPtr(
242 MacroAssembler::TrustedImm32(stackOffset),
243 MacroAssembler::stackPointerRegister);
244
245 unsigned count = 0;
246 for (GPRReg reg = MacroAssembler::firstRegister(); reg <= MacroAssembler::lastRegister(); reg = MacroAssembler::nextRegister(reg)) {
247 if (usedRegisters.get(reg)) {
248 jit.storePtr(reg, MacroAssembler::Address(MacroAssembler::stackPointerRegister, extraBytesAtTopOfStack + (count * sizeof(EncodedJSValue))));
249 count++;
250 }
251 }
252 for (FPRReg reg = MacroAssembler::firstFPRegister(); reg <= MacroAssembler::lastFPRegister(); reg = MacroAssembler::nextFPRegister(reg)) {
253 if (usedRegisters.get(reg)) {
254 jit.storeDouble(reg, MacroAssembler::Address(MacroAssembler::stackPointerRegister, extraBytesAtTopOfStack + (count * sizeof(EncodedJSValue))));
255 count++;
256 }
257 }
258
259 RELEASE_ASSERT(count == usedRegisters.numberOfSetRegisters());
260
261 return stackOffset;
262}
263
264void ScratchRegisterAllocator::restoreRegistersFromStackForCall(MacroAssembler& jit, const RegisterSet& usedRegisters, const RegisterSet& ignore, unsigned numberOfStackBytesUsedForRegisterPreservation, unsigned extraBytesAtTopOfStack)
265{
266 RELEASE_ASSERT(extraBytesAtTopOfStack % sizeof(void*) == 0);
267 if (!usedRegisters.numberOfSetRegisters()) {
268 RELEASE_ASSERT(numberOfStackBytesUsedForRegisterPreservation == 0);
269 return;
270 }
271
272 unsigned count = 0;
273 for (GPRReg reg = MacroAssembler::firstRegister(); reg <= MacroAssembler::lastRegister(); reg = MacroAssembler::nextRegister(reg)) {
274 if (usedRegisters.get(reg)) {
275 if (!ignore.get(reg))
276 jit.loadPtr(MacroAssembler::Address(MacroAssembler::stackPointerRegister, extraBytesAtTopOfStack + (sizeof(EncodedJSValue) * count)), reg);
277 count++;
278 }
279 }
280 for (FPRReg reg = MacroAssembler::firstFPRegister(); reg <= MacroAssembler::lastFPRegister(); reg = MacroAssembler::nextFPRegister(reg)) {
281 if (usedRegisters.get(reg)) {
282 if (!ignore.get(reg))
283 jit.loadDouble(MacroAssembler::Address(MacroAssembler::stackPointerRegister, extraBytesAtTopOfStack + (sizeof(EncodedJSValue) * count)), reg);
284 count++;
285 }
286 }
287
288 unsigned stackOffset = (usedRegisters.numberOfSetRegisters()) * sizeof(EncodedJSValue);
289 stackOffset += extraBytesAtTopOfStack;
290 stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), stackOffset);
291
292 RELEASE_ASSERT(count == usedRegisters.numberOfSetRegisters());
293 RELEASE_ASSERT(stackOffset == numberOfStackBytesUsedForRegisterPreservation);
294
295 jit.addPtr(
296 MacroAssembler::TrustedImm32(stackOffset),
297 MacroAssembler::stackPointerRegister);
298}
299
300} // namespace JSC
301
302#endif // ENABLE(JIT)
303