/*
 * Copyright (C) 2012-2019 Apple Inc. All Rights Reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#include "UnlinkedCodeBlock.h"

#include "BytecodeGenerator.h"
#include "BytecodeLivenessAnalysis.h"
#include "BytecodeRewriter.h"
#include "ClassInfo.h"
#include "CodeCache.h"
#include "ExecutableInfo.h"
#include "FunctionOverrides.h"
#include "InstructionStream.h"
#include "JSCInlines.h"
#include "JSString.h"
#include "Opcode.h"
#include "Parser.h"
#include "PreciseJumpTargetsInlines.h"
#include "SourceProvider.h"
#include "Structure.h"
#include "SymbolTable.h"
#include "UnlinkedEvalCodeBlock.h"
#include "UnlinkedFunctionCodeBlock.h"
#include "UnlinkedMetadataTableInlines.h"
#include "UnlinkedModuleProgramCodeBlock.h"
#include "UnlinkedProgramCodeBlock.h"
#include <wtf/DataLog.h>

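// UnlinkedCodeBlock holds the output of bytecode generation in a form that is
// independent of any particular global object, which (roughly speaking) is what lets
// the CodeCache share it across multiple linked CodeBlocks. This file covers the
// bookkeeping around that bytecode: expression range info for error messages and the
// debugger, exception handlers, type profiler ranges, and the offset fix-ups applied
// when a BytecodeRewriter modifies the instruction stream.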
namespace JSC {

const ClassInfo UnlinkedCodeBlock::s_info = { "UnlinkedCodeBlock", nullptr, nullptr, nullptr, CREATE_METHOD_TABLE(UnlinkedCodeBlock) };

UnlinkedCodeBlock::UnlinkedCodeBlock(VM* vm, Structure* structure, CodeType codeType, const ExecutableInfo& info, OptionSet<CodeGenerationMode> codeGenerationMode)
    : Base(*vm, structure)
    , m_usesEval(info.usesEval())
    , m_isStrictMode(info.isStrictMode())
    , m_isConstructor(info.isConstructor())
    , m_hasCapturedVariables(false)
    , m_isBuiltinFunction(info.isBuiltinFunction())
    , m_superBinding(static_cast<unsigned>(info.superBinding()))
    , m_scriptMode(static_cast<unsigned>(info.scriptMode()))
    , m_isArrowFunctionContext(info.isArrowFunctionContext())
    , m_isClassContext(info.isClassContext())
    , m_hasTailCalls(false)
    , m_constructorKind(static_cast<unsigned>(info.constructorKind()))
    , m_derivedContextType(static_cast<unsigned>(info.derivedContextType()))
    , m_evalContextType(static_cast<unsigned>(info.evalContextType()))
    , m_codeType(static_cast<unsigned>(codeType))
    , m_didOptimize(static_cast<unsigned>(MixedTriState))
    , m_parseMode(info.parseMode())
    , m_codeGenerationMode(codeGenerationMode)
    , m_metadata(UnlinkedMetadataTable::create())
{
    for (auto& constantRegisterIndex : m_linkTimeConstants)
        constantRegisterIndex = 0;
    ASSERT(m_constructorKind == static_cast<unsigned>(info.constructorKind()));
    ASSERT(m_codeType == static_cast<unsigned>(codeType));
    ASSERT(m_didOptimize == static_cast<unsigned>(MixedTriState));
}

void UnlinkedCodeBlock::visitChildren(JSCell* cell, SlotVisitor& visitor)
{
    UnlinkedCodeBlock* thisObject = jsCast<UnlinkedCodeBlock*>(cell);
    ASSERT_GC_OBJECT_INHERITS(thisObject, info());
    Base::visitChildren(thisObject, visitor);
    auto locker = holdLock(thisObject->cellLock());
    for (FunctionExpressionVector::iterator ptr = thisObject->m_functionDecls.begin(), end = thisObject->m_functionDecls.end(); ptr != end; ++ptr)
        visitor.append(*ptr);
    for (FunctionExpressionVector::iterator ptr = thisObject->m_functionExprs.begin(), end = thisObject->m_functionExprs.end(); ptr != end; ++ptr)
        visitor.append(*ptr);
    visitor.appendValues(thisObject->m_constantRegisters.data(), thisObject->m_constantRegisters.size());
    size_t extraMemory = thisObject->m_metadata->sizeInBytes();
    if (thisObject->m_instructions)
        extraMemory += thisObject->m_instructions->sizeInBytes();
    visitor.reportExtraMemoryVisited(extraMemory);
}

size_t UnlinkedCodeBlock::estimatedSize(JSCell* cell, VM& vm)
{
    UnlinkedCodeBlock* thisObject = jsCast<UnlinkedCodeBlock*>(cell);
    size_t extraSize = thisObject->m_metadata->sizeInBytes();
    if (thisObject->m_instructions)
        extraSize += thisObject->m_instructions->sizeInBytes();
    return Base::estimatedSize(cell, vm) + extraSize;
}

int UnlinkedCodeBlock::lineNumberForBytecodeOffset(unsigned bytecodeOffset)
{
    ASSERT(bytecodeOffset < instructions().size());
    int divot { 0 };
    int startOffset { 0 };
    int endOffset { 0 };
    unsigned line { 0 };
    unsigned column { 0 };
    expressionRangeForBytecodeOffset(bytecodeOffset, divot, startOffset, endOffset, line, column);
    return line;
}

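// ExpressionRangeInfo packs line/column information as compactly as it can. The
// encoding recorded in info.mode (chosen in addExpressionInfo() below) tells us how to
// unpack it: FatLineMode and FatColumnMode keep the pair inline in the info itself,
// while FatLineAndColumnMode spills the full pair into
// m_rareData->m_expressionInfoFatPositions and stores only an index here.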
inline void UnlinkedCodeBlock::getLineAndColumn(const ExpressionRangeInfo& info,
    unsigned& line, unsigned& column) const
{
    switch (info.mode) {
    case ExpressionRangeInfo::FatLineMode:
        info.decodeFatLineMode(line, column);
        break;
    case ExpressionRangeInfo::FatColumnMode:
        info.decodeFatColumnMode(line, column);
        break;
    case ExpressionRangeInfo::FatLineAndColumnMode: {
        unsigned fatIndex = info.position;
        ExpressionRangeInfo::FatPosition& fatPos = m_rareData->m_expressionInfoFatPositions[fatIndex];
        line = fatPos.line;
        column = fatPos.column;
        break;
    }
    } // switch
}

#ifndef NDEBUG
static void dumpLineColumnEntry(size_t index, const InstructionStream& instructionStream, unsigned instructionOffset, unsigned line, unsigned column)
{
    const auto instruction = instructionStream.at(instructionOffset);
    const char* event = "";
    if (instruction->is<OpDebug>()) {
        switch (instruction->as<OpDebug>().m_debugHookType) {
        case WillExecuteProgram: event = " WillExecuteProgram"; break;
        case DidExecuteProgram: event = " DidExecuteProgram"; break;
        case DidEnterCallFrame: event = " DidEnterCallFrame"; break;
        case DidReachBreakpoint: event = " DidReachBreakpoint"; break;
        case WillLeaveCallFrame: event = " WillLeaveCallFrame"; break;
        case WillExecuteStatement: event = " WillExecuteStatement"; break;
        case WillExecuteExpression: event = " WillExecuteExpression"; break;
        }
    }
    dataLogF("  [%zu] pc %u @ line %u col %u : %s%s\n", index, instructionOffset, line, column, instruction->name(), event);
}

void UnlinkedCodeBlock::dumpExpressionRangeInfo()
{
    Vector<ExpressionRangeInfo>& expressionInfo = m_expressionInfo;

    size_t size = m_expressionInfo.size();
    dataLogF("UnlinkedCodeBlock %p expressionRangeInfo[%zu] {\n", this, size);
    for (size_t i = 0; i < size; i++) {
        ExpressionRangeInfo& info = expressionInfo[i];
        unsigned line;
        unsigned column;
        getLineAndColumn(info, line, column);
        dumpLineColumnEntry(i, instructions(), info.instructionOffset, line, column);
    }
    dataLog("}\n");
}
#endif

void UnlinkedCodeBlock::expressionRangeForBytecodeOffset(unsigned bytecodeOffset,
    int& divot, int& startOffset, int& endOffset, unsigned& line, unsigned& column) const
{
    ASSERT(bytecodeOffset < instructions().size());

    if (!m_expressionInfo.size()) {
        startOffset = 0;
        endOffset = 0;
        divot = 0;
        line = 0;
        column = 0;
        return;
    }

    const Vector<ExpressionRangeInfo>& expressionInfo = m_expressionInfo;

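    // Binary search for the first entry whose instructionOffset is strictly greater
    // than bytecodeOffset; the entry just before it (low - 1) is the last range that
    // starts at or before the requested offset. If every entry starts after the
    // offset, fall back to the very first entry.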
    int low = 0;
    int high = expressionInfo.size();
    while (low < high) {
        int mid = low + (high - low) / 2;
        if (expressionInfo[mid].instructionOffset <= bytecodeOffset)
            low = mid + 1;
        else
            high = mid;
    }

    if (!low)
        low = 1;

    const ExpressionRangeInfo& info = expressionInfo[low - 1];
    startOffset = info.startOffset;
    endOffset = info.endOffset;
    divot = info.divotPoint;
    getLineAndColumn(info, line, column);
}

void UnlinkedCodeBlock::addExpressionInfo(unsigned instructionOffset,
    int divot, int startOffset, int endOffset, unsigned line, unsigned column)
{
    if (divot > ExpressionRangeInfo::MaxDivot) {
        // Overflow has occurred; we can only give line number info for errors in this region.
        divot = 0;
        startOffset = 0;
        endOffset = 0;
    } else if (startOffset > ExpressionRangeInfo::MaxOffset) {
        // If the start offset is out of bounds, we clear both offsets so we only get
        // the divot marker. The error message will have to fall back to the line and
        // character position.
        startOffset = 0;
        endOffset = 0;
    } else if (endOffset > ExpressionRangeInfo::MaxOffset) {
        // The end offset is only used for additional context, and is much more likely
        // to overflow (e.g. function call arguments), so we are willing to drop it
        // without dropping the rest of the range.
        endOffset = 0;
    }

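    // Pick the most compact encoding that can still represent this line/column pair.
    // If neither inline encoding fits, spill the full pair into the rare data's
    // fat-position table and store only its index.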
    unsigned positionMode =
        (line <= ExpressionRangeInfo::MaxFatLineModeLine && column <= ExpressionRangeInfo::MaxFatLineModeColumn)
        ? ExpressionRangeInfo::FatLineMode
        : (line <= ExpressionRangeInfo::MaxFatColumnModeLine && column <= ExpressionRangeInfo::MaxFatColumnModeColumn)
        ? ExpressionRangeInfo::FatColumnMode
        : ExpressionRangeInfo::FatLineAndColumnMode;

    ExpressionRangeInfo info;
    info.instructionOffset = instructionOffset;
    info.divotPoint = divot;
    info.startOffset = startOffset;
    info.endOffset = endOffset;

    info.mode = positionMode;
    switch (positionMode) {
    case ExpressionRangeInfo::FatLineMode:
        info.encodeFatLineMode(line, column);
        break;
    case ExpressionRangeInfo::FatColumnMode:
        info.encodeFatColumnMode(line, column);
        break;
    case ExpressionRangeInfo::FatLineAndColumnMode: {
        createRareDataIfNecessary();
        unsigned fatIndex = m_rareData->m_expressionInfoFatPositions.size();
        ExpressionRangeInfo::FatPosition fatPos = { line, column };
        m_rareData->m_expressionInfoFatPositions.append(fatPos);
        info.position = fatIndex;
        break;
    }
    } // switch

    m_expressionInfo.append(info);
}

bool UnlinkedCodeBlock::typeProfilerExpressionInfoForBytecodeOffset(unsigned bytecodeOffset, unsigned& startDivot, unsigned& endDivot)
{
    static const bool verbose = false;
    if (!m_rareData) {
        if (verbose)
            dataLogF("Don't have assignment info for offset:%u\n", bytecodeOffset);
        startDivot = UINT_MAX;
        endDivot = UINT_MAX;
        return false;
    }

    auto iter = m_rareData->m_typeProfilerInfoMap.find(bytecodeOffset);
    if (iter == m_rareData->m_typeProfilerInfoMap.end()) {
        if (verbose)
            dataLogF("Don't have assignment info for offset:%u\n", bytecodeOffset);
        startDivot = UINT_MAX;
        endDivot = UINT_MAX;
        return false;
    }

    RareData::TypeProfilerExpressionRange& range = iter->value;
    startDivot = range.m_startDivot;
    endDivot = range.m_endDivot;
    return true;
}

void UnlinkedCodeBlock::addTypeProfilerExpressionInfo(unsigned instructionOffset, unsigned startDivot, unsigned endDivot)
{
    createRareDataIfNecessary();
    RareData::TypeProfilerExpressionRange range;
    range.m_startDivot = startDivot;
    range.m_endDivot = endDivot;
    m_rareData->m_typeProfilerInfoMap.set(instructionOffset, range);
}

UnlinkedCodeBlock::~UnlinkedCodeBlock()
{
}

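// Take ownership of the finished instruction stream. The metadata table is finalized
// under the cell lock together with the stream, and the combined footprint is reported
// to the GC so heap pressure accounts for this off-heap memory (visitChildren() reports
// the same quantity as visited memory).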
void UnlinkedCodeBlock::setInstructions(std::unique_ptr<InstructionStream> instructions)
{
    ASSERT(instructions);
    {
        auto locker = holdLock(cellLock());
        m_instructions = WTFMove(instructions);
        m_metadata->finalize();
    }
    Heap::heap(this)->reportExtraMemoryAllocated(m_instructions->sizeInBytes() + m_metadata->sizeInBytes());
}

const InstructionStream& UnlinkedCodeBlock::instructions() const
{
    ASSERT(m_instructions.get());
    return *m_instructions;
}

UnlinkedHandlerInfo* UnlinkedCodeBlock::handlerForBytecodeOffset(unsigned bytecodeOffset, RequiredHandler requiredHandler)
{
    return handlerForIndex(bytecodeOffset, requiredHandler);
}

UnlinkedHandlerInfo* UnlinkedCodeBlock::handlerForIndex(unsigned index, RequiredHandler requiredHandler)
{
    if (!m_rareData)
        return nullptr;
    return UnlinkedHandlerInfo::handlerForIndex(m_rareData->m_exceptionHandlers, index, requiredHandler);
}

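// Applies a BytecodeRewriter's queued modifications to this code block. Every absolute
// bytecode offset we track on the side (exception handlers, profiling offsets,
// property-access instruction offsets, expression info) has to be remapped before the
// instruction stream itself is rewritten; the jump targets are then recomputed from
// the modified instructions.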
void UnlinkedCodeBlock::applyModification(BytecodeRewriter& rewriter, InstructionStreamWriter& instructions)
{
    // Before applying the changes, we adjust the jumps based on the original bytecode offset, the offset to the jump target, and
    // the insertion information.

    rewriter.adjustJumpTargets();

    // Then, adjust the exception handlers.
    if (m_rareData) {
        for (UnlinkedHandlerInfo& handler : m_rareData->m_exceptionHandlers) {
            handler.target = rewriter.adjustAbsoluteOffset(handler.target);
            handler.start = rewriter.adjustAbsoluteOffset(handler.start);
            handler.end = rewriter.adjustAbsoluteOffset(handler.end);
        }

        for (size_t i = 0; i < m_rareData->m_opProfileControlFlowBytecodeOffsets.size(); ++i)
            m_rareData->m_opProfileControlFlowBytecodeOffsets[i] = rewriter.adjustAbsoluteOffset(m_rareData->m_opProfileControlFlowBytecodeOffsets[i]);

        if (!m_rareData->m_typeProfilerInfoMap.isEmpty()) {
            HashMap<unsigned, RareData::TypeProfilerExpressionRange> adjustedTypeProfilerInfoMap;
            for (auto& entry : m_rareData->m_typeProfilerInfoMap)
                adjustedTypeProfilerInfoMap.set(rewriter.adjustAbsoluteOffset(entry.key), entry.value);
            m_rareData->m_typeProfilerInfoMap.swap(adjustedTypeProfilerInfoMap);
        }
    }

    for (size_t i = 0; i < m_propertyAccessInstructions.size(); ++i)
        m_propertyAccessInstructions[i] = rewriter.adjustAbsoluteOffset(m_propertyAccessInstructions[i]);

    for (size_t i = 0; i < m_expressionInfo.size(); ++i)
        m_expressionInfo[i].instructionOffset = rewriter.adjustAbsoluteOffset(m_expressionInfo[i].instructionOffset);

    // Then, modify the unlinked instructions.
    rewriter.applyModification();

    // And recompute the jump targets based on the modified unlinked instructions.
    m_jumpTargets.clear();
    recomputePreciseJumpTargets(this, instructions, m_jumpTargets);
}

void UnlinkedCodeBlock::shrinkToFit()
{
    auto locker = holdLock(cellLock());

    m_jumpTargets.shrinkToFit();
    m_propertyAccessInstructions.shrinkToFit();
    m_identifiers.shrinkToFit();
    m_constantRegisters.shrinkToFit();
    m_constantsSourceCodeRepresentation.shrinkToFit();
    m_functionDecls.shrinkToFit();
    m_functionExprs.shrinkToFit();
    m_expressionInfo.shrinkToFit();

    if (m_rareData) {
        m_rareData->m_exceptionHandlers.shrinkToFit();
        m_rareData->m_switchJumpTables.shrinkToFit();
        m_rareData->m_stringSwitchJumpTables.shrinkToFit();
        m_rareData->m_expressionInfoFatPositions.shrinkToFit();
        m_rareData->m_opProfileControlFlowBytecodeOffsets.shrinkToFit();
        m_rareData->m_bitVectors.shrinkToFit();
        m_rareData->m_constantIdentifierSets.shrinkToFit();
    }
}

void UnlinkedCodeBlock::dump(PrintStream&) const
{
}

BytecodeLivenessAnalysis& UnlinkedCodeBlock::livenessAnalysisSlow(CodeBlock* codeBlock)
{
    RELEASE_ASSERT(codeBlock->unlinkedCodeBlock() == this);

    {
        ConcurrentJSLocker locker(m_lock);
        if (!m_liveness) {
            // There is a chance two compiler threads raced to the slow path.
            // Grabbing the lock above defends against computing liveness twice.
            m_liveness = std::make_unique<BytecodeLivenessAnalysis>(codeBlock);
        }
    }

    return *m_liveness;
}

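// Jumps whose target does not fit in the instruction's inline operand encoding are
// recorded here instead, keyed by bytecode offset. A zero target is asserted against
// below, presumably because zero serves as the in-stream sentinel directing readers to
// this table.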
void UnlinkedCodeBlock::addOutOfLineJumpTarget(InstructionStream::Offset bytecodeOffset, int target)
{
    RELEASE_ASSERT(target);
    m_outOfLineJumpTargets.set(bytecodeOffset, target);
}

int UnlinkedCodeBlock::outOfLineJumpOffset(InstructionStream::Offset bytecodeOffset)
{
    ASSERT(m_outOfLineJumpTargets.contains(bytecodeOffset));
    return m_outOfLineJumpTargets.get(bytecodeOffset);
}

} // namespace JSC