blob: c4760429e491688f6f2992c585aaad5a6e7024bc [file] [log] [blame]
Ben Murdochc5610432016-08-08 18:44:38 +01001// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/interpreter/bytecode-array-writer.h"
6
Ben Murdoch61f157c2016-09-16 13:49:30 +01007#include "src/api.h"
8#include "src/interpreter/bytecode-label.h"
9#include "src/interpreter/constant-array-builder.h"
10#include "src/log.h"
Ben Murdochc5610432016-08-08 18:44:38 +010011
12namespace v8 {
13namespace internal {
14namespace interpreter {
15
BytecodeArrayWriter::BytecodeArrayWriter(
    Isolate* isolate, Zone* zone, ConstantArrayBuilder* constant_array_builder)
    : isolate_(isolate),
      bytecodes_(zone),
      max_register_count_(0),
      unbound_jumps_(0),
      source_position_table_builder_(isolate, zone),
      constant_array_builder_(constant_array_builder) {
  // Announce the start of line-position recording for this bytecode stream.
  // Paired with the CodeEndLinePosInfoRecordEvent logged in ToBytecodeArray().
  LOG_CODE_EVENT(isolate_, CodeStartLinePosInfoRecordEvent(
                               source_position_table_builder()));
}
Ben Murdochc5610432016-08-08 18:44:38 +010027
28// override
29BytecodeArrayWriter::~BytecodeArrayWriter() {}
30
// override
Handle<BytecodeArray> BytecodeArrayWriter::ToBytecodeArray(
    int fixed_register_count, int parameter_count,
    Handle<FixedArray> handler_table) {
  // Every forward jump must have been patched (via PatchJump) by now.
  DCHECK_EQ(0, unbound_jumps_);

  int bytecode_size = static_cast<int>(bytecodes()->size());

  // All locals need a frame slot for the debugger, but may not be
  // present in generated code.
  int frame_size_for_locals = fixed_register_count * kPointerSize;
  int frame_size_used = max_register_count() * kPointerSize;
  int frame_size = std::max(frame_size_for_locals, frame_size_used);
  Handle<FixedArray> constant_pool = constant_array_builder()->ToFixedArray();
  Handle<ByteArray> source_position_table =
      source_position_table_builder()->ToSourcePositionTable();
  Handle<BytecodeArray> bytecode_array = isolate_->factory()->NewBytecodeArray(
      bytecode_size, &bytecodes()->front(), frame_size, parameter_count,
      constant_pool);
  bytecode_array->set_handler_table(*handler_table);
  bytecode_array->set_source_position_table(*source_position_table);

  // Close out the line-position recording started in the constructor and
  // hand the accumulated data to the code event listener.
  void* line_info = source_position_table_builder()->DetachJITHandlerData();
  LOG_CODE_EVENT(isolate_, CodeEndLinePosInfoRecordEvent(
                               AbstractCode::cast(*bytecode_array), line_info));
  return bytecode_array;
}
Ben Murdochc5610432016-08-08 18:44:38 +010058
// override
void BytecodeArrayWriter::Write(BytecodeNode* node) {
  // Jumps go through WriteJump() so their operands can be patched when the
  // target label is bound.
  DCHECK(!Bytecodes::IsJump(node->bytecode()));
  // Record the source position before emitting: the position entry uses the
  // stream offset at which this bytecode is about to be written.
  UpdateSourcePositionTable(node);
  EmitBytecode(node);
}
65
// override
void BytecodeArrayWriter::WriteJump(BytecodeNode* node, BytecodeLabel* label) {
  DCHECK(Bytecodes::IsJump(node->bytecode()));
  // Record the source position before emitting: the position entry uses the
  // stream offset at which this jump is about to be written.
  UpdateSourcePositionTable(node);
  EmitJump(node, label);
}
72
73// override
74void BytecodeArrayWriter::BindLabel(BytecodeLabel* label) {
75 size_t current_offset = bytecodes()->size();
76 if (label->is_forward_target()) {
77 // An earlier jump instruction refers to this label. Update it's location.
78 PatchJump(current_offset, label->offset());
79 // Now treat as if the label will only be back referred to.
80 }
81 label->bind_to(current_offset);
82}
83
84// override
85void BytecodeArrayWriter::BindLabel(const BytecodeLabel& target,
86 BytecodeLabel* label) {
87 DCHECK(!label->is_bound());
88 DCHECK(target.is_bound());
89 if (label->is_forward_target()) {
90 // An earlier jump instruction refers to this label. Update it's location.
91 PatchJump(target.offset(), label->offset());
92 // Now treat as if the label will only be back referred to.
93 }
94 label->bind_to(target.offset());
95}
96
Ben Murdochc5610432016-08-08 18:44:38 +010097void BytecodeArrayWriter::UpdateSourcePositionTable(
98 const BytecodeNode* const node) {
99 int bytecode_offset = static_cast<int>(bytecodes()->size());
100 const BytecodeSourceInfo& source_info = node->source_info();
101 if (source_info.is_valid()) {
Ben Murdoch61f157c2016-09-16 13:49:30 +0100102 source_position_table_builder()->AddPosition(bytecode_offset,
103 source_info.source_position(),
104 source_info.is_statement());
Ben Murdochc5610432016-08-08 18:44:38 +0100105 }
106}
107
Ben Murdoch61f157c2016-09-16 13:49:30 +0100108namespace {
109
110OperandScale ScaleForScalableByteOperand(OperandSize operand_size) {
111 STATIC_ASSERT(static_cast<int>(OperandSize::kByte) ==
112 static_cast<int>(OperandScale::kSingle));
113 STATIC_ASSERT(static_cast<int>(OperandSize::kShort) ==
114 static_cast<int>(OperandScale::kDouble));
115 STATIC_ASSERT(static_cast<int>(OperandSize::kQuad) ==
116 static_cast<int>(OperandScale::kQuadruple));
117 return static_cast<OperandScale>(operand_size);
118}
119
120OperandScale OperandScaleForScalableSignedByte(uint32_t operand_value) {
121 int32_t signed_operand = static_cast<int32_t>(operand_value);
122 OperandSize bytes_required = Bytecodes::SizeForSignedOperand(signed_operand);
123 return ScaleForScalableByteOperand(bytes_required);
124}
125
126OperandScale OperandScaleForScalableUnsignedByte(uint32_t operand_value) {
127 OperandSize bytes_required = Bytecodes::SizeForUnsignedOperand(operand_value);
128 return ScaleForScalableByteOperand(bytes_required);
129}
130
131OperandScale GetOperandScale(const BytecodeNode* const node) {
132 const OperandTypeInfo* operand_type_infos =
133 Bytecodes::GetOperandTypeInfos(node->bytecode());
134 OperandScale operand_scale = OperandScale::kSingle;
135 for (int i = 0; i < node->operand_count(); ++i) {
136 switch (operand_type_infos[i]) {
137 case OperandTypeInfo::kScalableSignedByte: {
138 uint32_t operand = node->operand(i);
139 operand_scale =
140 std::max(operand_scale, OperandScaleForScalableSignedByte(operand));
141 break;
142 }
143 case OperandTypeInfo::kScalableUnsignedByte: {
144 uint32_t operand = node->operand(i);
145 operand_scale = std::max(operand_scale,
146 OperandScaleForScalableUnsignedByte(operand));
147 break;
148 }
149 case OperandTypeInfo::kFixedUnsignedByte:
150 case OperandTypeInfo::kFixedUnsignedShort:
151 break;
152 case OperandTypeInfo::kNone:
153 UNREACHABLE();
154 break;
155 }
156 }
157 return operand_scale;
158}
159
160} // namespace
161
// Appends |node|'s bytecode and operands to the bytecode stream, emitting a
// scaling prefix bytecode first when any operand is wider than one byte, and
// updates max_register_count_ for any register operands.
void BytecodeArrayWriter::EmitBytecode(const BytecodeNode* const node) {
  DCHECK_NE(node->bytecode(), Bytecode::kIllegal);

  // If any scalable operand needs more than a byte, a prefix bytecode for
  // the chosen scale precedes the actual bytecode.
  OperandScale operand_scale = GetOperandScale(node);
  if (operand_scale != OperandScale::kSingle) {
    Bytecode prefix = Bytecodes::OperandScaleToPrefixBytecode(operand_scale);
    bytecodes()->push_back(Bytecodes::ToByte(prefix));
  }

  Bytecode bytecode = node->bytecode();
  bytecodes()->push_back(Bytecodes::ToByte(bytecode));

  // Emit each operand at the size dictated by |operand_scale|; multi-byte
  // operands go through the unaligned write helpers.
  int register_operand_bitmap = Bytecodes::GetRegisterOperandBitmap(bytecode);
  const uint32_t* const operands = node->operands();
  const OperandSize* operand_sizes =
      Bytecodes::GetOperandSizes(bytecode, operand_scale);
  const OperandType* operand_types = Bytecodes::GetOperandTypes(bytecode);
  for (int i = 0; operand_types[i] != OperandType::kNone; ++i) {
    OperandType operand_type = operand_types[i];
    switch (operand_sizes[i]) {
      case OperandSize::kNone:
        UNREACHABLE();
        break;
      case OperandSize::kByte:
        bytecodes()->push_back(static_cast<uint8_t>(operands[i]));
        break;
      case OperandSize::kShort: {
        uint8_t operand_bytes[2];
        WriteUnalignedUInt16(operand_bytes, operands[i]);
        bytecodes()->insert(bytecodes()->end(), operand_bytes,
                            operand_bytes + 2);
        break;
      }
      case OperandSize::kQuad: {
        uint8_t operand_bytes[4];
        WriteUnalignedUInt32(operand_bytes, operands[i]);
        bytecodes()->insert(bytecodes()->end(), operand_bytes,
                            operand_bytes + 4);
        break;
      }
    }

    // Track the highest register index referenced so ToBytecodeArray() can
    // size the frame.
    if ((register_operand_bitmap >> i) & 1) {
      int count;
      if (operand_types[i + 1] == OperandType::kRegCount) {
        // Register-list operand: the next operand holds how many consecutive
        // registers are used starting at this one.
        count = static_cast<int>(operands[i + 1]);
      } else {
        count = Bytecodes::GetNumberOfRegistersRepresentedBy(operand_type);
      }
      Register reg = Register::FromOperand(static_cast<int32_t>(operands[i]));
      max_register_count_ = std::max(max_register_count_, reg.index() + count);
    }
  }
}
216
Ben Murdoch61f157c2016-09-16 13:49:30 +0100217// static
218Bytecode GetJumpWithConstantOperand(Bytecode jump_bytecode) {
219 switch (jump_bytecode) {
220 case Bytecode::kJump:
221 return Bytecode::kJumpConstant;
222 case Bytecode::kJumpIfTrue:
223 return Bytecode::kJumpIfTrueConstant;
224 case Bytecode::kJumpIfFalse:
225 return Bytecode::kJumpIfFalseConstant;
226 case Bytecode::kJumpIfToBooleanTrue:
227 return Bytecode::kJumpIfToBooleanTrueConstant;
228 case Bytecode::kJumpIfToBooleanFalse:
229 return Bytecode::kJumpIfToBooleanFalseConstant;
230 case Bytecode::kJumpIfNotHole:
231 return Bytecode::kJumpIfNotHoleConstant;
232 case Bytecode::kJumpIfNull:
233 return Bytecode::kJumpIfNullConstant;
234 case Bytecode::kJumpIfUndefined:
235 return Bytecode::kJumpIfUndefinedConstant;
236 default:
237 UNREACHABLE();
238 return Bytecode::kIllegal;
239 }
240}
Ben Murdochc5610432016-08-08 18:44:38 +0100241
// Patches a forward jump whose operand slot is one byte wide. If the delta
// fits in a signed byte it is written directly; otherwise the delta goes
// into the constant pool and the jump is rewritten to its *Constant variant.
void BytecodeArrayWriter::PatchJumpWith8BitOperand(size_t jump_location,
                                                   int delta) {
  Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location));
  DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
  size_t operand_location = jump_location + 1;
  DCHECK_EQ(bytecodes()->at(operand_location), k8BitJumpPlaceholder);
  if (Bytecodes::SizeForSignedOperand(delta) == OperandSize::kByte) {
    // The jump fits within the range of an Imm operand, so cancel
    // the reservation and jump directly.
    constant_array_builder()->DiscardReservedEntry(OperandSize::kByte);
    bytecodes()->at(operand_location) = static_cast<uint8_t>(delta);
  } else {
    // The jump does not fit within the range of an Imm operand, so
    // commit reservation putting the offset into the constant pool,
    // and update the jump instruction and operand.
    size_t entry = constant_array_builder()->CommitReservedEntry(
        OperandSize::kByte, handle(Smi::FromInt(delta), isolate()));
    // The reserved entry guarantees the pool index itself fits in a byte.
    DCHECK_LE(entry, kMaxUInt32);
    DCHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<uint32_t>(entry)),
              OperandSize::kByte);
    jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
    bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
    bytecodes()->at(operand_location) = static_cast<uint8_t>(entry);
  }
}
267
// Patches a forward jump whose operand slot is two bytes wide. If the delta
// fits in a signed short it is written directly; otherwise the delta goes
// into the constant pool and the jump is rewritten to its *Constant variant.
void BytecodeArrayWriter::PatchJumpWith16BitOperand(size_t jump_location,
                                                    int delta) {
  Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location));
  DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
  size_t operand_location = jump_location + 1;
  uint8_t operand_bytes[2];
  if (Bytecodes::SizeForSignedOperand(delta) <= OperandSize::kShort) {
    // Delta fits in the immediate operand; the constant pool reservation
    // made when the jump was emitted is no longer needed.
    constant_array_builder()->DiscardReservedEntry(OperandSize::kShort);
    WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(delta));
  } else {
    // Delta too wide: store it in the constant pool and switch the jump to
    // its constant-pool variant; the operand becomes the pool index.
    jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
    bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
    size_t entry = constant_array_builder()->CommitReservedEntry(
        OperandSize::kShort, handle(Smi::FromInt(delta), isolate()));
    WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(entry));
  }
  // Both operand bytes must still hold the placeholder written by EmitJump.
  DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder &&
         bytecodes()->at(operand_location + 1) == k8BitJumpPlaceholder);
  bytecodes()->at(operand_location++) = operand_bytes[0];
  bytecodes()->at(operand_location) = operand_bytes[1];
}
289
290void BytecodeArrayWriter::PatchJumpWith32BitOperand(size_t jump_location,
291 int delta) {
292 DCHECK(Bytecodes::IsJumpImmediate(
293 Bytecodes::FromByte(bytecodes()->at(jump_location))));
294 constant_array_builder()->DiscardReservedEntry(OperandSize::kQuad);
295 uint8_t operand_bytes[4];
296 WriteUnalignedUInt32(operand_bytes, static_cast<uint32_t>(delta));
297 size_t operand_location = jump_location + 1;
298 DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder &&
299 bytecodes()->at(operand_location + 1) == k8BitJumpPlaceholder &&
300 bytecodes()->at(operand_location + 2) == k8BitJumpPlaceholder &&
301 bytecodes()->at(operand_location + 3) == k8BitJumpPlaceholder);
302 bytecodes()->at(operand_location++) = operand_bytes[0];
303 bytecodes()->at(operand_location++) = operand_bytes[1];
304 bytecodes()->at(operand_location++) = operand_bytes[2];
305 bytecodes()->at(operand_location) = operand_bytes[3];
306}
307
// Patches the forward jump at |jump_location| to target |jump_target|,
// dispatching on the operand scale (and skipping any scaling prefix
// bytecode) to the width-specific patch helper.
void BytecodeArrayWriter::PatchJump(size_t jump_target, size_t jump_location) {
  Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location));
  int delta = static_cast<int>(jump_target - jump_location);
  int prefix_offset = 0;
  OperandScale operand_scale = OperandScale::kSingle;
  if (Bytecodes::IsPrefixScalingBytecode(jump_bytecode)) {
    // If a prefix scaling bytecode is emitted the target offset is one
    // less than the case of no prefix scaling bytecode.
    delta -= 1;
    prefix_offset = 1;
    operand_scale = Bytecodes::PrefixBytecodeToOperandScale(jump_bytecode);
    // Re-read the actual jump bytecode that follows the prefix.
    jump_bytecode =
        Bytecodes::FromByte(bytecodes()->at(jump_location + prefix_offset));
  }

  DCHECK(Bytecodes::IsJump(jump_bytecode));
  switch (operand_scale) {
    case OperandScale::kSingle:
      // Single scale means no prefix was emitted, so no offset to skip.
      PatchJumpWith8BitOperand(jump_location, delta);
      break;
    case OperandScale::kDouble:
      PatchJumpWith16BitOperand(jump_location + prefix_offset, delta);
      break;
    case OperandScale::kQuadruple:
      PatchJumpWith32BitOperand(jump_location + prefix_offset, delta);
      break;
    default:
      UNREACHABLE();
  }
  // One fewer forward jump awaiting its target (checked in ToBytecodeArray).
  unbound_jumps_--;
}
339
// Emits a jump bytecode. Backward jumps (label already bound) get their
// delta computed immediately; forward jumps get a placeholder operand plus a
// constant pool reservation, both resolved later by PatchJump.
void BytecodeArrayWriter::EmitJump(BytecodeNode* node, BytecodeLabel* label) {
  DCHECK(Bytecodes::IsJump(node->bytecode()));
  // The jump operand arrives as zero and is filled in below.
  DCHECK_EQ(0, node->operand(0));

  size_t current_offset = bytecodes()->size();

  if (label->is_bound()) {
    CHECK_GE(current_offset, label->offset());
    CHECK_LE(current_offset, static_cast<size_t>(kMaxInt));
    // Label has been bound already so this is a backwards jump.
    size_t abs_delta = current_offset - label->offset();
    int delta = -static_cast<int>(abs_delta);
    OperandSize operand_size = Bytecodes::SizeForSignedOperand(delta);
    if (operand_size > OperandSize::kByte) {
      // Adjust for scaling byte prefix for wide jump offset.
      DCHECK_LE(delta, 0);
      delta -= 1;
    }
    node->set_bytecode(node->bytecode(), delta);
  } else {
    // The label has not yet been bound so this is a forward reference
    // that will be patched when the label is bound. We create a
    // reservation in the constant pool so the jump can be patched
    // when the label is bound. The reservation means the maximum size
    // of the operand for the constant is known and the jump can
    // be emitted into the bytecode stream with space for the operand.
    unbound_jumps_++;
    label->set_referrer(current_offset);
    OperandSize reserved_operand_size =
        constant_array_builder()->CreateReservedEntry();
    // Fill the operand with a placeholder of the reserved width; PatchJump
    // later overwrites it with the real delta or a constant pool index.
    switch (reserved_operand_size) {
      case OperandSize::kNone:
        UNREACHABLE();
        break;
      case OperandSize::kByte:
        node->set_bytecode(node->bytecode(), k8BitJumpPlaceholder);
        break;
      case OperandSize::kShort:
        node->set_bytecode(node->bytecode(), k16BitJumpPlaceholder);
        break;
      case OperandSize::kQuad:
        node->set_bytecode(node->bytecode(), k32BitJumpPlaceholder);
        break;
    }
  }
  EmitBytecode(node);
}
387
388} // namespace interpreter
389} // namespace internal
390} // namespace v8