blob: ee5f8be27981f19b63ddf8eb3029729cb147a7fa [file] [log] [blame]
Ben Murdoch097c5b22016-05-18 11:27:45 +01001// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/interpreter/interpreter-assembler.h"
6
Ben Murdochc5610432016-08-08 18:44:38 +01007#include <limits>
Ben Murdoch097c5b22016-05-18 11:27:45 +01008#include <ostream>
9
10#include "src/code-factory.h"
11#include "src/frames.h"
12#include "src/interface-descriptors.h"
13#include "src/interpreter/bytecodes.h"
14#include "src/interpreter/interpreter.h"
15#include "src/machine-type.h"
16#include "src/macro-assembler.h"
17#include "src/zone.h"
18
19namespace v8 {
20namespace internal {
21namespace interpreter {
22
23using compiler::Node;
24
// Constructs an assembler that builds the handler for |bytecode| at
// |operand_scale|. The accumulator arrives as a dispatch parameter and is
// bound into a variable here; entry tracing is emitted when
// --trace-ignition is on.
InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone,
                                           Bytecode bytecode,
                                           OperandScale operand_scale)
    : CodeStubAssembler(isolate, zone, InterpreterDispatchDescriptor(isolate),
                        Code::ComputeFlags(Code::BYTECODE_HANDLER),
                        Bytecodes::ToString(bytecode),
                        Bytecodes::ReturnCount(bytecode)),
      bytecode_(bytecode),
      operand_scale_(operand_scale),
      interpreted_frame_pointer_(this, MachineType::PointerRepresentation()),
      accumulator_(this, MachineRepresentation::kTagged),
      accumulator_use_(AccumulatorUse::kNone),
      made_call_(false),
      disable_stack_check_across_call_(false),
      stack_pointer_before_call_(nullptr) {
  accumulator_.Bind(
      Parameter(InterpreterDispatchDescriptor::kAccumulatorParameter));
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
  }
}
46
InterpreterAssembler::~InterpreterAssembler() {
  // If the following check fails the handler does not use the
  // accumulator in the way described in the bytecode definitions in
  // bytecodes.h.
  DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));
}
Ben Murdoch097c5b22016-05-18 11:27:45 +010053
Ben Murdoch61f157c2016-09-16 13:49:30 +010054Node* InterpreterAssembler::GetInterpretedFramePointer() {
55 if (!interpreted_frame_pointer_.IsBound()) {
56 interpreted_frame_pointer_.Bind(LoadParentFramePointer());
57 }
58 return interpreted_frame_pointer_.value();
59}
60
// Returns the accumulator without recording the access. Used internally
// (tracing, dispatch) where the bytecode's declared accumulator use must
// not be affected.
Node* InterpreterAssembler::GetAccumulatorUnchecked() {
  return accumulator_.value();
}

// Returns the accumulator, recording the read so the destructor can check
// it against the bytecode's declared accumulator use.
Node* InterpreterAssembler::GetAccumulator() {
  DCHECK(Bytecodes::ReadsAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kRead;
  return GetAccumulatorUnchecked();
}

// Binds |value| as the new accumulator, recording the write.
void InterpreterAssembler::SetAccumulator(Node* value) {
  DCHECK(Bytecodes::WritesAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
  accumulator_.Bind(value);
}
76
// Loads the current context from its dedicated interpreter register.
Node* InterpreterAssembler::GetContext() {
  return LoadRegister(Register::current_context());
}

// Stores |value| into the current-context interpreter register.
void InterpreterAssembler::SetContext(Node* value) {
  StoreRegister(value, Register::current_context());
}

// Returns the current bytecode offset (a dispatch parameter).
Node* InterpreterAssembler::BytecodeOffset() {
  return Parameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter);
}
88
// Returns the tagged BytecodeArray being interpreted. After any call the
// array is re-loaded from the stack frame rather than from the incoming
// parameter, because the call may have let the debugger swap in a patched
// debug bytecode array.
Node* InterpreterAssembler::BytecodeArrayTaggedPointer() {
  if (made_call_) {
    // If we have made a call, restore bytecode array from stack frame in case
    // the debugger has swapped us to the patched debugger bytecode array.
    return LoadRegister(Register::bytecode_array());
  } else {
    return Parameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter);
  }
}

// Returns the raw (untagged) dispatch-table pointer (a dispatch parameter).
Node* InterpreterAssembler::DispatchTableRawPointer() {
  return Parameter(InterpreterDispatchDescriptor::kDispatchTableParameter);
}
102
// Returns the address of interpreter register |reg_index| in the
// interpreted frame.
Node* InterpreterAssembler::RegisterLocation(Node* reg_index) {
  return IntPtrAdd(GetInterpretedFramePointer(),
                   RegisterFrameOffset(reg_index));
}

// Converts a register index into a byte offset from the frame pointer.
// Register indices are negative, so the offset is negative as well.
Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
  return WordShl(index, kPointerSizeLog2);
}
111
// Loads the value of a statically-known interpreter register.
Node* InterpreterAssembler::LoadRegister(Register reg) {
  return Load(MachineType::AnyTagged(), GetInterpretedFramePointer(),
              IntPtrConstant(reg.ToOperand() << kPointerSizeLog2));
}

// Loads the value of a dynamically-computed interpreter register.
Node* InterpreterAssembler::LoadRegister(Node* reg_index) {
  return Load(MachineType::AnyTagged(), GetInterpretedFramePointer(),
              RegisterFrameOffset(reg_index));
}

// Stores |value| into a statically-known interpreter register. No write
// barrier is required as registers live in the stack frame, not the heap.
Node* InterpreterAssembler::StoreRegister(Node* value, Register reg) {
  return StoreNoWriteBarrier(
      MachineRepresentation::kTagged, GetInterpretedFramePointer(),
      IntPtrConstant(reg.ToOperand() << kPointerSizeLog2), value);
}

// Stores |value| into a dynamically-computed interpreter register.
Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) {
  return StoreNoWriteBarrier(MachineRepresentation::kTagged,
                             GetInterpretedFramePointer(),
                             RegisterFrameOffset(reg_index), value);
}
133
// Returns the index of the register following |reg_index|.
Node* InterpreterAssembler::NextRegister(Node* reg_index) {
  // Register indexes are negative, so the next index is minus one.
  return IntPtrAdd(reg_index, IntPtrConstant(-1));
}

// Returns the constant offset of operand |operand_index| from the start of
// the current bytecode, accounting for the operand scale.
Node* InterpreterAssembler::OperandOffset(int operand_index) {
  return IntPtrConstant(
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()));
}
143
// Loads an unsigned single-byte operand of the current bytecode.
Node* InterpreterAssembler::BytecodeOperandUnsignedByte(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  Node* operand_offset = OperandOffset(operand_index);
  return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
              IntPtrAdd(BytecodeOffset(), operand_offset));
}

// Loads a signed single-byte operand of the current bytecode,
// sign-extended to full pointer width.
Node* InterpreterAssembler::BytecodeOperandSignedByte(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  Node* operand_offset = OperandOffset(operand_index);
  Node* load = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
                    IntPtrAdd(BytecodeOffset(), operand_offset));

  // Ensure that we sign extend to full pointer size.
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}
167
// Assembles an unaligned multi-byte operand load for targets without
// unaligned-access support, by loading each byte individually and packing
// them into a single word. |relative_offset| is the operand's offset from
// the current bytecode offset; |result_type| determines width and
// signedness (the most significant byte is loaded with a signed type when
// sign extension is wanted).
compiler::Node* InterpreterAssembler::BytecodeOperandReadUnaligned(
    int relative_offset, MachineType result_type) {
  static const int kMaxCount = 4;
  DCHECK(!TargetSupportsUnalignedAccess());

  int count;
  switch (result_type.representation()) {
    case MachineRepresentation::kWord16:
      count = 2;
      break;
    case MachineRepresentation::kWord32:
      count = 4;
      break;
    default:
      UNREACHABLE();
      break;
  }
  MachineType msb_type =
      result_type.IsSigned() ? MachineType::Int8() : MachineType::Uint8();

#if V8_TARGET_LITTLE_ENDIAN
  const int kStep = -1;
  int msb_offset = count - 1;
#elif V8_TARGET_BIG_ENDIAN
  const int kStep = 1;
  int msb_offset = 0;
#else
#error "Unknown Architecture"
#endif

  // Read the most significant byte into bytes[0] and then in order
  // down to the least significant byte in bytes[count - 1].
  DCHECK(count <= kMaxCount);
  compiler::Node* bytes[kMaxCount];
  for (int i = 0; i < count; i++) {
    MachineType machine_type = (i == 0) ? msb_type : MachineType::Uint8();
    Node* offset = IntPtrConstant(relative_offset + msb_offset + i * kStep);
    Node* array_offset = IntPtrAdd(BytecodeOffset(), offset);
    bytes[i] = Load(machine_type, BytecodeArrayTaggedPointer(), array_offset);
  }

  // Pack LSB to MSB.
  Node* result = bytes[--count];
  for (int i = 1; --count >= 0; i++) {
    Node* shift = Int32Constant(i * kBitsPerByte);
    Node* value = Word32Shl(bytes[count], shift);
    result = Word32Or(value, result);
  }
  return result;
}
218
219Node* InterpreterAssembler::BytecodeOperandUnsignedShort(int operand_index) {
220 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
221 DCHECK_EQ(
222 OperandSize::kShort,
223 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
224 int operand_offset =
225 Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
226 if (TargetSupportsUnalignedAccess()) {
227 return Load(MachineType::Uint16(), BytecodeArrayTaggedPointer(),
228 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
229 } else {
230 return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint16());
Ben Murdoch097c5b22016-05-18 11:27:45 +0100231 }
232}
233
// Loads a signed 16-bit operand of the current bytecode, sign-extended to
// full pointer width; uses a byte-wise read on targets without
// unaligned-access support.
Node* InterpreterAssembler::BytecodeOperandSignedShort(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(
      OperandSize::kShort,
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  Node* load;
  if (TargetSupportsUnalignedAccess()) {
    load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int16());
  }

  // Ensure that we sign extend to full pointer size.
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}
255
// Loads an unsigned 32-bit operand of the current bytecode; uses a
// byte-wise read on targets without unaligned-access support.
Node* InterpreterAssembler::BytecodeOperandUnsignedQuad(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  if (TargetSupportsUnalignedAccess()) {
    return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint32());
  }
}

// Loads a signed 32-bit operand of the current bytecode, sign-extended to
// full pointer width.
Node* InterpreterAssembler::BytecodeOperandSignedQuad(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  Node* load;
  if (TargetSupportsUnalignedAccess()) {
    load = Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int32());
  }

  // Ensure that we sign extend to full pointer size.
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}
290
// Dispatches a signed operand load to the loader for |operand_size|.
Node* InterpreterAssembler::BytecodeSignedOperand(int operand_index,
                                                  OperandSize operand_size) {
  DCHECK(!Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandSignedByte(operand_index);
    case OperandSize::kShort:
      return BytecodeOperandSignedShort(operand_index);
    case OperandSize::kQuad:
      return BytecodeOperandSignedQuad(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  // Unreachable; keeps compilers that don't see the switch as exhaustive
  // happy.
  return nullptr;
}

// Dispatches an unsigned operand load to the loader for |operand_size|.
Node* InterpreterAssembler::BytecodeUnsignedOperand(int operand_index,
                                                    OperandSize operand_size) {
  DCHECK(Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandUnsignedByte(operand_index);
    case OperandSize::kShort:
      return BytecodeOperandUnsignedShort(operand_index);
    case OperandSize::kQuad:
      return BytecodeOperandUnsignedQuad(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}
324
// Loads a register-count operand (unsigned).
Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
  DCHECK_EQ(OperandType::kRegCount,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

// Loads an 8-bit flag operand (unsigned, always a single byte).
Node* InterpreterAssembler::BytecodeOperandFlag(int operand_index) {
  DCHECK_EQ(OperandType::kFlag8,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kByte);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}
341
// Loads an immediate operand (signed).
Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
  DCHECK_EQ(OperandType::kImm,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeSignedOperand(operand_index, operand_size);
}

// Loads an index operand (unsigned), e.g. a constant-pool or feedback slot.
Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
  DCHECK(OperandType::kIdx ==
         Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}
357
// Loads a register operand (signed — register indices are negative).
Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) {
  DCHECK(Bytecodes::IsRegisterOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeSignedOperand(operand_index, operand_size);
}

// Loads a runtime-function-id operand (unsigned, always 16-bit).
Node* InterpreterAssembler::BytecodeOperandRuntimeId(int operand_index) {
  DCHECK(OperandType::kRuntimeId ==
         Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kShort);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}
374
// Loads an intrinsic-id operand (unsigned, always a single byte).
Node* InterpreterAssembler::BytecodeOperandIntrinsicId(int operand_index) {
  DCHECK(OperandType::kIntrinsicId ==
         Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kByte);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}
383
// Loads entry |index| from the bytecode array's constant pool (a
// FixedArray): base of the pool's elements plus index scaled to pointers.
Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
  Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
                                        BytecodeArray::kConstantPoolOffset);
  Node* entry_offset =
      IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                WordShl(index, kPointerSizeLog2));
  return Load(MachineType::AnyTagged(), constant_pool, entry_offset);
}
392
// Loads a statically-known slot from |context|.
Node* InterpreterAssembler::LoadContextSlot(Node* context, int slot_index) {
  return Load(MachineType::AnyTagged(), context,
              IntPtrConstant(Context::SlotOffset(slot_index)));
}

// Loads a dynamically-computed slot from |context|.
Node* InterpreterAssembler::LoadContextSlot(Node* context, Node* slot_index) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
  return Load(MachineType::AnyTagged(), context, offset);
}

// Stores |value| into a dynamically-computed slot of |context|. Uses a
// full Store, so the generational write barrier is emitted.
Node* InterpreterAssembler::StoreContextSlot(Node* context, Node* slot_index,
                                             Node* value) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
  return Store(MachineRepresentation::kTagged, context, offset, value);
}
412
// Loads the type feedback vector of the current function: closure ->
// literals array -> feedback vector.
Node* InterpreterAssembler::LoadTypeFeedbackVector() {
  Node* function = LoadRegister(Register::function_closure());
  Node* literals = LoadObjectField(function, JSFunction::kLiteralsOffset);
  Node* vector =
      LoadObjectField(literals, LiteralsArray::kFeedbackVectorOffset);
  return vector;
}
420
// Prepares for an outgoing call: spills the current bytecode offset into
// its stack-frame register (so it survives the call and the debugger can
// read it), optionally records the stack pointer for the post-call check,
// and marks that a call was made (forcing the bytecode array to be
// re-loaded afterwards, see BytecodeArrayTaggedPointer).
void InterpreterAssembler::CallPrologue() {
  StoreRegister(SmiTag(BytecodeOffset()), Register::bytecode_offset());

  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    DCHECK(stack_pointer_before_call_ == nullptr);
    stack_pointer_before_call_ = LoadStackPointer();
  }
  made_call_ = true;
}

// After a call returns, verifies (in debug code) that the stack pointer is
// unchanged across the call.
void InterpreterAssembler::CallEpilogue() {
  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    Node* stack_pointer_after_call = LoadStackPointer();
    Node* stack_pointer_before_call = stack_pointer_before_call_;
    stack_pointer_before_call_ = nullptr;
    AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call,
                        kUnexpectedStackPointer);
  }
}
440
// Calls a JS |function| with |arg_count| arguments starting at register
// address |first_arg|, via the InterpreterPushArgsAndCall stub.
Node* InterpreterAssembler::CallJS(Node* function, Node* context,
                                   Node* first_arg, Node* arg_count,
                                   TailCallMode tail_call_mode) {
  Callable callable =
      CodeFactory::InterpreterPushArgsAndCall(isolate(), tail_call_mode);
  Node* code_target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  first_arg, function);
}

// Invokes |constructor| as a construct call with |new_target|, via the
// InterpreterPushArgsAndConstruct stub.
Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context,
                                          Node* new_target, Node* first_arg,
                                          Node* arg_count) {
  Callable callable = CodeFactory::InterpreterPushArgsAndConstruct(isolate());
  Node* code_target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  new_target, constructor, first_arg);
}
459
// Calls the runtime function identified by the dynamic |function_id| with
// |arg_count| arguments starting at |first_arg|, through the interpreter's
// CEntry stub. The C entry address is looked up in the runtime function
// table at runtime.
Node* InterpreterAssembler::CallRuntimeN(Node* function_id, Node* context,
                                         Node* first_arg, Node* arg_count,
                                         int result_size) {
  Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size);
  Node* code_target = HeapConstant(callable.code());

  // Get the function entry from the function id.
  Node* function_table = ExternalConstant(
      ExternalReference::runtime_function_table_address(isolate()));
  // NOTE(review): the offset is computed in 32 bits and then added to a
  // pointer-width base — presumably safe because the table is small, but
  // worth confirming on 64-bit targets.
  Node* function_offset =
      Int32Mul(function_id, Int32Constant(sizeof(Runtime::Function)));
  Node* function = IntPtrAdd(function_table, function_offset);
  Node* function_entry =
      Load(MachineType::Pointer(), function,
           IntPtrConstant(offsetof(Runtime::Function, entry)));

  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  first_arg, function_entry, result_size);
}
479
480void InterpreterAssembler::UpdateInterruptBudget(Node* weight) {
Ben Murdochc5610432016-08-08 18:44:38 +0100481 Label ok(this), interrupt_check(this, Label::kDeferred), end(this);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100482 Node* budget_offset =
483 IntPtrConstant(BytecodeArray::kInterruptBudgetOffset - kHeapObjectTag);
484
485 // Update budget by |weight| and check if it reaches zero.
Ben Murdochc5610432016-08-08 18:44:38 +0100486 Variable new_budget(this, MachineRepresentation::kWord32);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100487 Node* old_budget =
488 Load(MachineType::Int32(), BytecodeArrayTaggedPointer(), budget_offset);
Ben Murdochc5610432016-08-08 18:44:38 +0100489 new_budget.Bind(Int32Add(old_budget, weight));
490 Node* condition =
491 Int32GreaterThanOrEqual(new_budget.value(), Int32Constant(0));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100492 Branch(condition, &ok, &interrupt_check);
493
494 // Perform interrupt and reset budget.
495 Bind(&interrupt_check);
Ben Murdochc5610432016-08-08 18:44:38 +0100496 {
497 CallRuntime(Runtime::kInterrupt, GetContext());
498 new_budget.Bind(Int32Constant(Interpreter::InterruptBudget()));
499 Goto(&ok);
500 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100501
502 // Update budget.
503 Bind(&ok);
504 StoreNoWriteBarrier(MachineRepresentation::kWord32,
Ben Murdochc5610432016-08-08 18:44:38 +0100505 BytecodeArrayTaggedPointer(), budget_offset,
506 new_budget.value());
Ben Murdoch097c5b22016-05-18 11:27:45 +0100507}
508
// Returns the bytecode offset advanced by a constant |delta|.
Node* InterpreterAssembler::Advance(int delta) {
  return IntPtrAdd(BytecodeOffset(), IntPtrConstant(delta));
}

// Returns the bytecode offset advanced by a dynamic |delta|.
Node* InterpreterAssembler::Advance(Node* delta) {
  return IntPtrAdd(BytecodeOffset(), delta);
}

// Jumps by |delta| bytes: charges the interrupt budget (backward jumps
// have negative weight) and dispatches at the new offset.
Node* InterpreterAssembler::Jump(Node* delta) {
  UpdateInterruptBudget(delta);
  return DispatchTo(Advance(delta));
}
521
// Jumps by |delta| when |condition| holds; otherwise falls through to the
// next bytecode via a normal dispatch.
void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) {
  Label match(this), no_match(this);

  BranchIf(condition, &match, &no_match);
  Bind(&match);
  Jump(delta);
  Bind(&no_match);
  Dispatch();
}

// Jumps by |delta| when |lhs| == |rhs| (word comparison).
void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
  JumpConditional(WordEqual(lhs, rhs), delta);
}

// Jumps by |delta| when |lhs| != |rhs| (word comparison).
void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
                                              Node* delta) {
  JumpConditional(WordNotEqual(lhs, rhs), delta);
}
540
// Advances past the current bytecode and dispatches to the next handler.
Node* InterpreterAssembler::Dispatch() {
  return DispatchTo(Advance(Bytecodes::Size(bytecode_, operand_scale_)));
}

// Loads the bytecode at |new_bytecode_offset|, looks up its handler entry
// in the dispatch table and tail-calls it.
Node* InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) {
  Node* target_bytecode = Load(
      MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset);
  if (kPointerSize == 8) {
    // Zero-extend the byte so it can be used as a table index.
    target_bytecode = ChangeUint32ToUint64(target_bytecode);
  }

  if (FLAG_trace_ignition_dispatches) {
    TraceBytecodeDispatch(target_bytecode);
  }

  Node* target_code_entry =
      Load(MachineType::Pointer(), DispatchTableRawPointer(),
           WordShl(target_bytecode, IntPtrConstant(kPointerSizeLog2)));

  return DispatchToBytecodeHandlerEntry(target_code_entry, new_bytecode_offset);
}
562
// Dispatches to |handler|, a tagged Code object, by computing its untagged
// instruction-start entry address.
Node* InterpreterAssembler::DispatchToBytecodeHandler(Node* handler,
                                                      Node* bytecode_offset) {
  Node* handler_entry =
      IntPtrAdd(handler, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
  return DispatchToBytecodeHandlerEntry(handler_entry, bytecode_offset);
}

// Tail-calls the handler whose code entry is |handler_entry|, threading
// through the dispatch parameters (accumulator, offset, bytecode array,
// dispatch table). Emits exit tracing when --trace-ignition is on.
Node* InterpreterAssembler::DispatchToBytecodeHandlerEntry(
    Node* handler_entry, Node* bytecode_offset) {
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
  }

  InterpreterDispatchDescriptor descriptor(isolate());
  Node* args[] = {GetAccumulatorUnchecked(), bytecode_offset,
                  BytecodeArrayTaggedPointer(), DispatchTableRawPointer()};
  return TailCallBytecodeDispatch(descriptor, handler_entry, args);
}
581
void InterpreterAssembler::DispatchWide(OperandScale operand_scale) {
  // Dispatching a wide bytecode requires treating the prefix
  // bytecode as a base offset into the dispatch table and dispatching
  // the bytecode that follows relative to this base.
  //
  // Indices 0-255 correspond to bytecodes with operand_scale == 0
  // Indices 256-511 correspond to bytecodes with operand_scale == 1
  // Indices 512-767 correspond to bytecodes with operand_scale == 2
  Node* next_bytecode_offset = Advance(1);
  Node* next_bytecode = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
                             next_bytecode_offset);
  if (kPointerSize == 8) {
    next_bytecode = ChangeUint32ToUint64(next_bytecode);
  }

  if (FLAG_trace_ignition_dispatches) {
    TraceBytecodeDispatch(next_bytecode);
  }

  Node* base_index;
  switch (operand_scale) {
    case OperandScale::kDouble:
      base_index = IntPtrConstant(1 << kBitsPerByte);
      break;
    case OperandScale::kQuadruple:
      base_index = IntPtrConstant(2 << kBitsPerByte);
      break;
    default:
      UNREACHABLE();
      base_index = nullptr;
  }
  Node* target_index = IntPtrAdd(base_index, next_bytecode);
  Node* target_code_entry =
      Load(MachineType::Pointer(), DispatchTableRawPointer(),
           WordShl(target_index, kPointerSizeLog2));

  DispatchToBytecodeHandlerEntry(target_code_entry, next_bytecode_offset);
}
620
// On function return, charges the interrupt budget as if a backedge to the
// start of the function had been taken, by subtracting the current
// bytecode offset (adjusted past the array header).
void InterpreterAssembler::UpdateInterruptBudgetOnReturn() {
  // TODO(rmcilroy): Investigate whether it is worth supporting self
  // optimization of primitive functions like FullCodegen.

  // Update profiling count by -BytecodeOffset to simulate backedge to start of
  // function.
  Node* profiling_weight =
      Int32Sub(Int32Constant(kHeapObjectTag + BytecodeArray::kHeaderSize),
               BytecodeOffset());
  UpdateInterruptBudget(profiling_weight);
}
632
// Returns a boolean node that is true when the stack pointer is below the
// isolate's stack limit, i.e. an interrupt/stack check has been requested.
Node* InterpreterAssembler::StackCheckTriggeredInterrupt() {
  Node* sp = LoadStackPointer();
  Node* stack_limit = Load(
      MachineType::Pointer(),
      ExternalConstant(ExternalReference::address_of_stack_limit(isolate())));
  return UintPtrLessThan(sp, stack_limit);
}
640
// Calls Runtime::kAbort with |bailout_reason|. The stack-pointer check is
// disabled around the call because kAbort does not return normally.
void InterpreterAssembler::Abort(BailoutReason bailout_reason) {
  disable_stack_check_across_call_ = true;
  Node* abort_id = SmiTag(Int32Constant(bailout_reason));
  CallRuntime(Runtime::kAbort, GetContext(), abort_id);
  disable_stack_check_across_call_ = false;
}

// Aborts with |bailout_reason| unless |lhs| == |rhs| (word comparison).
void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs,
                                               BailoutReason bailout_reason) {
  Label ok(this), abort(this, Label::kDeferred);
  BranchIfWordEqual(lhs, rhs, &ok, &abort);

  Bind(&abort);
  Abort(bailout_reason);
  Goto(&ok);

  Bind(&ok);
}
659
// Emits a runtime call that traces the current bytecode (entry or exit,
// per |function_id|) together with its offset and the accumulator.
void InterpreterAssembler::TraceBytecode(Runtime::FunctionId function_id) {
  CallRuntime(function_id, GetContext(), BytecodeArrayTaggedPointer(),
              SmiTag(BytecodeOffset()), GetAccumulatorUnchecked());
}
664
// Bumps the (source bytecode, target bytecode) entry of the dispatch
// counters table, saturating at the maximum uintptr value so the counter
// never wraps.
void InterpreterAssembler::TraceBytecodeDispatch(Node* target_bytecode) {
  Node* counters_table = ExternalConstant(
      ExternalReference::interpreter_dispatch_counters(isolate()));
  // Row base: one row of (kLast + 1) counters per source bytecode.
  Node* source_bytecode_table_index = IntPtrConstant(
      static_cast<int>(bytecode_) * (static_cast<int>(Bytecode::kLast) + 1));

  Node* counter_offset =
      WordShl(IntPtrAdd(source_bytecode_table_index, target_bytecode),
              IntPtrConstant(kPointerSizeLog2));
  Node* old_counter =
      Load(MachineType::IntPtr(), counters_table, counter_offset);

  Label counter_ok(this), counter_saturated(this, Label::kDeferred);

  Node* counter_reached_max = WordEqual(
      old_counter, IntPtrConstant(std::numeric_limits<uintptr_t>::max()));
  BranchIf(counter_reached_max, &counter_saturated, &counter_ok);

  Bind(&counter_ok);
  {
    Node* new_counter = IntPtrAdd(old_counter, IntPtrConstant(1));
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), counters_table,
                        counter_offset, new_counter);
    Goto(&counter_saturated);
  }

  Bind(&counter_saturated);
}
693
Ben Murdoch097c5b22016-05-18 11:27:45 +0100694// static
695bool InterpreterAssembler::TargetSupportsUnalignedAccess() {
696#if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
697 return false;
698#elif V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_PPC
699 return CpuFeatures::IsSupported(UNALIGNED_ACCESSES);
Ben Murdochda12d292016-06-02 14:46:10 +0100700#elif V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_X87 || \
701 V8_TARGET_ARCH_S390
Ben Murdoch097c5b22016-05-18 11:27:45 +0100702 return true;
703#else
704#error "Unknown Architecture"
705#endif
706}
707
Ben Murdochc5610432016-08-08 18:44:38 +0100708Node* InterpreterAssembler::RegisterCount() {
709 Node* bytecode_array = LoadRegister(Register::bytecode_array());
710 Node* frame_size = LoadObjectField(
711 bytecode_array, BytecodeArray::kFrameSizeOffset, MachineType::Int32());
712 return Word32Sar(frame_size, Int32Constant(kPointerSizeLog2));
713}
714
715Node* InterpreterAssembler::ExportRegisterFile(Node* array) {
716 if (FLAG_debug_code) {
717 Node* array_size = SmiUntag(LoadFixedArrayBaseLength(array));
718 AbortIfWordNotEqual(
719 array_size, RegisterCount(), kInvalidRegisterFileInGenerator);
720 }
721
722 Variable var_index(this, MachineRepresentation::kWord32);
723 var_index.Bind(Int32Constant(0));
724
725 // Iterate over register file and write values into array.
726 // The mapping of register to array index must match that used in
727 // BytecodeGraphBuilder::VisitResumeGenerator.
728 Label loop(this, &var_index), done_loop(this);
729 Goto(&loop);
730 Bind(&loop);
731 {
732 Node* index = var_index.value();
733 Node* condition = Int32LessThan(index, RegisterCount());
734 GotoUnless(condition, &done_loop);
735
736 Node* reg_index =
737 Int32Sub(Int32Constant(Register(0).ToOperand()), index);
738 Node* value = LoadRegister(ChangeInt32ToIntPtr(reg_index));
739
740 StoreFixedArrayElement(array, index, value);
741
742 var_index.Bind(Int32Add(index, Int32Constant(1)));
743 Goto(&loop);
744 }
745 Bind(&done_loop);
746
747 return array;
748}
749
750Node* InterpreterAssembler::ImportRegisterFile(Node* array) {
751 if (FLAG_debug_code) {
752 Node* array_size = SmiUntag(LoadFixedArrayBaseLength(array));
753 AbortIfWordNotEqual(
754 array_size, RegisterCount(), kInvalidRegisterFileInGenerator);
755 }
756
757 Variable var_index(this, MachineRepresentation::kWord32);
758 var_index.Bind(Int32Constant(0));
759
760 // Iterate over array and write values into register file. Also erase the
761 // array contents to not keep them alive artificially.
762 Label loop(this, &var_index), done_loop(this);
763 Goto(&loop);
764 Bind(&loop);
765 {
766 Node* index = var_index.value();
767 Node* condition = Int32LessThan(index, RegisterCount());
768 GotoUnless(condition, &done_loop);
769
770 Node* value = LoadFixedArrayElement(array, index);
771
772 Node* reg_index =
773 Int32Sub(Int32Constant(Register(0).ToOperand()), index);
774 StoreRegister(value, ChangeInt32ToIntPtr(reg_index));
775
776 StoreFixedArrayElement(array, index, StaleRegisterConstant());
777
778 var_index.Bind(Int32Add(index, Int32Constant(1)));
779 Goto(&loop);
780 }
781 Bind(&done_loop);
782
783 return array;
784}
785
Ben Murdoch097c5b22016-05-18 11:27:45 +0100786} // namespace interpreter
787} // namespace internal
788} // namespace v8