blob: 2663e4a876ef974d48bb072531e86e65db1e8d80 [file] [log] [blame]
Ben Murdoch097c5b22016-05-18 11:27:45 +01001// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/interpreter/interpreter-assembler.h"
6
7#include <ostream>
8
9#include "src/code-factory.h"
10#include "src/frames.h"
11#include "src/interface-descriptors.h"
12#include "src/interpreter/bytecodes.h"
13#include "src/interpreter/interpreter.h"
14#include "src/machine-type.h"
15#include "src/macro-assembler.h"
16#include "src/zone.h"
17
18namespace v8 {
19namespace internal {
20namespace interpreter {
21
22using compiler::Node;
23
// Builds the code-stub-assembler graph for a single bytecode handler.
// |bytecode| identifies the handler being generated and |operand_scale|
// the prefix scaling (single/double/quadruple width operands) it handles.
InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone,
                                           Bytecode bytecode,
                                           OperandScale operand_scale)
    : compiler::CodeStubAssembler(isolate, zone,
                                  InterpreterDispatchDescriptor(isolate),
                                  Code::ComputeFlags(Code::BYTECODE_HANDLER),
                                  Bytecodes::ToString(bytecode), 0),
      bytecode_(bytecode),
      operand_scale_(operand_scale),
      accumulator_(this, MachineRepresentation::kTagged),
      accumulator_use_(AccumulatorUse::kNone),
      context_(this, MachineRepresentation::kTagged),
      bytecode_array_(this, MachineRepresentation::kTagged),
      disable_stack_check_across_call_(false),
      stack_pointer_before_call_(nullptr) {
  // Seed the assembler variables from the dispatch calling convention's
  // incoming parameters.
  accumulator_.Bind(
      Parameter(InterpreterDispatchDescriptor::kAccumulatorParameter));
  context_.Bind(Parameter(InterpreterDispatchDescriptor::kContextParameter));
  bytecode_array_.Bind(
      Parameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter));
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
  }
}
48
InterpreterAssembler::~InterpreterAssembler() {
  // If the following check fails the handler does not use the
  // accumulator in the way described in the bytecode definitions in
  // bytecodes.h.
  DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));
}
Ben Murdoch097c5b22016-05-18 11:27:45 +010055
// Returns the accumulator without recording a read in accumulator_use_;
// used internally (e.g. for dispatch and tracing) where the access should
// not count against the bytecode's declared accumulator usage.
Node* InterpreterAssembler::GetAccumulatorUnchecked() {
  return accumulator_.value();
}
59
// Returns the accumulator and records the read so the destructor can
// verify it against the bytecode's declared accumulator use.
Node* InterpreterAssembler::GetAccumulator() {
  DCHECK(Bytecodes::ReadsAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kRead;
  return GetAccumulatorUnchecked();
}
Ben Murdoch097c5b22016-05-18 11:27:45 +010065
// Binds |value| to the accumulator and records the write for the
// destructor's accumulator-use verification.
void InterpreterAssembler::SetAccumulator(Node* value) {
  DCHECK(Bytecodes::WritesAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
  accumulator_.Bind(value);
}
71
72Node* InterpreterAssembler::GetContext() { return context_.value(); }
73
// Updates the current context, both in the interpreter's dedicated
// context register slot and in the local assembler variable.
void InterpreterAssembler::SetContext(Node* value) {
  StoreRegister(value, Register::current_context());
  context_.Bind(value);
}
78
// Current offset into the bytecode array, from the dispatch parameter.
Node* InterpreterAssembler::BytecodeOffset() {
  return Parameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter);
}
82
// Raw pointer to the interpreter register file, from the dispatch parameter.
Node* InterpreterAssembler::RegisterFileRawPointer() {
  return Parameter(InterpreterDispatchDescriptor::kRegisterFileParameter);
}
86
// Tagged pointer to the BytecodeArray being interpreted. Read from the
// variable (not the parameter) because CallEpilogue may rebind it when the
// debugger swaps in a patched bytecode array.
Node* InterpreterAssembler::BytecodeArrayTaggedPointer() {
  return bytecode_array_.value();
}
90
// Raw pointer to the interpreter dispatch table, from the dispatch parameter.
Node* InterpreterAssembler::DispatchTableRawPointer() {
  return Parameter(InterpreterDispatchDescriptor::kDispatchTableParameter);
}
94
// Returns the address of interpreter register |reg_index| within the
// register file.
Node* InterpreterAssembler::RegisterLocation(Node* reg_index) {
  return IntPtrAdd(RegisterFileRawPointer(), RegisterFrameOffset(reg_index));
}
98
// Loads a tagged value at a raw byte |offset| from the register file base.
Node* InterpreterAssembler::LoadRegister(int offset) {
  return Load(MachineType::AnyTagged(), RegisterFileRawPointer(),
              IntPtrConstant(offset));
}
103
// Loads interpreter register |reg|. Register indices grow downwards in
// memory, hence the negated index (see also NextRegister).
Node* InterpreterAssembler::LoadRegister(Register reg) {
  return LoadRegister(IntPtrConstant(-reg.index()));
}
107
// Converts a register index into a byte offset within the register file.
Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
  return WordShl(index, kPointerSizeLog2);
}
111
// Loads the interpreter register whose index is the dynamic |reg_index|.
Node* InterpreterAssembler::LoadRegister(Node* reg_index) {
  return Load(MachineType::AnyTagged(), RegisterFileRawPointer(),
              RegisterFrameOffset(reg_index));
}
116
// Stores |value| at a raw byte |offset| from the register file base.
// No write barrier is needed: the register file lives on the stack.
Node* InterpreterAssembler::StoreRegister(Node* value, int offset) {
  return StoreNoWriteBarrier(MachineRepresentation::kTagged,
                             RegisterFileRawPointer(), IntPtrConstant(offset),
                             value);
}
122
// Stores |value| into interpreter register |reg| (negated index — register
// indices grow downwards in memory).
Node* InterpreterAssembler::StoreRegister(Node* value, Register reg) {
  return StoreRegister(value, IntPtrConstant(-reg.index()));
}
126
// Stores |value| into the register with dynamic index |reg_index|.
// No write barrier is needed: the register file lives on the stack.
Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) {
  return StoreNoWriteBarrier(MachineRepresentation::kTagged,
                             RegisterFileRawPointer(),
                             RegisterFrameOffset(reg_index), value);
}
132
Node* InterpreterAssembler::NextRegister(Node* reg_index) {
  // Register indexes are negative, so the next index is minus one.
  return IntPtrAdd(reg_index, IntPtrConstant(-1));
}
137
// Byte offset of operand |operand_index| from the start of the current
// bytecode, accounting for the handler's operand scale.
Node* InterpreterAssembler::OperandOffset(int operand_index) {
  return IntPtrConstant(
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()));
}
142
// Loads a one-byte unsigned operand; zero-extension to word size is
// implicit in the Uint8 load.
Node* InterpreterAssembler::BytecodeOperandUnsignedByte(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  Node* operand_offset = OperandOffset(operand_index);
  return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
              IntPtrAdd(BytecodeOffset(), operand_offset));
}
151
// Loads a one-byte signed operand and sign-extends it to pointer width.
Node* InterpreterAssembler::BytecodeOperandSignedByte(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  Node* operand_offset = OperandOffset(operand_index);
  Node* load = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
                    IntPtrAdd(BytecodeOffset(), operand_offset));

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}
166
Ben Murdochda12d292016-06-02 14:46:10 +0100167compiler::Node* InterpreterAssembler::BytecodeOperandReadUnaligned(
168 int relative_offset, MachineType result_type) {
169 static const int kMaxCount = 4;
170 DCHECK(!TargetSupportsUnalignedAccess());
171
172 int count;
173 switch (result_type.representation()) {
174 case MachineRepresentation::kWord16:
175 count = 2;
176 break;
177 case MachineRepresentation::kWord32:
178 count = 4;
179 break;
180 default:
181 UNREACHABLE();
182 break;
183 }
184 MachineType msb_type =
185 result_type.IsSigned() ? MachineType::Int8() : MachineType::Uint8();
186
Ben Murdoch097c5b22016-05-18 11:27:45 +0100187#if V8_TARGET_LITTLE_ENDIAN
Ben Murdochda12d292016-06-02 14:46:10 +0100188 const int kStep = -1;
189 int msb_offset = count - 1;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100190#elif V8_TARGET_BIG_ENDIAN
Ben Murdochda12d292016-06-02 14:46:10 +0100191 const int kStep = 1;
192 int msb_offset = 0;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100193#else
194#error "Unknown Architecture"
195#endif
Ben Murdochda12d292016-06-02 14:46:10 +0100196
197 // Read the most signicant bytecode into bytes[0] and then in order
198 // down to least significant in bytes[count - 1].
199 DCHECK(count <= kMaxCount);
200 compiler::Node* bytes[kMaxCount];
201 for (int i = 0; i < count; i++) {
202 MachineType machine_type = (i == 0) ? msb_type : MachineType::Uint8();
203 Node* offset = IntPtrConstant(relative_offset + msb_offset + i * kStep);
204 Node* array_offset = IntPtrAdd(BytecodeOffset(), offset);
205 bytes[i] = Load(machine_type, BytecodeArrayTaggedPointer(), array_offset);
206 }
207
208 // Pack LSB to MSB.
209 Node* result = bytes[--count];
210 for (int i = 1; --count >= 0; i++) {
211 Node* shift = Int32Constant(i * kBitsPerByte);
212 Node* value = Word32Shl(bytes[count], shift);
213 result = Word32Or(value, result);
214 }
215 return result;
216}
217
// Loads a two-byte unsigned operand, using a byte-wise read when the
// target cannot perform unaligned loads.
Node* InterpreterAssembler::BytecodeOperandUnsignedShort(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(
      OperandSize::kShort,
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  if (TargetSupportsUnalignedAccess()) {
    return Load(MachineType::Uint16(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint16());
  }
}
232
// Loads a two-byte signed operand (byte-wise if unaligned access is
// unsupported) and sign-extends it to pointer width.
Node* InterpreterAssembler::BytecodeOperandSignedShort(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(
      OperandSize::kShort,
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  Node* load;
  if (TargetSupportsUnalignedAccess()) {
    load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int16());
  }

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}
254
// Loads a four-byte unsigned operand, using a byte-wise read when the
// target cannot perform unaligned loads.
Node* InterpreterAssembler::BytecodeOperandUnsignedQuad(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  if (TargetSupportsUnalignedAccess()) {
    return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint32());
  }
}
268
// Loads a four-byte signed operand (byte-wise if unaligned access is
// unsupported) and sign-extends it to pointer width.
Node* InterpreterAssembler::BytecodeOperandSignedQuad(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  Node* load;
  if (TargetSupportsUnalignedAccess()) {
    load = Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int32());
  }

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}
289
// Dispatches to the width-specific signed operand loader for
// |operand_size|. The operand type must be declared signed in bytecodes.h.
Node* InterpreterAssembler::BytecodeSignedOperand(int operand_index,
                                                  OperandSize operand_size) {
  DCHECK(!Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandSignedByte(operand_index);
    case OperandSize::kShort:
      return BytecodeOperandSignedShort(operand_index);
    case OperandSize::kQuad:
      return BytecodeOperandSignedQuad(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}
306
// Dispatches to the width-specific unsigned operand loader for
// |operand_size|. The operand type must be declared unsigned in bytecodes.h.
Node* InterpreterAssembler::BytecodeUnsignedOperand(int operand_index,
                                                    OperandSize operand_size) {
  DCHECK(Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandUnsignedByte(operand_index);
    case OperandSize::kShort:
      return BytecodeOperandUnsignedShort(operand_index);
    case OperandSize::kQuad:
      return BytecodeOperandUnsignedQuad(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}
323
// Loads a register-count operand (OperandType::kRegCount) as unsigned.
Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
  DCHECK_EQ(OperandType::kRegCount,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}
331
332Node* InterpreterAssembler::BytecodeOperandFlag(int operand_index) {
333 DCHECK_EQ(OperandType::kFlag8,
334 Bytecodes::GetOperandType(bytecode_, operand_index));
335 OperandSize operand_size =
336 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
337 DCHECK_EQ(operand_size, OperandSize::kByte);
338 return BytecodeUnsignedOperand(operand_index, operand_size);
339}
340
// Loads an immediate operand (OperandType::kImm) as a signed value.
Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
  DCHECK_EQ(OperandType::kImm,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeSignedOperand(operand_index, operand_size);
}
348
349Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
Ben Murdochda12d292016-06-02 14:46:10 +0100350 DCHECK(OperandType::kIdx ==
351 Bytecodes::GetOperandType(bytecode_, operand_index));
352 OperandSize operand_size =
353 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
354 return BytecodeUnsignedOperand(operand_index, operand_size);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100355}
356
// Loads a register operand as a signed value (register indices can be
// negative, see NextRegister).
Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) {
  DCHECK(Bytecodes::IsRegisterOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeSignedOperand(operand_index, operand_size);
}
364
365Node* InterpreterAssembler::BytecodeOperandRuntimeId(int operand_index) {
366 DCHECK(OperandType::kRuntimeId ==
367 Bytecodes::GetOperandType(bytecode_, operand_index));
368 OperandSize operand_size =
369 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
370 DCHECK_EQ(operand_size, OperandSize::kShort);
371 return BytecodeUnsignedOperand(operand_index, operand_size);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100372}
373
// Loads entry |index| from the bytecode array's constant pool (a
// FixedArray): header offset plus index scaled to pointer size.
Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
  Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
                                        BytecodeArray::kConstantPoolOffset);
  Node* entry_offset =
      IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                WordShl(index, kPointerSizeLog2));
  return Load(MachineType::AnyTagged(), constant_pool, entry_offset);
}
382
// Loads a tagged field at |offset| from a tagged |object|, untagging the
// heap-object pointer for the raw load.
Node* InterpreterAssembler::LoadObjectField(Node* object, int offset) {
  return Load(MachineType::AnyTagged(), object,
              IntPtrConstant(offset - kHeapObjectTag));
}
387
// Loads context slot |slot_index| (compile-time constant) from |context|.
Node* InterpreterAssembler::LoadContextSlot(Node* context, int slot_index) {
  return Load(MachineType::AnyTagged(), context,
              IntPtrConstant(Context::SlotOffset(slot_index)));
}
392
// Loads the context slot with dynamic index |slot_index| from |context|.
Node* InterpreterAssembler::LoadContextSlot(Node* context, Node* slot_index) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
  return Load(MachineType::AnyTagged(), context, offset);
}
399
// Stores |value| into the context slot with dynamic index |slot_index|.
// Uses a full Store (with write barrier) since contexts live in the heap.
Node* InterpreterAssembler::StoreContextSlot(Node* context, Node* slot_index,
                                             Node* value) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
  return Store(MachineRepresentation::kTagged, context, offset, value);
}
407
// Loads the type feedback vector of the current function:
// frame function slot -> SharedFunctionInfo -> feedback vector.
Node* InterpreterAssembler::LoadTypeFeedbackVector() {
  Node* function = Load(
      MachineType::AnyTagged(), RegisterFileRawPointer(),
      IntPtrConstant(InterpreterFrameConstants::kFunctionFromRegisterPointer));
  Node* shared_info =
      LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset);
  Node* vector =
      LoadObjectField(shared_info, SharedFunctionInfo::kFeedbackVectorOffset);
  return vector;
}
418
419void InterpreterAssembler::CallPrologue() {
420 StoreRegister(SmiTag(BytecodeOffset()),
421 InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100422
423 if (FLAG_debug_code && !disable_stack_check_across_call_) {
424 DCHECK(stack_pointer_before_call_ == nullptr);
425 stack_pointer_before_call_ = LoadStackPointer();
426 }
427}
428
// Runs after every outgoing call: in debug builds verifies the stack
// pointer was restored, then reloads the bytecode array pointer which the
// debugger may have swapped for a patched copy during the call.
void InterpreterAssembler::CallEpilogue() {
  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    Node* stack_pointer_after_call = LoadStackPointer();
    Node* stack_pointer_before_call = stack_pointer_before_call_;
    stack_pointer_before_call_ = nullptr;
    AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call,
                        kUnexpectedStackPointer);
  }

  // Restore bytecode array from stack frame in case the debugger has swapped us
  // to the patched debugger bytecode array.
  bytecode_array_.Bind(LoadRegister(
      InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));
}
443
// Calls a JS |function| via the InterpreterPushArgsAndCall stub with
// |arg_count| arguments starting at register |first_arg|.
Node* InterpreterAssembler::CallJS(Node* function, Node* context,
                                   Node* first_arg, Node* arg_count,
                                   TailCallMode tail_call_mode) {
  Callable callable =
      CodeFactory::InterpreterPushArgsAndCall(isolate(), tail_call_mode);
  Node* code_target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  first_arg, function);
}
453
// Invokes |constructor| with |new_target| via the
// InterpreterPushArgsAndConstruct stub, arguments starting at |first_arg|.
Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context,
                                          Node* new_target, Node* first_arg,
                                          Node* arg_count) {
  Callable callable = CodeFactory::InterpreterPushArgsAndConstruct(isolate());
  Node* code_target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  new_target, constructor, first_arg);
}
462
// Calls the runtime function identified by the dynamic |function_id| via
// the interpreter's CEntry stub, resolving the C entry point from the
// runtime function table at code-generation time.
Node* InterpreterAssembler::CallRuntimeN(Node* function_id, Node* context,
                                         Node* first_arg, Node* arg_count,
                                         int result_size) {
  Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size);
  Node* code_target = HeapConstant(callable.code());

  // Get the function entry from the function id.
  Node* function_table = ExternalConstant(
      ExternalReference::runtime_function_table_address(isolate()));
  Node* function_offset =
      Int32Mul(function_id, Int32Constant(sizeof(Runtime::Function)));
  Node* function = IntPtrAdd(function_table, function_offset);
  Node* function_entry =
      Load(MachineType::Pointer(), function,
           IntPtrConstant(offsetof(Runtime::Function, entry)));

  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  first_arg, function_entry, result_size);
}
482
// Adds |weight| (may be negative) to the bytecode array's interrupt budget.
// If the budget drops below zero, calls Runtime::kInterrupt and resets the
// budget; otherwise just stores the new value.
void InterpreterAssembler::UpdateInterruptBudget(Node* weight) {
  CodeStubAssembler::Label ok(this);
  CodeStubAssembler::Label interrupt_check(this);
  CodeStubAssembler::Label end(this);
  Node* budget_offset =
      IntPtrConstant(BytecodeArray::kInterruptBudgetOffset - kHeapObjectTag);

  // Update budget by |weight| and check if it reaches zero.
  Node* old_budget =
      Load(MachineType::Int32(), BytecodeArrayTaggedPointer(), budget_offset);
  Node* new_budget = Int32Add(old_budget, weight);
  Node* condition = Int32GreaterThanOrEqual(new_budget, Int32Constant(0));
  Branch(condition, &ok, &interrupt_check);

  // Perform interrupt and reset budget.
  Bind(&interrupt_check);
  CallRuntime(Runtime::kInterrupt, GetContext());
  StoreNoWriteBarrier(MachineRepresentation::kWord32,
                      BytecodeArrayTaggedPointer(), budget_offset,
                      Int32Constant(Interpreter::InterruptBudget()));
  Goto(&end);

  // Update budget.
  Bind(&ok);
  StoreNoWriteBarrier(MachineRepresentation::kWord32,
                      BytecodeArrayTaggedPointer(), budget_offset, new_budget);
  Goto(&end);
  Bind(&end);
}
512
// Returns the bytecode offset advanced by the constant |delta| bytes.
Node* InterpreterAssembler::Advance(int delta) {
  return IntPtrAdd(BytecodeOffset(), IntPtrConstant(delta));
}
516
// Returns the bytecode offset advanced by the dynamic |delta| bytes.
Node* InterpreterAssembler::Advance(Node* delta) {
  return IntPtrAdd(BytecodeOffset(), delta);
}
520
// Unconditional jump by |delta| bytes: charges the interrupt budget (so
// back-edges trigger interrupts) and dispatches at the new offset.
void InterpreterAssembler::Jump(Node* delta) {
  UpdateInterruptBudget(delta);
  DispatchTo(Advance(delta));
}
525
// Jumps by |delta| if |condition| holds, otherwise falls through to the
// next bytecode via a normal Dispatch.
void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) {
  CodeStubAssembler::Label match(this);
  CodeStubAssembler::Label no_match(this);

  Branch(condition, &match, &no_match);
  Bind(&match);
  Jump(delta);
  Bind(&no_match);
  Dispatch();
}
536
// Jumps by |delta| when |lhs| == |rhs| (word comparison).
void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
  JumpConditional(WordEqual(lhs, rhs), delta);
}
540
// Jumps by |delta| when |lhs| != |rhs| (word comparison).
void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
                                              Node* delta) {
  JumpConditional(WordNotEqual(lhs, rhs), delta);
}
545
// Dispatches to the bytecode immediately following the current one.
void InterpreterAssembler::Dispatch() {
  DispatchTo(Advance(Bytecodes::Size(bytecode_, operand_scale_)));
}
549
// Loads the bytecode at |new_bytecode_offset|, looks up its handler in the
// dispatch table, and tail-calls it.
void InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) {
  Node* target_bytecode = Load(
      MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset);
  // Zero-extend the byte to full word width before using it as an index.
  if (kPointerSize == 8) {
    target_bytecode = ChangeUint32ToUint64(target_bytecode);
  }

  // TODO(rmcilroy): Create a code target dispatch table to avoid conversion
  // from code object on every dispatch.
  Node* target_code_object =
      Load(MachineType::Pointer(), DispatchTableRawPointer(),
           WordShl(target_bytecode, IntPtrConstant(kPointerSizeLog2)));

  DispatchToBytecodeHandler(target_code_object, new_bytecode_offset);
}
565
// Tail-calls |handler| with the full interpreter dispatch convention,
// threading through the accumulator, register file, offset, bytecode array,
// dispatch table and context. Optionally traces the exiting bytecode.
void InterpreterAssembler::DispatchToBytecodeHandler(Node* handler,
                                                     Node* bytecode_offset) {
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
  }

  InterpreterDispatchDescriptor descriptor(isolate());
  // Unchecked accumulator read: dispatch must not count as a use of the
  // accumulator for the current bytecode's accumulator-use bookkeeping.
  Node* args[] = {GetAccumulatorUnchecked(), RegisterFileRawPointer(),
                  bytecode_offset,           BytecodeArrayTaggedPointer(),
                  DispatchTableRawPointer(), GetContext()};
  TailCall(descriptor, handler, args, 0);
}
578
void InterpreterAssembler::DispatchWide(OperandScale operand_scale) {
  // Dispatching a wide bytecode requires treating the prefix
  // bytecode as a base index into the dispatch table and dispatching
  // the bytecode that follows relative to this base.
  //
  // Indices 0-255 correspond to bytecodes with operand_scale == 0
  // Indices 256-511 correspond to bytecodes with operand_scale == 1
  // Indices 512-767 correspond to bytecodes with operand_scale == 2
  Node* next_bytecode_offset = Advance(1);
  Node* next_bytecode = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
                             next_bytecode_offset);
  // Zero-extend the byte to full word width before using it as an index.
  if (kPointerSize == 8) {
    next_bytecode = ChangeUint32ToUint64(next_bytecode);
  }
  Node* base_index;
  switch (operand_scale) {
    case OperandScale::kDouble:
      base_index = IntPtrConstant(1 << kBitsPerByte);
      break;
    case OperandScale::kQuadruple:
      base_index = IntPtrConstant(2 << kBitsPerByte);
      break;
    default:
      UNREACHABLE();
      base_index = nullptr;
  }
  Node* target_index = IntPtrAdd(base_index, next_bytecode);
  Node* target_code_object =
      Load(MachineType::Pointer(), DispatchTableRawPointer(),
           WordShl(target_index, kPointerSizeLog2));

  DispatchToBytecodeHandler(target_code_object, next_bytecode_offset);
}
612
void InterpreterAssembler::InterpreterReturn() {
  // TODO(rmcilroy): Investigate whether it is worth supporting self
  // optimization of primitive functions like FullCodegen.

  // Update profiling count by -BytecodeOffset to simulate backedge to start of
  // function.
  Node* profiling_weight =
      Int32Sub(Int32Constant(kHeapObjectTag + BytecodeArray::kHeaderSize),
               BytecodeOffset());
  UpdateInterruptBudget(profiling_weight);

  // Leave the interpreter by tail-calling the exit trampoline builtin.
  Node* exit_trampoline_code_object =
      HeapConstant(isolate()->builtins()->InterpreterExitTrampoline());
  DispatchToBytecodeHandler(exit_trampoline_code_object);
}
628
// Compares the stack pointer against the isolate's stack limit and calls
// Runtime::kStackGuard when the limit has been crossed.
void InterpreterAssembler::StackCheck() {
  CodeStubAssembler::Label end(this);
  CodeStubAssembler::Label ok(this);
  CodeStubAssembler::Label stack_guard(this);

  Node* sp = LoadStackPointer();
  Node* stack_limit = Load(
      MachineType::Pointer(),
      ExternalConstant(ExternalReference::address_of_stack_limit(isolate())));
  // Stacks grow downwards: we are fine while sp >= limit.
  Node* condition = UintPtrGreaterThanOrEqual(sp, stack_limit);
  Branch(condition, &ok, &stack_guard);
  Bind(&stack_guard);
  CallRuntime(Runtime::kStackGuard, GetContext());
  Goto(&end);
  Bind(&ok);
  Goto(&end);
  Bind(&end);
}
647
// Aborts execution with |bailout_reason| via Runtime::kAbort. The stack
// check across the call is disabled because kAbort does not return
// through the normal call sequence.
void InterpreterAssembler::Abort(BailoutReason bailout_reason) {
  disable_stack_check_across_call_ = true;
  Node* abort_id = SmiTag(Int32Constant(bailout_reason));
  CallRuntime(Runtime::kAbort, GetContext(), abort_id);
  disable_stack_check_across_call_ = false;
}
654
// Aborts with |bailout_reason| unless |lhs| == |rhs| (word comparison);
// used for debug-mode invariant checks.
void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs,
                                               BailoutReason bailout_reason) {
  CodeStubAssembler::Label match(this);
  CodeStubAssembler::Label no_match(this);
  CodeStubAssembler::Label end(this);

  Node* condition = WordEqual(lhs, rhs);
  Branch(condition, &match, &no_match);
  Bind(&no_match);
  Abort(bailout_reason);
  Goto(&end);
  Bind(&match);
  Goto(&end);
  Bind(&end);
}
670
// Calls the tracing runtime function |function_id| with the current
// bytecode array, offset and accumulator (unchecked read — tracing must
// not affect the accumulator-use bookkeeping).
void InterpreterAssembler::TraceBytecode(Runtime::FunctionId function_id) {
  CallRuntime(function_id, GetContext(), BytecodeArrayTaggedPointer(),
              SmiTag(BytecodeOffset()), GetAccumulatorUnchecked());
}
675
// static
// Whether the compilation target can perform unaligned memory loads;
// decides between direct multi-byte operand loads and byte-wise
// reassembly (BytecodeOperandReadUnaligned).
bool InterpreterAssembler::TargetSupportsUnalignedAccess() {
#if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
  return false;
#elif V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_PPC
  return CpuFeatures::IsSupported(UNALIGNED_ACCESSES);
#elif V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_X87 || \
    V8_TARGET_ARCH_S390
  return true;
#else
#error "Unknown Architecture"
#endif
}
689
690} // namespace interpreter
691} // namespace internal
692} // namespace v8