blob: 4e911ebb7c61b4aae31392855e19a6ce236c012f [file] [log] [blame]
Ben Murdoch097c5b22016-05-18 11:27:45 +01001// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/interpreter/interpreter-assembler.h"
6
Ben Murdochc5610432016-08-08 18:44:38 +01007#include <limits>
Ben Murdoch097c5b22016-05-18 11:27:45 +01008#include <ostream>
9
10#include "src/code-factory.h"
11#include "src/frames.h"
12#include "src/interface-descriptors.h"
13#include "src/interpreter/bytecodes.h"
14#include "src/interpreter/interpreter.h"
15#include "src/machine-type.h"
16#include "src/macro-assembler.h"
17#include "src/zone.h"
18
19namespace v8 {
20namespace internal {
21namespace interpreter {
22
23using compiler::Node;
24
// Assembler for a single bytecode handler. Sets up the underlying
// CodeStubAssembler with the interpreter dispatch linkage for |bytecode| at
// |operand_scale|, and binds the accumulator variable to the incoming
// accumulator dispatch parameter.
InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone,
                                           Bytecode bytecode,
                                           OperandScale operand_scale)
    : CodeStubAssembler(isolate, zone, InterpreterDispatchDescriptor(isolate),
                        Code::ComputeFlags(Code::BYTECODE_HANDLER),
                        Bytecodes::ToString(bytecode),
                        Bytecodes::ReturnCount(bytecode)),
      bytecode_(bytecode),
      operand_scale_(operand_scale),
      accumulator_(this, MachineRepresentation::kTagged),
      accumulator_use_(AccumulatorUse::kNone),  // tracked reads/writes, see dtor
      made_call_(false),
      disable_stack_check_across_call_(false),
      stack_pointer_before_call_(nullptr) {
  accumulator_.Bind(
      Parameter(InterpreterDispatchDescriptor::kAccumulatorParameter));
  // With --trace-ignition, emit a runtime call tracing entry to this bytecode.
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
  }
}
45
// Verifies on destruction that the handler touched the accumulator exactly
// as declared for this bytecode.
InterpreterAssembler::~InterpreterAssembler() {
  // If the following check fails the handler does not use the
  // accumulator in the way described in the bytecode definitions in
  // bytecodes.h.
  DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));
}
Ben Murdoch097c5b22016-05-18 11:27:45 +010052
// Returns the accumulator value without recording a use. For internal
// callers (e.g. tracing, dispatch) that must not affect the declared
// accumulator-use bookkeeping checked in the destructor.
Node* InterpreterAssembler::GetAccumulatorUnchecked() {
  return accumulator_.value();
}

// Returns the accumulator and records the read in |accumulator_use_|.
Node* InterpreterAssembler::GetAccumulator() {
  DCHECK(Bytecodes::ReadsAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kRead;
  return GetAccumulatorUnchecked();
}

// Binds a new accumulator value and records the write in |accumulator_use_|.
void InterpreterAssembler::SetAccumulator(Node* value) {
  DCHECK(Bytecodes::WritesAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
  accumulator_.Bind(value);
}
68
// The current context lives in a dedicated interpreter frame register.
Node* InterpreterAssembler::GetContext() {
  return LoadRegister(Register::current_context());
}

void InterpreterAssembler::SetContext(Node* value) {
  StoreRegister(value, Register::current_context());
}

// Offset of the current bytecode within the bytecode array, passed in as a
// dispatch parameter.
Node* InterpreterAssembler::BytecodeOffset() {
  return Parameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter);
}
80
Ben Murdoch097c5b22016-05-18 11:27:45 +010081Node* InterpreterAssembler::BytecodeArrayTaggedPointer() {
Ben Murdochc5610432016-08-08 18:44:38 +010082 if (made_call_) {
83 // If we have made a call, restore bytecode array from stack frame in case
84 // the debugger has swapped us to the patched debugger bytecode array.
85 return LoadRegister(Register::bytecode_array());
86 } else {
87 return Parameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter);
88 }
Ben Murdoch097c5b22016-05-18 11:27:45 +010089}
90
// Raw (untagged) pointer to the interpreter dispatch table, passed in as a
// dispatch parameter.
Node* InterpreterAssembler::DispatchTableRawPointer() {
  return Parameter(InterpreterDispatchDescriptor::kDispatchTableParameter);
}

// Address of interpreter register |reg_index| within the parent frame.
Node* InterpreterAssembler::RegisterLocation(Node* reg_index) {
  return IntPtrAdd(LoadParentFramePointer(), RegisterFrameOffset(reg_index));
}

// Byte offset of register |index| from the frame pointer. Registers are
// pointer-sized slots; indices grow downwards (see NextRegister).
Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
  return WordShl(index, kPointerSizeLog2);
}
102
// Loads a statically-known interpreter register from the parent frame.
Node* InterpreterAssembler::LoadRegister(Register reg) {
  return Load(MachineType::AnyTagged(), LoadParentFramePointer(),
              IntPtrConstant(reg.ToOperand() << kPointerSizeLog2));
}

// Loads a dynamically-indexed interpreter register from the parent frame.
Node* InterpreterAssembler::LoadRegister(Node* reg_index) {
  return Load(MachineType::AnyTagged(), LoadParentFramePointer(),
              RegisterFrameOffset(reg_index));
}

// Stores |value| into a statically-known register. NOTE(review): the store
// deliberately skips the write barrier — presumably safe because these are
// interpreter stack-frame slots rather than heap fields; confirm against the
// GC's frame-scanning behavior.
Node* InterpreterAssembler::StoreRegister(Node* value, Register reg) {
  return StoreNoWriteBarrier(
      MachineRepresentation::kTagged, LoadParentFramePointer(),
      IntPtrConstant(reg.ToOperand() << kPointerSizeLog2), value);
}

// Stores |value| into a dynamically-indexed register (no write barrier, as
// above).
Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) {
  return StoreNoWriteBarrier(MachineRepresentation::kTagged,
                             LoadParentFramePointer(),
                             RegisterFrameOffset(reg_index), value);
}

Node* InterpreterAssembler::NextRegister(Node* reg_index) {
  // Register indexes are negative, so the next index is minus one.
  return IntPtrAdd(reg_index, IntPtrConstant(-1));
}
129
// Constant byte offset of operand |operand_index| from the start of the
// current bytecode, at the current operand scale.
Node* InterpreterAssembler::OperandOffset(int operand_index) {
  return IntPtrConstant(
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()));
}

// Loads a one-byte unsigned operand; the Uint8 load zero-extends.
Node* InterpreterAssembler::BytecodeOperandUnsignedByte(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  Node* operand_offset = OperandOffset(operand_index);
  return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
              IntPtrAdd(BytecodeOffset(), operand_offset));
}

// Loads a one-byte signed operand, sign-extended to full pointer width on
// 64-bit targets.
Node* InterpreterAssembler::BytecodeOperandSignedByte(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  Node* operand_offset = OperandOffset(operand_index);
  Node* load = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
                    IntPtrAdd(BytecodeOffset(), operand_offset));

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}
158
// Reads a 2- or 4-byte operand at |relative_offset| one byte at a time, for
// targets that cannot perform unaligned loads. The most-significant byte is
// loaded with |result_type|'s signedness so the assembled value has the
// correct sign/zero extension.
compiler::Node* InterpreterAssembler::BytecodeOperandReadUnaligned(
    int relative_offset, MachineType result_type) {
  static const int kMaxCount = 4;
  DCHECK(!TargetSupportsUnalignedAccess());

  // Number of bytes to read, derived from the result representation.
  int count;
  switch (result_type.representation()) {
    case MachineRepresentation::kWord16:
      count = 2;
      break;
    case MachineRepresentation::kWord32:
      count = 4;
      break;
    default:
      UNREACHABLE();
      break;
  }
  // Only the MSB load carries the signedness; lower bytes are unsigned.
  MachineType msb_type =
      result_type.IsSigned() ? MachineType::Int8() : MachineType::Uint8();

#if V8_TARGET_LITTLE_ENDIAN
  const int kStep = -1;
  int msb_offset = count - 1;
#elif V8_TARGET_BIG_ENDIAN
  const int kStep = 1;
  int msb_offset = 0;
#else
#error "Unknown Architecture"
#endif

  // Read the most significant byte into bytes[0] and then in order
  // down to least significant in bytes[count - 1].
  DCHECK(count <= kMaxCount);
  compiler::Node* bytes[kMaxCount];
  for (int i = 0; i < count; i++) {
    MachineType machine_type = (i == 0) ? msb_type : MachineType::Uint8();
    Node* offset = IntPtrConstant(relative_offset + msb_offset + i * kStep);
    Node* array_offset = IntPtrAdd(BytecodeOffset(), offset);
    bytes[i] = Load(machine_type, BytecodeArrayTaggedPointer(), array_offset);
  }

  // Pack LSB to MSB: start with the least significant byte and OR in each
  // more significant byte shifted into place.
  Node* result = bytes[--count];
  for (int i = 1; --count >= 0; i++) {
    Node* shift = Int32Constant(i * kBitsPerByte);
    Node* value = Word32Shl(bytes[count], shift);
    result = Word32Or(value, result);
  }
  return result;
}
209
// Loads a two-byte unsigned operand, using a direct Uint16 load when the
// target supports unaligned access and a byte-wise read otherwise.
Node* InterpreterAssembler::BytecodeOperandUnsignedShort(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(
      OperandSize::kShort,
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  if (TargetSupportsUnalignedAccess()) {
    return Load(MachineType::Uint16(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint16());
  }
}

// Loads a two-byte signed operand, sign-extended to full pointer width on
// 64-bit targets.
Node* InterpreterAssembler::BytecodeOperandSignedShort(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(
      OperandSize::kShort,
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  Node* load;
  if (TargetSupportsUnalignedAccess()) {
    load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int16());
  }

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}
246
// Loads a four-byte unsigned operand, using a direct Uint32 load when the
// target supports unaligned access and a byte-wise read otherwise.
Node* InterpreterAssembler::BytecodeOperandUnsignedQuad(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  if (TargetSupportsUnalignedAccess()) {
    return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint32());
  }
}

// Loads a four-byte signed operand, sign-extended to full pointer width on
// 64-bit targets.
Node* InterpreterAssembler::BytecodeOperandSignedQuad(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  Node* load;
  if (TargetSupportsUnalignedAccess()) {
    load = Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int32());
  }

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}
281
// Dispatches a signed operand read to the loader matching |operand_size|.
Node* InterpreterAssembler::BytecodeSignedOperand(int operand_index,
                                                  OperandSize operand_size) {
  DCHECK(!Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandSignedByte(operand_index);
    case OperandSize::kShort:
      return BytecodeOperandSignedShort(operand_index);
    case OperandSize::kQuad:
      return BytecodeOperandSignedQuad(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}

// Dispatches an unsigned operand read to the loader matching |operand_size|.
Node* InterpreterAssembler::BytecodeUnsignedOperand(int operand_index,
                                                    OperandSize operand_size) {
  DCHECK(Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandUnsignedByte(operand_index);
    case OperandSize::kShort:
      return BytecodeOperandUnsignedShort(operand_index);
    case OperandSize::kQuad:
      return BytecodeOperandUnsignedQuad(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}
315
// Each accessor below asserts the operand's declared type for this bytecode
// and then reads it with the matching signedness.

// Register-count operand (unsigned).
Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
  DCHECK_EQ(OperandType::kRegCount,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

// 8-bit flag operand (unsigned, always a single byte).
Node* InterpreterAssembler::BytecodeOperandFlag(int operand_index) {
  DCHECK_EQ(OperandType::kFlag8,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kByte);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

// Immediate operand (signed).
Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
  DCHECK_EQ(OperandType::kImm,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeSignedOperand(operand_index, operand_size);
}

// Index operand, e.g. into the constant pool (unsigned).
Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
  DCHECK(OperandType::kIdx ==
         Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

// Register operand (signed — register indices can be negative).
Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) {
  DCHECK(Bytecodes::IsRegisterOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeSignedOperand(operand_index, operand_size);
}

// Runtime-function id operand (unsigned, always two bytes).
Node* InterpreterAssembler::BytecodeOperandRuntimeId(int operand_index) {
  DCHECK(OperandType::kRuntimeId ==
         Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kShort);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}
365
// Loads entry |index| from the bytecode array's constant pool (a FixedArray).
Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
  Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
                                        BytecodeArray::kConstantPoolOffset);
  Node* entry_offset =
      IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                WordShl(index, kPointerSizeLog2));
  return Load(MachineType::AnyTagged(), constant_pool, entry_offset);
}

// Loads a statically-known context slot.
Node* InterpreterAssembler::LoadContextSlot(Node* context, int slot_index) {
  return Load(MachineType::AnyTagged(), context,
              IntPtrConstant(Context::SlotOffset(slot_index)));
}

// Loads a dynamically-indexed context slot.
Node* InterpreterAssembler::LoadContextSlot(Node* context, Node* slot_index) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
  return Load(MachineType::AnyTagged(), context, offset);
}

// Stores |value| into a dynamically-indexed context slot (with write
// barrier, via Store).
Node* InterpreterAssembler::StoreContextSlot(Node* context, Node* slot_index,
                                             Node* value) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
  return Store(MachineRepresentation::kTagged, context, offset, value);
}
394
// Loads the type feedback vector of the current function: closure ->
// SharedFunctionInfo -> feedback vector.
Node* InterpreterAssembler::LoadTypeFeedbackVector() {
  Node* function = LoadRegister(Register::function_closure());
  Node* shared_info =
      LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset);
  Node* vector =
      LoadObjectField(shared_info, SharedFunctionInfo::kFeedbackVectorOffset);
  return vector;
}
403
// Run before every outgoing call: spills the current bytecode offset to its
// frame register (so stack walkers see it) and, in debug builds, snapshots
// the stack pointer so CallEpilogue can verify balance.
void InterpreterAssembler::CallPrologue() {
  StoreRegister(SmiTag(BytecodeOffset()), Register::bytecode_offset());

  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    DCHECK(stack_pointer_before_call_ == nullptr);
    stack_pointer_before_call_ = LoadStackPointer();
  }
  // Record the call so BytecodeArrayTaggedPointer() reloads from the frame.
  made_call_ = true;
}

// Run after every outgoing call: in debug builds, asserts the stack pointer
// was restored to its pre-call value.
void InterpreterAssembler::CallEpilogue() {
  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    Node* stack_pointer_after_call = LoadStackPointer();
    Node* stack_pointer_before_call = stack_pointer_before_call_;
    stack_pointer_before_call_ = nullptr;
    AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call,
                        kUnexpectedStackPointer);
  }
}
423
// Calls a JS |function| with |arg_count| arguments starting at register
// |first_arg|, via the InterpreterPushArgsAndCall stub.
Node* InterpreterAssembler::CallJS(Node* function, Node* context,
                                   Node* first_arg, Node* arg_count,
                                   TailCallMode tail_call_mode) {
  Callable callable =
      CodeFactory::InterpreterPushArgsAndCall(isolate(), tail_call_mode);
  Node* code_target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  first_arg, function);
}

// Invokes |constructor| as a constructor with |new_target|, via the
// InterpreterPushArgsAndConstruct stub.
Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context,
                                          Node* new_target, Node* first_arg,
                                          Node* arg_count) {
  Callable callable = CodeFactory::InterpreterPushArgsAndConstruct(isolate());
  Node* code_target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  new_target, constructor, first_arg);
}
442
// Calls runtime function |function_id| (a dynamic value) with |arg_count|
// arguments starting at register |first_arg|, via the interpreter CEntry
// stub. The C entry point is looked up in the runtime function table.
Node* InterpreterAssembler::CallRuntimeN(Node* function_id, Node* context,
                                         Node* first_arg, Node* arg_count,
                                         int result_size) {
  Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size);
  Node* code_target = HeapConstant(callable.code());

  // Get the function entry from the function id.
  Node* function_table = ExternalConstant(
      ExternalReference::runtime_function_table_address(isolate()));
  Node* function_offset =
      Int32Mul(function_id, Int32Constant(sizeof(Runtime::Function)));
  Node* function = IntPtrAdd(function_table, function_offset);
  Node* function_entry =
      Load(MachineType::Pointer(), function,
           IntPtrConstant(offsetof(Runtime::Function, entry)));

  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  first_arg, function_entry, result_size);
}
462
463void InterpreterAssembler::UpdateInterruptBudget(Node* weight) {
Ben Murdochc5610432016-08-08 18:44:38 +0100464 Label ok(this), interrupt_check(this, Label::kDeferred), end(this);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100465 Node* budget_offset =
466 IntPtrConstant(BytecodeArray::kInterruptBudgetOffset - kHeapObjectTag);
467
468 // Update budget by |weight| and check if it reaches zero.
Ben Murdochc5610432016-08-08 18:44:38 +0100469 Variable new_budget(this, MachineRepresentation::kWord32);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100470 Node* old_budget =
471 Load(MachineType::Int32(), BytecodeArrayTaggedPointer(), budget_offset);
Ben Murdochc5610432016-08-08 18:44:38 +0100472 new_budget.Bind(Int32Add(old_budget, weight));
473 Node* condition =
474 Int32GreaterThanOrEqual(new_budget.value(), Int32Constant(0));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100475 Branch(condition, &ok, &interrupt_check);
476
477 // Perform interrupt and reset budget.
478 Bind(&interrupt_check);
Ben Murdochc5610432016-08-08 18:44:38 +0100479 {
480 CallRuntime(Runtime::kInterrupt, GetContext());
481 new_budget.Bind(Int32Constant(Interpreter::InterruptBudget()));
482 Goto(&ok);
483 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100484
485 // Update budget.
486 Bind(&ok);
487 StoreNoWriteBarrier(MachineRepresentation::kWord32,
Ben Murdochc5610432016-08-08 18:44:38 +0100488 BytecodeArrayTaggedPointer(), budget_offset,
489 new_budget.value());
Ben Murdoch097c5b22016-05-18 11:27:45 +0100490}
491
492Node* InterpreterAssembler::Advance(int delta) {
Ben Murdochda12d292016-06-02 14:46:10 +0100493 return IntPtrAdd(BytecodeOffset(), IntPtrConstant(delta));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100494}
495
496Node* InterpreterAssembler::Advance(Node* delta) {
497 return IntPtrAdd(BytecodeOffset(), delta);
498}
499
Ben Murdochc5610432016-08-08 18:44:38 +0100500Node* InterpreterAssembler::Jump(Node* delta) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100501 UpdateInterruptBudget(delta);
Ben Murdochc5610432016-08-08 18:44:38 +0100502 return DispatchTo(Advance(delta));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100503}
504
// Jumps by |delta| when |condition| holds, otherwise dispatches to the next
// bytecode in sequence.
void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) {
  Label match(this), no_match(this);

  BranchIf(condition, &match, &no_match);
  Bind(&match);
  Jump(delta);
  Bind(&no_match);
  Dispatch();
}

// Jump by |delta| iff |lhs| == |rhs| (full-word comparison).
void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
  JumpConditional(WordEqual(lhs, rhs), delta);
}

// Jump by |delta| iff |lhs| != |rhs| (full-word comparison).
void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
                                              Node* delta) {
  JumpConditional(WordNotEqual(lhs, rhs), delta);
}
523
// Dispatches to the next bytecode in sequence (current offset plus the size
// of this bytecode at the current operand scale).
Node* InterpreterAssembler::Dispatch() {
  return DispatchTo(Advance(Bytecodes::Size(bytecode_, operand_scale_)));
}

// Loads the bytecode at |new_bytecode_offset|, looks up its handler entry in
// the dispatch table and tail-calls it.
Node* InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) {
  Node* target_bytecode = Load(
      MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset);
  // Zero-extend the bytecode so it can index the pointer-sized table.
  if (kPointerSize == 8) {
    target_bytecode = ChangeUint32ToUint64(target_bytecode);
  }

  if (FLAG_trace_ignition_dispatches) {
    TraceBytecodeDispatch(target_bytecode);
  }

  Node* target_code_entry =
      Load(MachineType::Pointer(), DispatchTableRawPointer(),
           WordShl(target_bytecode, IntPtrConstant(kPointerSizeLog2)));

  return DispatchToBytecodeHandlerEntry(target_code_entry, new_bytecode_offset);
}
545
// Dispatches to |handler|, a tagged Code object: computes its instruction
// start and defers to DispatchToBytecodeHandlerEntry.
Node* InterpreterAssembler::DispatchToBytecodeHandler(Node* handler,
                                                      Node* bytecode_offset) {
  Node* handler_entry =
      IntPtrAdd(handler, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
  return DispatchToBytecodeHandlerEntry(handler_entry, bytecode_offset);
}

// Tail-calls the raw handler entry point |handler_entry|, forwarding the
// accumulator, new offset, bytecode array and dispatch table as dispatch
// parameters. Optionally traces bytecode exit first.
Node* InterpreterAssembler::DispatchToBytecodeHandlerEntry(
    Node* handler_entry, Node* bytecode_offset) {
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
  }

  InterpreterDispatchDescriptor descriptor(isolate());
  Node* args[] = {GetAccumulatorUnchecked(), bytecode_offset,
                  BytecodeArrayTaggedPointer(), DispatchTableRawPointer()};
  return TailCallBytecodeDispatch(descriptor, handler_entry, args);
}
564
// Dispatches from a Wide/ExtraWide prefix bytecode to the scaled handler of
// the bytecode that follows it.
void InterpreterAssembler::DispatchWide(OperandScale operand_scale) {
  // Dispatching a wide bytecode requires treating the prefix
  // bytecode a base pointer into the dispatch table and dispatching
  // the bytecode that follows relative to this base.
  //
  // Indices 0-255 correspond to bytecodes with operand_scale == 0
  // Indices 256-511 correspond to bytecodes with operand_scale == 1
  // Indices 512-767 correspond to bytecodes with operand_scale == 2
  Node* next_bytecode_offset = Advance(1);
  Node* next_bytecode = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
                             next_bytecode_offset);
  if (kPointerSize == 8) {
    next_bytecode = ChangeUint32ToUint64(next_bytecode);
  }

  if (FLAG_trace_ignition_dispatches) {
    TraceBytecodeDispatch(next_bytecode);
  }

  // Select the table base index for the requested operand scale.
  Node* base_index;
  switch (operand_scale) {
    case OperandScale::kDouble:
      base_index = IntPtrConstant(1 << kBitsPerByte);
      break;
    case OperandScale::kQuadruple:
      base_index = IntPtrConstant(2 << kBitsPerByte);
      break;
    default:
      UNREACHABLE();
      base_index = nullptr;
  }
  Node* target_index = IntPtrAdd(base_index, next_bytecode);
  Node* target_code_entry =
      Load(MachineType::Pointer(), DispatchTableRawPointer(),
           WordShl(target_index, kPointerSizeLog2));

  DispatchToBytecodeHandlerEntry(target_code_entry, next_bytecode_offset);
}
603
// On return, charges the interrupt budget as if a backedge jumped from the
// current offset back to the start of the function.
void InterpreterAssembler::UpdateInterruptBudgetOnReturn() {
  // TODO(rmcilroy): Investigate whether it is worth supporting self
  // optimization of primitive functions like FullCodegen.

  // Update profiling count by -BytecodeOffset to simulate backedge to start of
  // function.
  Node* profiling_weight =
      Int32Sub(Int32Constant(kHeapObjectTag + BytecodeArray::kHeaderSize),
               BytecodeOffset());
  UpdateInterruptBudget(profiling_weight);
}

// Returns a condition that is true when the stack pointer is below the
// isolate's stack limit, i.e. an interrupt has been requested.
Node* InterpreterAssembler::StackCheckTriggeredInterrupt() {
  Node* sp = LoadStackPointer();
  Node* stack_limit = Load(
      MachineType::Pointer(),
      ExternalConstant(ExternalReference::address_of_stack_limit(isolate())));
  return UintPtrLessThan(sp, stack_limit);
}
623
// Aborts execution with |bailout_reason| via Runtime::kAbort. The stack
// check is disabled around the call because Runtime::kAbort does not return
// normally.
void InterpreterAssembler::Abort(BailoutReason bailout_reason) {
  disable_stack_check_across_call_ = true;
  Node* abort_id = SmiTag(Int32Constant(bailout_reason));
  CallRuntime(Runtime::kAbort, GetContext(), abort_id);
  disable_stack_check_across_call_ = false;
}

// Aborts with |bailout_reason| unless |lhs| == |rhs| (full-word comparison).
void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs,
                                               BailoutReason bailout_reason) {
  Label ok(this), abort(this, Label::kDeferred);
  BranchIfWordEqual(lhs, rhs, &ok, &abort);

  Bind(&abort);
  Abort(bailout_reason);
  Goto(&ok);

  Bind(&ok);
}

// Emits a runtime call tracing entry to / exit from the current bytecode,
// passing the bytecode array, current offset and accumulator.
void InterpreterAssembler::TraceBytecode(Runtime::FunctionId function_id) {
  CallRuntime(function_id, GetContext(), BytecodeArrayTaggedPointer(),
              SmiTag(BytecodeOffset()), GetAccumulatorUnchecked());
}
647
// Increments the (source bytecode, target bytecode) dispatch counter in the
// external counters table, saturating at the maximum uintptr_t value.
void InterpreterAssembler::TraceBytecodeDispatch(Node* target_bytecode) {
  Node* counters_table = ExternalConstant(
      ExternalReference::interpreter_dispatch_counters(isolate()));
  // The table is a flat 2D array: row = source bytecode, column = target.
  Node* source_bytecode_table_index = IntPtrConstant(
      static_cast<int>(bytecode_) * (static_cast<int>(Bytecode::kLast) + 1));

  Node* counter_offset =
      WordShl(IntPtrAdd(source_bytecode_table_index, target_bytecode),
              IntPtrConstant(kPointerSizeLog2));
  Node* old_counter =
      Load(MachineType::IntPtr(), counters_table, counter_offset);

  Label counter_ok(this), counter_saturated(this, Label::kDeferred);

  // Stop counting once the counter would overflow.
  Node* counter_reached_max = WordEqual(
      old_counter, IntPtrConstant(std::numeric_limits<uintptr_t>::max()));
  BranchIf(counter_reached_max, &counter_saturated, &counter_ok);

  Bind(&counter_ok);
  {
    Node* new_counter = IntPtrAdd(old_counter, IntPtrConstant(1));
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), counters_table,
                        counter_offset, new_counter);
    Goto(&counter_saturated);
  }

  Bind(&counter_saturated);
}
676
Ben Murdoch097c5b22016-05-18 11:27:45 +0100677// static
678bool InterpreterAssembler::TargetSupportsUnalignedAccess() {
679#if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
680 return false;
681#elif V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_PPC
682 return CpuFeatures::IsSupported(UNALIGNED_ACCESSES);
Ben Murdochda12d292016-06-02 14:46:10 +0100683#elif V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_X87 || \
684 V8_TARGET_ARCH_S390
Ben Murdoch097c5b22016-05-18 11:27:45 +0100685 return true;
686#else
687#error "Unknown Architecture"
688#endif
689}
690
// Number of registers in the current frame, derived from the BytecodeArray's
// frame size (bytes) divided by the pointer size.
Node* InterpreterAssembler::RegisterCount() {
  Node* bytecode_array = LoadRegister(Register::bytecode_array());
  Node* frame_size = LoadObjectField(
      bytecode_array, BytecodeArray::kFrameSizeOffset, MachineType::Int32());
  return Word32Sar(frame_size, Int32Constant(kPointerSizeLog2));
}
697
// Copies the interpreter register file into |array| (a FixedArray), e.g.
// when suspending a generator. Returns |array|.
Node* InterpreterAssembler::ExportRegisterFile(Node* array) {
  if (FLAG_debug_code) {
    Node* array_size = SmiUntag(LoadFixedArrayBaseLength(array));
    AbortIfWordNotEqual(
        array_size, RegisterCount(), kInvalidRegisterFileInGenerator);
  }

  Variable var_index(this, MachineRepresentation::kWord32);
  var_index.Bind(Int32Constant(0));

  // Iterate over register file and write values into array.
  // The mapping of register to array index must match that used in
  // BytecodeGraphBuilder::VisitResumeGenerator.
  Label loop(this, &var_index), done_loop(this);
  Goto(&loop);
  Bind(&loop);
  {
    Node* index = var_index.value();
    Node* condition = Int32LessThan(index, RegisterCount());
    GotoUnless(condition, &done_loop);

    // Register indices grow downwards from Register(0).
    Node* reg_index =
        Int32Sub(Int32Constant(Register(0).ToOperand()), index);
    Node* value = LoadRegister(ChangeInt32ToIntPtr(reg_index));

    StoreFixedArrayElement(array, index, value);

    var_index.Bind(Int32Add(index, Int32Constant(1)));
    Goto(&loop);
  }
  Bind(&done_loop);

  return array;
}
732
// Restores the interpreter register file from |array| (a FixedArray), e.g.
// when resuming a generator, replacing each consumed array slot with the
// stale-register sentinel. Returns |array|.
Node* InterpreterAssembler::ImportRegisterFile(Node* array) {
  if (FLAG_debug_code) {
    Node* array_size = SmiUntag(LoadFixedArrayBaseLength(array));
    AbortIfWordNotEqual(
        array_size, RegisterCount(), kInvalidRegisterFileInGenerator);
  }

  Variable var_index(this, MachineRepresentation::kWord32);
  var_index.Bind(Int32Constant(0));

  // Iterate over array and write values into register file. Also erase the
  // array contents to not keep them alive artificially.
  Label loop(this, &var_index), done_loop(this);
  Goto(&loop);
  Bind(&loop);
  {
    Node* index = var_index.value();
    Node* condition = Int32LessThan(index, RegisterCount());
    GotoUnless(condition, &done_loop);

    Node* value = LoadFixedArrayElement(array, index);

    // Register indices grow downwards from Register(0) — must mirror the
    // mapping used in ExportRegisterFile.
    Node* reg_index =
        Int32Sub(Int32Constant(Register(0).ToOperand()), index);
    StoreRegister(value, ChangeInt32ToIntPtr(reg_index));

    StoreFixedArrayElement(array, index, StaleRegisterConstant());

    var_index.Bind(Int32Add(index, Int32Constant(1)));
    Goto(&loop);
  }
  Bind(&done_loop);

  return array;
}
768
Ben Murdoch097c5b22016-05-18 11:27:45 +0100769} // namespace interpreter
770} // namespace internal
771} // namespace v8