// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/interpreter/interpreter-assembler.h"

#include <limits>
#include <ostream>

#include "src/code-factory.h"
#include "src/frames.h"
#include "src/interface-descriptors.h"
#include "src/interpreter/bytecodes.h"
#include "src/interpreter/interpreter.h"
#include "src/machine-type.h"
#include "src/macro-assembler.h"
#include "src/zone/zone.h"

namespace v8 {
namespace internal {
namespace interpreter {

using compiler::Node;

InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone,
                                           Bytecode bytecode,
                                           OperandScale operand_scale)
    : CodeStubAssembler(isolate, zone, InterpreterDispatchDescriptor(isolate),
                        Code::ComputeFlags(Code::BYTECODE_HANDLER),
                        Bytecodes::ToString(bytecode),
                        Bytecodes::ReturnCount(bytecode)),
      bytecode_(bytecode),
      operand_scale_(operand_scale),
      bytecode_offset_(this, MachineType::PointerRepresentation()),
      interpreted_frame_pointer_(this, MachineType::PointerRepresentation()),
      accumulator_(this, MachineRepresentation::kTagged),
      accumulator_use_(AccumulatorUse::kNone),
      made_call_(false),
      disable_stack_check_across_call_(false),
      stack_pointer_before_call_(nullptr) {
  accumulator_.Bind(Parameter(InterpreterDispatchDescriptor::kAccumulator));
  bytecode_offset_.Bind(
      Parameter(InterpreterDispatchDescriptor::kBytecodeOffset));
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
  }
}

InterpreterAssembler::~InterpreterAssembler() {
  // If the following check fails, the handler does not use the
  // accumulator in the way described in the bytecode definitions in
  // bytecodes.h.
  DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));
}

Node* InterpreterAssembler::GetInterpretedFramePointer() {
  if (!interpreted_frame_pointer_.IsBound()) {
    interpreted_frame_pointer_.Bind(LoadParentFramePointer());
  }
  return interpreted_frame_pointer_.value();
}

Node* InterpreterAssembler::GetAccumulatorUnchecked() {
  return accumulator_.value();
}

Node* InterpreterAssembler::GetAccumulator() {
  DCHECK(Bytecodes::ReadsAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kRead;
  return GetAccumulatorUnchecked();
}

void InterpreterAssembler::SetAccumulator(Node* value) {
  DCHECK(Bytecodes::WritesAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
  accumulator_.Bind(value);
}

Node* InterpreterAssembler::GetContext() {
  return LoadRegister(Register::current_context());
}

void InterpreterAssembler::SetContext(Node* value) {
  StoreRegister(value, Register::current_context());
}

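// Walks up the context chain from |context| by |depth| steps and returns the
// context at that depth, keeping the current depth and context as CSA loop
// variables.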
Node* InterpreterAssembler::GetContextAtDepth(Node* context, Node* depth) {
  Variable cur_context(this, MachineRepresentation::kTaggedPointer);
  cur_context.Bind(context);

  Variable cur_depth(this, MachineRepresentation::kWord32);
  cur_depth.Bind(depth);

  Label context_found(this);

  Variable* context_search_loop_variables[2] = {&cur_depth, &cur_context};
  Label context_search(this, 2, context_search_loop_variables);

  // Fast path if the depth is 0.
  BranchIfWord32Equal(depth, Int32Constant(0), &context_found, &context_search);

  // Loop until the depth is 0.
  Bind(&context_search);
  {
    cur_depth.Bind(Int32Sub(cur_depth.value(), Int32Constant(1)));
    cur_context.Bind(
        LoadContextSlot(cur_context.value(), Context::PREVIOUS_INDEX));

    BranchIfWord32Equal(cur_depth.value(), Int32Constant(0), &context_found,
                        &context_search);
  }

  Bind(&context_found);
  return cur_context.value();
}

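// Checks the extension slot of every context in the chain, from |context| up
// to |depth| levels, and jumps to |target| as soon as one holds something
// other than the hole (i.e. the context has an extension object).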
void InterpreterAssembler::GotoIfHasContextExtensionUpToDepth(Node* context,
                                                              Node* depth,
                                                              Label* target) {
  Variable cur_context(this, MachineRepresentation::kTaggedPointer);
  cur_context.Bind(context);

  Variable cur_depth(this, MachineRepresentation::kWord32);
  cur_depth.Bind(depth);

  Variable* context_search_loop_variables[2] = {&cur_depth, &cur_context};
  Label context_search(this, 2, context_search_loop_variables);

  // Loop until the depth is 0.
  Goto(&context_search);
  Bind(&context_search);
  {
    // TODO(leszeks): We only need to do this check if the context had a sloppy
    // eval; we could pass in a context chain bitmask to figure out which
    // contexts actually need to be checked.

    Node* extension_slot =
        LoadContextSlot(cur_context.value(), Context::EXTENSION_INDEX);

    // Jump to the target if the extension slot is not a hole.
    GotoIf(WordNotEqual(extension_slot, TheHoleConstant()), target);

    cur_depth.Bind(Int32Sub(cur_depth.value(), Int32Constant(1)));
    cur_context.Bind(
        LoadContextSlot(cur_context.value(), Context::PREVIOUS_INDEX));

    GotoIf(Word32NotEqual(cur_depth.value(), Int32Constant(0)),
           &context_search);
  }
}

Node* InterpreterAssembler::BytecodeOffset() {
  return bytecode_offset_.value();
}

Node* InterpreterAssembler::BytecodeArrayTaggedPointer() {
  if (made_call_) {
    // If we have made a call, restore bytecode array from stack frame in case
    // the debugger has swapped us to the patched debugger bytecode array.
    return LoadRegister(Register::bytecode_array());
  } else {
    return Parameter(InterpreterDispatchDescriptor::kBytecodeArray);
  }
}

Node* InterpreterAssembler::DispatchTableRawPointer() {
  return Parameter(InterpreterDispatchDescriptor::kDispatchTable);
}

Node* InterpreterAssembler::RegisterLocation(Node* reg_index) {
  return IntPtrAdd(GetInterpretedFramePointer(),
                   RegisterFrameOffset(reg_index));
}

Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
  return WordShl(index, kPointerSizeLog2);
}

Node* InterpreterAssembler::LoadRegister(Register reg) {
  return Load(MachineType::AnyTagged(), GetInterpretedFramePointer(),
              IntPtrConstant(reg.ToOperand() << kPointerSizeLog2));
}

Node* InterpreterAssembler::LoadRegister(Node* reg_index) {
  return Load(MachineType::AnyTagged(), GetInterpretedFramePointer(),
              RegisterFrameOffset(reg_index));
}

Node* InterpreterAssembler::StoreRegister(Node* value, Register reg) {
  return StoreNoWriteBarrier(
      MachineRepresentation::kTagged, GetInterpretedFramePointer(),
      IntPtrConstant(reg.ToOperand() << kPointerSizeLog2), value);
}

Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) {
  return StoreNoWriteBarrier(MachineRepresentation::kTagged,
                             GetInterpretedFramePointer(),
                             RegisterFrameOffset(reg_index), value);
}

Node* InterpreterAssembler::NextRegister(Node* reg_index) {
  // Register indexes are negative, so the next index is minus one.
  return IntPtrAdd(reg_index, IntPtrConstant(-1));
}

Node* InterpreterAssembler::OperandOffset(int operand_index) {
  return IntPtrConstant(
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()));
}

Node* InterpreterAssembler::BytecodeOperandUnsignedByte(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  Node* operand_offset = OperandOffset(operand_index);
  return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
              IntPtrAdd(BytecodeOffset(), operand_offset));
}

Node* InterpreterAssembler::BytecodeOperandSignedByte(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  Node* operand_offset = OperandOffset(operand_index);
  Node* load = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
                    IntPtrAdd(BytecodeOffset(), operand_offset));

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}

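// On targets without unaligned memory access, multi-byte operands are
// assembled a byte at a time: the most significant byte is loaded with the
// operand's signedness, the rest as unsigned bytes, and the pieces are
// combined with shifts and ors.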
compiler::Node* InterpreterAssembler::BytecodeOperandReadUnaligned(
    int relative_offset, MachineType result_type) {
  static const int kMaxCount = 4;
  DCHECK(!TargetSupportsUnalignedAccess());

  int count;
  switch (result_type.representation()) {
    case MachineRepresentation::kWord16:
      count = 2;
      break;
    case MachineRepresentation::kWord32:
      count = 4;
      break;
    default:
      UNREACHABLE();
      break;
  }
  MachineType msb_type =
      result_type.IsSigned() ? MachineType::Int8() : MachineType::Uint8();

#if V8_TARGET_LITTLE_ENDIAN
  const int kStep = -1;
  int msb_offset = count - 1;
#elif V8_TARGET_BIG_ENDIAN
  const int kStep = 1;
  int msb_offset = 0;
#else
#error "Unknown Architecture"
#endif

  // Read the most significant byte into bytes[0] and then in order
  // down to the least significant byte in bytes[count - 1].
  DCHECK(count <= kMaxCount);
  compiler::Node* bytes[kMaxCount];
  for (int i = 0; i < count; i++) {
    MachineType machine_type = (i == 0) ? msb_type : MachineType::Uint8();
    Node* offset = IntPtrConstant(relative_offset + msb_offset + i * kStep);
    Node* array_offset = IntPtrAdd(BytecodeOffset(), offset);
    bytes[i] = Load(machine_type, BytecodeArrayTaggedPointer(), array_offset);
  }

  // Pack LSB to MSB.
  Node* result = bytes[--count];
  for (int i = 1; --count >= 0; i++) {
    Node* shift = Int32Constant(i * kBitsPerByte);
    Node* value = Word32Shl(bytes[count], shift);
    result = Word32Or(value, result);
  }
  return result;
}

Node* InterpreterAssembler::BytecodeOperandUnsignedShort(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(
      OperandSize::kShort,
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  if (TargetSupportsUnalignedAccess()) {
    return Load(MachineType::Uint16(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint16());
  }
}

Node* InterpreterAssembler::BytecodeOperandSignedShort(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(
      OperandSize::kShort,
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  Node* load;
  if (TargetSupportsUnalignedAccess()) {
    load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int16());
  }

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}

Node* InterpreterAssembler::BytecodeOperandUnsignedQuad(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  if (TargetSupportsUnalignedAccess()) {
    return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint32());
  }
}

Node* InterpreterAssembler::BytecodeOperandSignedQuad(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  Node* load;
  if (TargetSupportsUnalignedAccess()) {
    load = Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int32());
  }

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}

Node* InterpreterAssembler::BytecodeSignedOperand(int operand_index,
                                                  OperandSize operand_size) {
  DCHECK(!Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandSignedByte(operand_index);
    case OperandSize::kShort:
      return BytecodeOperandSignedShort(operand_index);
    case OperandSize::kQuad:
      return BytecodeOperandSignedQuad(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}

Node* InterpreterAssembler::BytecodeUnsignedOperand(int operand_index,
                                                    OperandSize operand_size) {
  DCHECK(Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandUnsignedByte(operand_index);
    case OperandSize::kShort:
      return BytecodeOperandUnsignedShort(operand_index);
    case OperandSize::kQuad:
      return BytecodeOperandUnsignedQuad(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}

Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
  DCHECK_EQ(OperandType::kRegCount,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandFlag(int operand_index) {
  DCHECK_EQ(OperandType::kFlag8,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kByte);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandUImm(int operand_index) {
  DCHECK_EQ(OperandType::kUImm,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
  DCHECK_EQ(OperandType::kImm,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeSignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
  DCHECK(OperandType::kIdx ==
         Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) {
  DCHECK(Bytecodes::IsRegisterOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeSignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandRuntimeId(int operand_index) {
  DCHECK(OperandType::kRuntimeId ==
         Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kShort);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandIntrinsicId(int operand_index) {
  DCHECK(OperandType::kIntrinsicId ==
         Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kByte);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
  Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
                                        BytecodeArray::kConstantPoolOffset);
  Node* entry_offset =
      IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                WordShl(index, kPointerSizeLog2));
  return Load(MachineType::AnyTagged(), constant_pool, entry_offset);
}

Node* InterpreterAssembler::LoadAndUntagConstantPoolEntry(Node* index) {
  Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
                                        BytecodeArray::kConstantPoolOffset);
  int offset = FixedArray::kHeaderSize - kHeapObjectTag;
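  // On 64-bit little-endian targets the Smi payload sits in the upper half of
  // the tagged word, so the offset is bumped by half a pointer and the payload
  // read directly as a 32-bit value.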
#if V8_TARGET_LITTLE_ENDIAN
  if (Is64()) {
    offset += kPointerSize / 2;
  }
#endif
  Node* entry_offset =
      IntPtrAdd(IntPtrConstant(offset), WordShl(index, kPointerSizeLog2));
  if (Is64()) {
    return ChangeInt32ToInt64(
        Load(MachineType::Int32(), constant_pool, entry_offset));
  } else {
    return SmiUntag(
        Load(MachineType::AnyTagged(), constant_pool, entry_offset));
  }
}

Node* InterpreterAssembler::LoadContextSlot(Node* context, int slot_index) {
  return Load(MachineType::AnyTagged(), context,
              IntPtrConstant(Context::SlotOffset(slot_index)));
}

Node* InterpreterAssembler::LoadContextSlot(Node* context, Node* slot_index) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
  return Load(MachineType::AnyTagged(), context, offset);
}

Node* InterpreterAssembler::StoreContextSlot(Node* context, Node* slot_index,
                                             Node* value) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
  return Store(MachineRepresentation::kTagged, context, offset, value);
}

Node* InterpreterAssembler::LoadTypeFeedbackVector() {
  Node* function = LoadRegister(Register::function_closure());
  Node* literals = LoadObjectField(function, JSFunction::kLiteralsOffset);
  Node* vector =
      LoadObjectField(literals, LiteralsArray::kFeedbackVectorOffset);
  return vector;
}

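// Spills the current bytecode offset into its dedicated frame slot before a
// call, and records that a call was made so the bytecode array is reloaded
// from the frame afterwards (the debugger may have swapped in a patched one).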
void InterpreterAssembler::CallPrologue() {
  StoreRegister(SmiTag(BytecodeOffset()), Register::bytecode_offset());

  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    DCHECK(stack_pointer_before_call_ == nullptr);
    stack_pointer_before_call_ = LoadStackPointer();
  }
  made_call_ = true;
}

void InterpreterAssembler::CallEpilogue() {
  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    Node* stack_pointer_after_call = LoadStackPointer();
    Node* stack_pointer_before_call = stack_pointer_before_call_;
    stack_pointer_before_call_ = nullptr;
    AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call,
                        kUnexpectedStackPointer);
  }
}

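// The call count for a call IC is stored as a Smi in the feedback vector slot
// that follows the feedback element itself (slot_id + 1).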
Node* InterpreterAssembler::IncrementCallCount(Node* type_feedback_vector,
                                               Node* slot_id) {
  Comment("increment call count");
  Node* call_count_slot = IntPtrAdd(slot_id, IntPtrConstant(1));
  Node* call_count =
      LoadFixedArrayElement(type_feedback_vector, call_count_slot);
  Node* new_count = SmiAdd(call_count, SmiTag(Int32Constant(1)));
  // Count is Smi, so we don't need a write barrier.
  return StoreFixedArrayElement(type_feedback_vector, call_count_slot,
                                new_count, SKIP_WRITE_BARRIER);
}

Node* InterpreterAssembler::CallJSWithFeedback(Node* function, Node* context,
                                               Node* first_arg, Node* arg_count,
                                               Node* slot_id,
                                               Node* type_feedback_vector,
                                               TailCallMode tail_call_mode) {
  // Static checks to assert it is safe to examine the type feedback element.
  // We don't know that we have a weak cell. We might have a private symbol
  // or an AllocationSite, but the memory is safe to examine.
  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
  // FixedArray.
  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
  // computed, meaning that it can't appear to be a pointer. If the low bit is
  // 0, then hash is computed, but the 0 bit prevents the field from appearing
  // to be a pointer.
  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                    WeakCell::kValueOffset &&
                WeakCell::kValueOffset == Symbol::kHashFieldSlot);

  Variable return_value(this, MachineRepresentation::kTagged);
  Label handle_monomorphic(this), extra_checks(this), end(this), call(this),
      call_function(this), call_without_feedback(this);

  // Slot id of 0 is used to indicate no type feedback is available. Call using
  // call builtin.
  STATIC_ASSERT(TypeFeedbackVector::kReservedIndexCount > 0);
  Node* is_feedback_unavailable = Word32Equal(slot_id, Int32Constant(0));
  GotoIf(is_feedback_unavailable, &call_without_feedback);

  // The checks. First, does function match the recorded monomorphic target?
  Node* feedback_element = LoadFixedArrayElement(type_feedback_vector, slot_id);
  Node* feedback_value = LoadWeakCellValue(feedback_element);
  Node* is_monomorphic = WordEqual(function, feedback_value);
  BranchIf(is_monomorphic, &handle_monomorphic, &extra_checks);

  Bind(&handle_monomorphic);
  {
    // The compare above could have been a SMI/SMI comparison. Guard against
    // this convincing us that we have a monomorphic JSFunction.
    Node* is_smi = WordIsSmi(function);
    GotoIf(is_smi, &extra_checks);

    // Increment the call count.
    IncrementCallCount(type_feedback_vector, slot_id);

    // Call using call function builtin.
    Callable callable = CodeFactory::InterpreterPushArgsAndCall(
        isolate(), tail_call_mode, CallableType::kJSFunction);
    Node* code_target = HeapConstant(callable.code());
    Node* ret_value = CallStub(callable.descriptor(), code_target, context,
                               arg_count, first_arg, function);
    return_value.Bind(ret_value);
    Goto(&end);
  }

  Bind(&extra_checks);
  {
    Label check_initialized(this, Label::kDeferred), mark_megamorphic(this),
        check_allocation_site(this),
        create_allocation_site(this, Label::kDeferred);
    // Check if it is a megamorphic target
    Node* is_megamorphic = WordEqual(
        feedback_element,
        HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())));
    BranchIf(is_megamorphic, &call, &check_allocation_site);

    Bind(&check_allocation_site);
    {
      Node* is_allocation_site =
          WordEqual(LoadMap(feedback_element),
                    LoadRoot(Heap::kAllocationSiteMapRootIndex));
      GotoUnless(is_allocation_site, &check_initialized);

      // If it is not the Array() function, mark megamorphic.
      Node* context_slot =
          LoadFixedArrayElement(LoadNativeContext(context),
                                Int32Constant(Context::ARRAY_FUNCTION_INDEX));
      Node* is_array_function = WordEqual(context_slot, function);
      GotoUnless(is_array_function, &mark_megamorphic);

      // It is a monomorphic Array function. Increment the call count.
      IncrementCallCount(type_feedback_vector, slot_id);

      // Call ArrayConstructorStub.
      Callable callable_call =
          CodeFactory::InterpreterPushArgsAndConstructArray(isolate());
      Node* code_target_call = HeapConstant(callable_call.code());
      Node* ret_value =
          CallStub(callable_call.descriptor(), code_target_call, context,
                   arg_count, function, feedback_element, first_arg);
      return_value.Bind(ret_value);
      Goto(&end);
    }

    Bind(&check_initialized);
    {
      Label possibly_monomorphic(this);
      // Check if it is uninitialized.
      Node* is_uninitialized = WordEqual(
          feedback_element,
          HeapConstant(TypeFeedbackVector::UninitializedSentinel(isolate())));
      GotoUnless(is_uninitialized, &mark_megamorphic);

      Node* is_smi = WordIsSmi(function);
      GotoIf(is_smi, &mark_megamorphic);

      // Check if function is an object of JSFunction type
      Node* instance_type = LoadInstanceType(function);
      Node* is_js_function =
          WordEqual(instance_type, Int32Constant(JS_FUNCTION_TYPE));
      GotoUnless(is_js_function, &mark_megamorphic);

      // Check if it is the Array() function.
      Node* context_slot =
          LoadFixedArrayElement(LoadNativeContext(context),
                                Int32Constant(Context::ARRAY_FUNCTION_INDEX));
      Node* is_array_function = WordEqual(context_slot, function);
      GotoIf(is_array_function, &create_allocation_site);

      // Check if the function belongs to the same native context
      Node* native_context = LoadNativeContext(
          LoadObjectField(function, JSFunction::kContextOffset));
      Node* is_same_native_context =
          WordEqual(native_context, LoadNativeContext(context));
      GotoUnless(is_same_native_context, &mark_megamorphic);

      CreateWeakCellInFeedbackVector(type_feedback_vector, SmiTag(slot_id),
                                     function);

      // Call using call function builtin.
      Goto(&call_function);
    }

    Bind(&create_allocation_site);
    {
      CreateAllocationSiteInFeedbackVector(type_feedback_vector,
                                           SmiTag(slot_id));

      // Call using CallFunction builtin. CallICs have a PREMONOMORPHIC state.
      // They start collecting feedback only when a call is executed the second
      // time. So, do not pass any feedback here.
      Goto(&call_function);
    }

    Bind(&mark_megamorphic);
    {
      // Mark it as megamorphic.
      // MegamorphicSentinel is created as a part of Heap::InitialObjects
      // and will not move during a GC. So it is safe to skip write barrier.
      DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex));
      StoreFixedArrayElement(
          type_feedback_vector, slot_id,
          HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())),
          SKIP_WRITE_BARRIER);
      Goto(&call);
    }
  }

  Bind(&call_function);
  {
    // Increment the call count.
    IncrementCallCount(type_feedback_vector, slot_id);

    Callable callable_call = CodeFactory::InterpreterPushArgsAndCall(
        isolate(), tail_call_mode, CallableType::kJSFunction);
    Node* code_target_call = HeapConstant(callable_call.code());
    Node* ret_value = CallStub(callable_call.descriptor(), code_target_call,
                               context, arg_count, first_arg, function);
    return_value.Bind(ret_value);
    Goto(&end);
  }

  Bind(&call);
  {
    // Increment the call count.
    IncrementCallCount(type_feedback_vector, slot_id);

    // Call using call builtin.
    Callable callable_call = CodeFactory::InterpreterPushArgsAndCall(
        isolate(), tail_call_mode, CallableType::kAny);
    Node* code_target_call = HeapConstant(callable_call.code());
    Node* ret_value = CallStub(callable_call.descriptor(), code_target_call,
                               context, arg_count, first_arg, function);
    return_value.Bind(ret_value);
    Goto(&end);
  }

  Bind(&call_without_feedback);
  {
    // Call using call builtin.
    Callable callable_call = CodeFactory::InterpreterPushArgsAndCall(
        isolate(), tail_call_mode, CallableType::kAny);
    Node* code_target_call = HeapConstant(callable_call.code());
    Node* ret_value = CallStub(callable_call.descriptor(), code_target_call,
                               context, arg_count, first_arg, function);
    return_value.Bind(ret_value);
    Goto(&end);
  }

  Bind(&end);
  return return_value.value();
}

Node* InterpreterAssembler::CallJS(Node* function, Node* context,
                                   Node* first_arg, Node* arg_count,
                                   TailCallMode tail_call_mode) {
  Callable callable = CodeFactory::InterpreterPushArgsAndCall(
      isolate(), tail_call_mode, CallableType::kAny);
  Node* code_target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  first_arg, function);
}

Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context,
                                          Node* new_target, Node* first_arg,
                                          Node* arg_count, Node* slot_id,
                                          Node* type_feedback_vector) {
  Label call_construct(this), js_function(this), end(this);
  Variable return_value(this, MachineRepresentation::kTagged);
  Variable allocation_feedback(this, MachineRepresentation::kTagged);
  allocation_feedback.Bind(UndefinedConstant());

  // Slot id of 0 is used to indicate no type feedback is available.
  STATIC_ASSERT(TypeFeedbackVector::kReservedIndexCount > 0);
  Node* is_feedback_unavailable = Word32Equal(slot_id, Int32Constant(0));
  GotoIf(is_feedback_unavailable, &call_construct);

  // Check that the constructor is not a smi.
  Node* is_smi = WordIsSmi(constructor);
  GotoIf(is_smi, &call_construct);

  // Check that constructor is a JSFunction.
  Node* instance_type = LoadInstanceType(constructor);
  Node* is_js_function =
      WordEqual(instance_type, Int32Constant(JS_FUNCTION_TYPE));
  BranchIf(is_js_function, &js_function, &call_construct);

  Bind(&js_function);
  {
    // Cache the called function in a feedback vector slot. Cache states
    // are uninitialized, monomorphic (indicated by a JSFunction), and
    // megamorphic.
    // TODO(mythria/v8:5210): Check if it is better to mark extra_checks as a
    // deferred block so that call_construct_function will be scheduled.
    Label extra_checks(this), call_construct_function(this);

    Node* feedback_element =
        LoadFixedArrayElement(type_feedback_vector, slot_id);
    Node* feedback_value = LoadWeakCellValue(feedback_element);
    Node* is_monomorphic = WordEqual(constructor, feedback_value);
    BranchIf(is_monomorphic, &call_construct_function, &extra_checks);

    Bind(&extra_checks);
    {
      Label mark_megamorphic(this), initialize(this),
          check_allocation_site(this), check_initialized(this),
          set_alloc_feedback_and_call(this);
      {
        // Check if it is a megamorphic target
        Comment("check if megamorphic");
        Node* is_megamorphic = WordEqual(
            feedback_element,
            HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())));
        GotoIf(is_megamorphic, &call_construct_function);

        Comment("check if weak cell");
        Node* is_weak_cell = WordEqual(LoadMap(feedback_element),
                                       LoadRoot(Heap::kWeakCellMapRootIndex));
        GotoUnless(is_weak_cell, &check_allocation_site);
        // If the weak cell is cleared, we have a new chance to become
        // monomorphic.
        Comment("check if weak cell is cleared");
        Node* is_smi = WordIsSmi(feedback_value);
        BranchIf(is_smi, &initialize, &mark_megamorphic);
      }

      Bind(&check_allocation_site);
      {
        Comment("check if it is an allocation site");
        Node* is_allocation_site =
            WordEqual(LoadObjectField(feedback_element, 0),
                      LoadRoot(Heap::kAllocationSiteMapRootIndex));
        GotoUnless(is_allocation_site, &check_initialized);

        // Make sure the function is the Array() function
        Node* context_slot =
            LoadFixedArrayElement(LoadNativeContext(context),
                                  Int32Constant(Context::ARRAY_FUNCTION_INDEX));
        Node* is_array_function = WordEqual(context_slot, constructor);
        BranchIf(is_array_function, &set_alloc_feedback_and_call,
                 &mark_megamorphic);
      }

      Bind(&set_alloc_feedback_and_call);
      {
        allocation_feedback.Bind(feedback_element);
        Goto(&call_construct_function);
      }

      Bind(&check_initialized);
      {
        // Check if it is uninitialized.
        Comment("check if uninitialized");
        Node* is_uninitialized = WordEqual(
            feedback_element, LoadRoot(Heap::kuninitialized_symbolRootIndex));
        BranchIf(is_uninitialized, &initialize, &mark_megamorphic);
      }

      Bind(&initialize);
      {
        Label create_weak_cell(this), create_allocation_site(this);
        Comment("initialize the feedback element");
        // Check that it is the Array() function.
        Node* context_slot =
            LoadFixedArrayElement(LoadNativeContext(context),
                                  Int32Constant(Context::ARRAY_FUNCTION_INDEX));
        Node* is_array_function = WordEqual(context_slot, constructor);
        BranchIf(is_array_function, &create_allocation_site, &create_weak_cell);

        Bind(&create_allocation_site);
        {
          Node* site = CreateAllocationSiteInFeedbackVector(
              type_feedback_vector, SmiTag(slot_id));
          allocation_feedback.Bind(site);
          Goto(&call_construct_function);
        }

        Bind(&create_weak_cell);
        {
          CreateWeakCellInFeedbackVector(type_feedback_vector, SmiTag(slot_id),
                                         constructor);
          Goto(&call_construct_function);
        }
      }

      Bind(&mark_megamorphic);
      {
        // MegamorphicSentinel is an immortal immovable object so
        // write-barrier is not needed.
        Comment("transition to megamorphic");
        DCHECK(
            Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex));
        StoreFixedArrayElement(
            type_feedback_vector, slot_id,
            HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())),
            SKIP_WRITE_BARRIER);
        Goto(&call_construct_function);
      }
    }

    Bind(&call_construct_function);
    {
      Comment("call using callConstructFunction");
      IncrementCallCount(type_feedback_vector, slot_id);
      Callable callable_function = CodeFactory::InterpreterPushArgsAndConstruct(
          isolate(), CallableType::kJSFunction);
      return_value.Bind(CallStub(callable_function.descriptor(),
                                 HeapConstant(callable_function.code()),
                                 context, arg_count, new_target, constructor,
                                 allocation_feedback.value(), first_arg));
      Goto(&end);
    }
  }

  Bind(&call_construct);
  {
    Comment("call using callConstruct builtin");
    Callable callable = CodeFactory::InterpreterPushArgsAndConstruct(
        isolate(), CallableType::kAny);
    Node* code_target = HeapConstant(callable.code());
    return_value.Bind(CallStub(callable.descriptor(), code_target, context,
                               arg_count, new_target, constructor,
                               UndefinedConstant(), first_arg));
    Goto(&end);
  }

  Bind(&end);
  return return_value.value();
}

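// Calls the runtime function identified by |function_id| through the
// InterpreterCEntry stub, looking up the C++ entry point in the runtime
// function table.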
Node* InterpreterAssembler::CallRuntimeN(Node* function_id, Node* context,
                                         Node* first_arg, Node* arg_count,
                                         int result_size) {
  Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size);
  Node* code_target = HeapConstant(callable.code());

  // Get the function entry from the function id.
  Node* function_table = ExternalConstant(
      ExternalReference::runtime_function_table_address(isolate()));
  Node* function_offset =
      Int32Mul(function_id, Int32Constant(sizeof(Runtime::Function)));
  Node* function = IntPtrAdd(function_table, function_offset);
  Node* function_entry =
      Load(MachineType::Pointer(), function,
           IntPtrConstant(offsetof(Runtime::Function, entry)));

  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  first_arg, function_entry, result_size);
}

void InterpreterAssembler::UpdateInterruptBudget(Node* weight) {
  // TODO(rmcilroy): It might be worthwhile to only update the budget for
  // backwards branches. Those are distinguishable by the {JumpLoop} bytecode.

  Label ok(this), interrupt_check(this, Label::kDeferred), end(this);
  Node* budget_offset =
      IntPtrConstant(BytecodeArray::kInterruptBudgetOffset - kHeapObjectTag);

  // Update budget by |weight| and check if it reaches zero.
  Variable new_budget(this, MachineRepresentation::kWord32);
  Node* old_budget =
      Load(MachineType::Int32(), BytecodeArrayTaggedPointer(), budget_offset);
  new_budget.Bind(Int32Add(old_budget, weight));
  Node* condition =
      Int32GreaterThanOrEqual(new_budget.value(), Int32Constant(0));
  Branch(condition, &ok, &interrupt_check);

  // Perform interrupt and reset budget.
  Bind(&interrupt_check);
  {
    CallRuntime(Runtime::kInterrupt, GetContext());
    new_budget.Bind(Int32Constant(Interpreter::InterruptBudget()));
    Goto(&ok);
  }

  // Update budget.
  Bind(&ok);
  StoreNoWriteBarrier(MachineRepresentation::kWord32,
                      BytecodeArrayTaggedPointer(), budget_offset,
                      new_budget.value());
}

Node* InterpreterAssembler::Advance() {
  return Advance(Bytecodes::Size(bytecode_, operand_scale_));
}

Node* InterpreterAssembler::Advance(int delta) {
  return Advance(IntPtrConstant(delta));
}

Node* InterpreterAssembler::Advance(Node* delta) {
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
  }
  Node* next_offset = IntPtrAdd(BytecodeOffset(), delta);
  bytecode_offset_.Bind(next_offset);
  return next_offset;
}

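// Jumps by |delta| bytes from the current bytecode offset. The interrupt
// budget is charged with the jump distance (so backward jumps, which have a
// negative delta, eventually trigger an interrupt check), then the offset is
// advanced and the target bytecode dispatched.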
Node* InterpreterAssembler::Jump(Node* delta) {
  DCHECK(!Bytecodes::IsStarLookahead(bytecode_, operand_scale_));

  UpdateInterruptBudget(delta);
  Node* new_bytecode_offset = Advance(delta);
  Node* target_bytecode = LoadBytecode(new_bytecode_offset);
  return DispatchToBytecode(target_bytecode, new_bytecode_offset);
}

void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) {
  Label match(this), no_match(this);

  BranchIf(condition, &match, &no_match);
  Bind(&match);
  Jump(delta);
  Bind(&no_match);
  Dispatch();
}

void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
  JumpConditional(WordEqual(lhs, rhs), delta);
}

void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
                                              Node* delta) {
  JumpConditional(WordNotEqual(lhs, rhs), delta);
}

Node* InterpreterAssembler::LoadBytecode(compiler::Node* bytecode_offset) {
  Node* bytecode =
      Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), bytecode_offset);
  if (kPointerSize == 8) {
    bytecode = ChangeUint32ToUint64(bytecode);
  }
  return bytecode;
}

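// If the bytecode that follows is a Star, executes it inline (storing the
// accumulator into its register operand) and returns the bytecode after it,
// saving one dispatch; otherwise returns |target_bytecode| unchanged.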
Node* InterpreterAssembler::StarDispatchLookahead(Node* target_bytecode) {
  Label do_inline_star(this), done(this);

  Variable var_bytecode(this, MachineRepresentation::kWord8);
  var_bytecode.Bind(target_bytecode);

  Node* star_bytecode = IntPtrConstant(static_cast<int>(Bytecode::kStar));
  Node* is_star = WordEqual(target_bytecode, star_bytecode);
  BranchIf(is_star, &do_inline_star, &done);

  Bind(&do_inline_star);
  {
    InlineStar();
    var_bytecode.Bind(LoadBytecode(BytecodeOffset()));
    Goto(&done);
  }
  Bind(&done);
  return var_bytecode.value();
}

void InterpreterAssembler::InlineStar() {
  Bytecode previous_bytecode = bytecode_;
  AccumulatorUse previous_acc_use = accumulator_use_;

  bytecode_ = Bytecode::kStar;
  accumulator_use_ = AccumulatorUse::kNone;

  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
  }
  StoreRegister(GetAccumulator(), BytecodeOperandReg(0));

  DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));

  Advance();
  bytecode_ = previous_bytecode;
  accumulator_use_ = previous_acc_use;
}

Node* InterpreterAssembler::Dispatch() {
  Node* target_offset = Advance();
  Node* target_bytecode = LoadBytecode(target_offset);

  if (Bytecodes::IsStarLookahead(bytecode_, operand_scale_)) {
    target_bytecode = StarDispatchLookahead(target_bytecode);
  }
  return DispatchToBytecode(target_bytecode, BytecodeOffset());
}

Node* InterpreterAssembler::DispatchToBytecode(Node* target_bytecode,
                                               Node* new_bytecode_offset) {
  if (FLAG_trace_ignition_dispatches) {
    TraceBytecodeDispatch(target_bytecode);
  }

  Node* target_code_entry =
      Load(MachineType::Pointer(), DispatchTableRawPointer(),
           WordShl(target_bytecode, IntPtrConstant(kPointerSizeLog2)));

  return DispatchToBytecodeHandlerEntry(target_code_entry, new_bytecode_offset);
}

Node* InterpreterAssembler::DispatchToBytecodeHandler(Node* handler,
                                                      Node* bytecode_offset) {
  Node* handler_entry =
      IntPtrAdd(handler, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
  return DispatchToBytecodeHandlerEntry(handler_entry, bytecode_offset);
}

Node* InterpreterAssembler::DispatchToBytecodeHandlerEntry(
    Node* handler_entry, Node* bytecode_offset) {
  InterpreterDispatchDescriptor descriptor(isolate());
  Node* args[] = {GetAccumulatorUnchecked(), bytecode_offset,
                  BytecodeArrayTaggedPointer(), DispatchTableRawPointer()};
  return TailCallBytecodeDispatch(descriptor, handler_entry, args);
}

void InterpreterAssembler::DispatchWide(OperandScale operand_scale) {
  // Dispatching a wide bytecode requires treating the prefix
  // bytecode as a base pointer into the dispatch table and dispatching
  // the bytecode that follows relative to this base.
  //
  // Indices 0-255 correspond to bytecodes with operand_scale == 0
  // Indices 256-511 correspond to bytecodes with operand_scale == 1
  // Indices 512-767 correspond to bytecodes with operand_scale == 2
  Node* next_bytecode_offset = Advance(1);
  Node* next_bytecode = LoadBytecode(next_bytecode_offset);

  if (FLAG_trace_ignition_dispatches) {
    TraceBytecodeDispatch(next_bytecode);
  }

  Node* base_index;
  switch (operand_scale) {
    case OperandScale::kDouble:
      base_index = IntPtrConstant(1 << kBitsPerByte);
      break;
    case OperandScale::kQuadruple:
      base_index = IntPtrConstant(2 << kBitsPerByte);
      break;
    default:
      UNREACHABLE();
      base_index = nullptr;
  }
  Node* target_index = IntPtrAdd(base_index, next_bytecode);
  Node* target_code_entry =
      Load(MachineType::Pointer(), DispatchTableRawPointer(),
           WordShl(target_index, kPointerSizeLog2));

  DispatchToBytecodeHandlerEntry(target_code_entry, next_bytecode_offset);
}

Node* InterpreterAssembler::TruncateTaggedToWord32WithFeedback(
    Node* context, Node* value, Variable* var_type_feedback) {
  // We might need to loop once due to ToNumber conversion.
  Variable var_value(this, MachineRepresentation::kTagged),
      var_result(this, MachineRepresentation::kWord32);
  Variable* loop_vars[] = {&var_value, var_type_feedback};
  Label loop(this, 2, loop_vars), done_loop(this, &var_result);
  var_value.Bind(value);
  var_type_feedback->Bind(Int32Constant(BinaryOperationFeedback::kNone));
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    Label if_valueissmi(this), if_valueisnotsmi(this);
    Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

    Bind(&if_valueissmi);
    {
      // Convert the Smi {value}.
      var_result.Bind(SmiToWord32(value));
      var_type_feedback->Bind(
          Word32Or(var_type_feedback->value(),
                   Int32Constant(BinaryOperationFeedback::kSignedSmall)));
      Goto(&done_loop);
    }

    Bind(&if_valueisnotsmi);
    {
      // Check if {value} is a HeapNumber.
      Label if_valueisheapnumber(this),
          if_valueisnotheapnumber(this, Label::kDeferred);
      Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
             &if_valueisheapnumber, &if_valueisnotheapnumber);

      Bind(&if_valueisheapnumber);
      {
        // Truncate the floating point value.
        var_result.Bind(TruncateHeapNumberValueToWord32(value));
        var_type_feedback->Bind(
            Word32Or(var_type_feedback->value(),
                     Int32Constant(BinaryOperationFeedback::kNumber)));
        Goto(&done_loop);
      }

      Bind(&if_valueisnotheapnumber);
      {
        // Convert the {value} to a Number first.
        Callable callable = CodeFactory::NonNumberToNumber(isolate());
        var_value.Bind(CallStub(callable, context, value));
        var_type_feedback->Bind(Int32Constant(BinaryOperationFeedback::kAny));
        Goto(&loop);
      }
    }
  }
  Bind(&done_loop);
  return var_result.value();
}

void InterpreterAssembler::UpdateInterruptBudgetOnReturn() {
  // TODO(rmcilroy): Investigate whether it is worth supporting self
  // optimization of primitive functions like FullCodegen.

  // Update profiling count by -BytecodeOffset to simulate backedge to start of
  // function.
  Node* profiling_weight =
      Int32Sub(Int32Constant(kHeapObjectTag + BytecodeArray::kHeaderSize),
               BytecodeOffset());
  UpdateInterruptBudget(profiling_weight);
}

Node* InterpreterAssembler::StackCheckTriggeredInterrupt() {
  Node* sp = LoadStackPointer();
  Node* stack_limit = Load(
      MachineType::Pointer(),
      ExternalConstant(ExternalReference::address_of_stack_limit(isolate())));
  return UintPtrLessThan(sp, stack_limit);
}

Node* InterpreterAssembler::LoadOSRNestingLevel() {
  Node* offset =
      IntPtrConstant(BytecodeArray::kOSRNestingLevelOffset - kHeapObjectTag);
  return Load(MachineType::Int8(), BytecodeArrayTaggedPointer(), offset);
}

void InterpreterAssembler::Abort(BailoutReason bailout_reason) {
  disable_stack_check_across_call_ = true;
  Node* abort_id = SmiTag(Int32Constant(bailout_reason));
  CallRuntime(Runtime::kAbort, GetContext(), abort_id);
  disable_stack_check_across_call_ = false;
}

void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs,
                                               BailoutReason bailout_reason) {
  Label ok(this), abort(this, Label::kDeferred);
  BranchIfWordEqual(lhs, rhs, &ok, &abort);

  Bind(&abort);
  Abort(bailout_reason);
  Goto(&ok);

  Bind(&ok);
}

void InterpreterAssembler::TraceBytecode(Runtime::FunctionId function_id) {
  CallRuntime(function_id, GetContext(), BytecodeArrayTaggedPointer(),
              SmiTag(BytecodeOffset()), GetAccumulatorUnchecked());
}

void InterpreterAssembler::TraceBytecodeDispatch(Node* target_bytecode) {
  Node* counters_table = ExternalConstant(
      ExternalReference::interpreter_dispatch_counters(isolate()));
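  // The counters table is a flat (kLast + 1) x (kLast + 1) matrix of
  // pointer-sized counters, indexed by [source bytecode][target bytecode].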
  Node* source_bytecode_table_index = IntPtrConstant(
      static_cast<int>(bytecode_) * (static_cast<int>(Bytecode::kLast) + 1));

  Node* counter_offset =
      WordShl(IntPtrAdd(source_bytecode_table_index, target_bytecode),
              IntPtrConstant(kPointerSizeLog2));
  Node* old_counter =
      Load(MachineType::IntPtr(), counters_table, counter_offset);

  Label counter_ok(this), counter_saturated(this, Label::kDeferred);

  Node* counter_reached_max = WordEqual(
      old_counter, IntPtrConstant(std::numeric_limits<uintptr_t>::max()));
  BranchIf(counter_reached_max, &counter_saturated, &counter_ok);

  Bind(&counter_ok);
  {
    Node* new_counter = IntPtrAdd(old_counter, IntPtrConstant(1));
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), counters_table,
                        counter_offset, new_counter);
    Goto(&counter_saturated);
  }

  Bind(&counter_saturated);
}

// static
bool InterpreterAssembler::TargetSupportsUnalignedAccess() {
#if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
  return false;
#elif V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_X87 || \
    V8_TARGET_ARCH_S390 || V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 || \
    V8_TARGET_ARCH_PPC
  return true;
#else
#error "Unknown Architecture"
#endif
}

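// Derives the number of interpreter registers from the frame size recorded in
// the bytecode array (the frame size is in bytes, one pointer per register).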
Node* InterpreterAssembler::RegisterCount() {
  Node* bytecode_array = LoadRegister(Register::bytecode_array());
  Node* frame_size = LoadObjectField(
      bytecode_array, BytecodeArray::kFrameSizeOffset, MachineType::Int32());
  return Word32Sar(frame_size, Int32Constant(kPointerSizeLog2));
}

Node* InterpreterAssembler::ExportRegisterFile(Node* array) {
  if (FLAG_debug_code) {
    Node* array_size = LoadAndUntagFixedArrayBaseLength(array);
    AbortIfWordNotEqual(
        array_size, RegisterCount(), kInvalidRegisterFileInGenerator);
  }

  Variable var_index(this, MachineRepresentation::kWord32);
  var_index.Bind(Int32Constant(0));

  // Iterate over register file and write values into array.
  // The mapping of register to array index must match that used in
  // BytecodeGraphBuilder::VisitResumeGenerator.
  Label loop(this, &var_index), done_loop(this);
  Goto(&loop);
  Bind(&loop);
  {
    Node* index = var_index.value();
    Node* condition = Int32LessThan(index, RegisterCount());
    GotoUnless(condition, &done_loop);

    Node* reg_index = Int32Sub(Int32Constant(Register(0).ToOperand()), index);
    Node* value = LoadRegister(ChangeInt32ToIntPtr(reg_index));

    StoreFixedArrayElement(array, index, value);

    var_index.Bind(Int32Add(index, Int32Constant(1)));
    Goto(&loop);
  }
  Bind(&done_loop);

  return array;
}

Node* InterpreterAssembler::ImportRegisterFile(Node* array) {
  if (FLAG_debug_code) {
    Node* array_size = LoadAndUntagFixedArrayBaseLength(array);
    AbortIfWordNotEqual(
        array_size, RegisterCount(), kInvalidRegisterFileInGenerator);
  }

  Variable var_index(this, MachineRepresentation::kWord32);
  var_index.Bind(Int32Constant(0));

  // Iterate over array and write values into register file. Also erase the
  // array contents to not keep them alive artificially.
  Label loop(this, &var_index), done_loop(this);
  Goto(&loop);
  Bind(&loop);
  {
    Node* index = var_index.value();
    Node* condition = Int32LessThan(index, RegisterCount());
    GotoUnless(condition, &done_loop);

    Node* value = LoadFixedArrayElement(array, index);

    Node* reg_index = Int32Sub(Int32Constant(Register(0).ToOperand()), index);
    StoreRegister(value, ChangeInt32ToIntPtr(reg_index));

    StoreFixedArrayElement(array, index, StaleRegisterConstant());

    var_index.Bind(Int32Add(index, Int32Constant(1)));
    Goto(&loop);
  }
  Bind(&done_loop);

  return array;
}

}  // namespace interpreter
}  // namespace internal
}  // namespace v8