// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/interpreter/interpreter-assembler.h"

#include <limits>
#include <ostream>

#include "src/code-factory.h"
#include "src/frames.h"
#include "src/interface-descriptors.h"
#include "src/interpreter/bytecodes.h"
#include "src/interpreter/interpreter.h"
#include "src/machine-type.h"
#include "src/macro-assembler.h"
#include "src/zone/zone.h"

namespace v8 {
namespace internal {
namespace interpreter {

using compiler::Node;

InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone,
                                           Bytecode bytecode,
                                           OperandScale operand_scale)
    : CodeStubAssembler(isolate, zone, InterpreterDispatchDescriptor(isolate),
                        Code::ComputeFlags(Code::BYTECODE_HANDLER),
                        Bytecodes::ToString(bytecode),
                        Bytecodes::ReturnCount(bytecode)),
      bytecode_(bytecode),
      operand_scale_(operand_scale),
      bytecode_offset_(this, MachineType::PointerRepresentation()),
      interpreted_frame_pointer_(this, MachineType::PointerRepresentation()),
      accumulator_(this, MachineRepresentation::kTagged),
      accumulator_use_(AccumulatorUse::kNone),
      made_call_(false),
      disable_stack_check_across_call_(false),
      stack_pointer_before_call_(nullptr) {
  accumulator_.Bind(Parameter(InterpreterDispatchDescriptor::kAccumulator));
  bytecode_offset_.Bind(
      Parameter(InterpreterDispatchDescriptor::kBytecodeOffset));
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
  }
}

InterpreterAssembler::~InterpreterAssembler() {
  // If the following check fails the handler does not use the
  // accumulator in the way described in the bytecode definitions in
  // bytecodes.h.
  DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));
}
Node* InterpreterAssembler::GetInterpretedFramePointer() {
  if (!interpreted_frame_pointer_.IsBound()) {
    interpreted_frame_pointer_.Bind(LoadParentFramePointer());
  }
  return interpreted_frame_pointer_.value();
}

Node* InterpreterAssembler::GetAccumulatorUnchecked() {
  return accumulator_.value();
}

Node* InterpreterAssembler::GetAccumulator() {
  DCHECK(Bytecodes::ReadsAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kRead;
  return GetAccumulatorUnchecked();
}

void InterpreterAssembler::SetAccumulator(Node* value) {
  DCHECK(Bytecodes::WritesAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
  accumulator_.Bind(value);
}

Node* InterpreterAssembler::GetContext() {
  return LoadRegister(Register::current_context());
}

void InterpreterAssembler::SetContext(Node* value) {
  StoreRegister(value, Register::current_context());
}
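
// Walks up the context chain |depth| times via Context::PREVIOUS_INDEX and
// returns the context found at that depth.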
Node* InterpreterAssembler::GetContextAtDepth(Node* context, Node* depth) {
  Variable cur_context(this, MachineRepresentation::kTaggedPointer);
  cur_context.Bind(context);

  Variable cur_depth(this, MachineRepresentation::kWord32);
  cur_depth.Bind(depth);

  Label context_found(this);

  Variable* context_search_loop_variables[2] = {&cur_depth, &cur_context};
  Label context_search(this, 2, context_search_loop_variables);

  // Fast path if the depth is 0.
  BranchIfWord32Equal(depth, Int32Constant(0), &context_found,
                      &context_search);

  // Loop until the depth is 0.
  Bind(&context_search);
  {
    cur_depth.Bind(Int32Sub(cur_depth.value(), Int32Constant(1)));
    cur_context.Bind(
        LoadContextSlot(cur_context.value(), Context::PREVIOUS_INDEX));

    BranchIfWord32Equal(cur_depth.value(), Int32Constant(0), &context_found,
                        &context_search);
  }

  Bind(&context_found);
  return cur_context.value();
}
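
// Walks up the context chain for at most |depth| levels and jumps to |target|
// as soon as a context whose extension slot is not the hole is found.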
void InterpreterAssembler::GotoIfHasContextExtensionUpToDepth(Node* context,
                                                              Node* depth,
                                                              Label* target) {
  Variable cur_context(this, MachineRepresentation::kTaggedPointer);
  cur_context.Bind(context);

  Variable cur_depth(this, MachineRepresentation::kWord32);
  cur_depth.Bind(depth);

  Variable* context_search_loop_variables[2] = {&cur_depth, &cur_context};
  Label context_search(this, 2, context_search_loop_variables);

  // Loop until the depth is 0.
  Goto(&context_search);
  Bind(&context_search);
  {
    // TODO(leszeks): We only need to do this check if the context had a sloppy
    // eval, we could pass in a context chain bitmask to figure out which
    // contexts actually need to be checked.

    Node* extension_slot =
        LoadContextSlot(cur_context.value(), Context::EXTENSION_INDEX);

    // Jump to the target if the extension slot is not a hole.
    GotoIf(WordNotEqual(extension_slot, TheHoleConstant()), target);

    cur_depth.Bind(Int32Sub(cur_depth.value(), Int32Constant(1)));
    cur_context.Bind(
        LoadContextSlot(cur_context.value(), Context::PREVIOUS_INDEX));

    GotoIf(Word32NotEqual(cur_depth.value(), Int32Constant(0)),
           &context_search);
  }
}

Node* InterpreterAssembler::BytecodeOffset() {
  return bytecode_offset_.value();
}

Node* InterpreterAssembler::BytecodeArrayTaggedPointer() {
  if (made_call_) {
    // If we have made a call, restore bytecode array from stack frame in case
    // the debugger has swapped us to the patched debugger bytecode array.
    return LoadRegister(Register::bytecode_array());
  } else {
    return Parameter(InterpreterDispatchDescriptor::kBytecodeArray);
  }
}

Node* InterpreterAssembler::DispatchTableRawPointer() {
  return Parameter(InterpreterDispatchDescriptor::kDispatchTable);
}

Node* InterpreterAssembler::RegisterLocation(Node* reg_index) {
  return IntPtrAdd(GetInterpretedFramePointer(),
                   RegisterFrameOffset(reg_index));
}

Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
  return WordShl(index, kPointerSizeLog2);
}

Node* InterpreterAssembler::LoadRegister(Register reg) {
  return Load(MachineType::AnyTagged(), GetInterpretedFramePointer(),
              IntPtrConstant(reg.ToOperand() << kPointerSizeLog2));
}

Node* InterpreterAssembler::LoadRegister(Node* reg_index) {
  return Load(MachineType::AnyTagged(), GetInterpretedFramePointer(),
              RegisterFrameOffset(reg_index));
}

Node* InterpreterAssembler::StoreRegister(Node* value, Register reg) {
  return StoreNoWriteBarrier(
      MachineRepresentation::kTagged, GetInterpretedFramePointer(),
      IntPtrConstant(reg.ToOperand() << kPointerSizeLog2), value);
}

Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) {
  return StoreNoWriteBarrier(MachineRepresentation::kTagged,
                             GetInterpretedFramePointer(),
                             RegisterFrameOffset(reg_index), value);
}

Node* InterpreterAssembler::NextRegister(Node* reg_index) {
  // Register indexes are negative, so the next index is minus one.
  return IntPtrAdd(reg_index, IntPtrConstant(-1));
}

Node* InterpreterAssembler::OperandOffset(int operand_index) {
  return IntPtrConstant(
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()));
}

Node* InterpreterAssembler::BytecodeOperandUnsignedByte(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  Node* operand_offset = OperandOffset(operand_index);
  return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
              IntPtrAdd(BytecodeOffset(), operand_offset));
}

Node* InterpreterAssembler::BytecodeOperandSignedByte(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  Node* operand_offset = OperandOffset(operand_index);
  Node* load = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
                    IntPtrAdd(BytecodeOffset(), operand_offset));

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}
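
// Reads an operand byte-by-byte on targets that do not support unaligned
// loads and assembles the bytes into a word, respecting the target's byte
// order.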
compiler::Node* InterpreterAssembler::BytecodeOperandReadUnaligned(
    int relative_offset, MachineType result_type) {
  static const int kMaxCount = 4;
  DCHECK(!TargetSupportsUnalignedAccess());

  int count;
  switch (result_type.representation()) {
    case MachineRepresentation::kWord16:
      count = 2;
      break;
    case MachineRepresentation::kWord32:
      count = 4;
      break;
    default:
      UNREACHABLE();
      break;
  }
  MachineType msb_type =
      result_type.IsSigned() ? MachineType::Int8() : MachineType::Uint8();

#if V8_TARGET_LITTLE_ENDIAN
  const int kStep = -1;
  int msb_offset = count - 1;
#elif V8_TARGET_BIG_ENDIAN
  const int kStep = 1;
  int msb_offset = 0;
#else
#error "Unknown Architecture"
#endif

  // Read the most significant byte into bytes[0] and then, in order, down to
  // the least significant byte in bytes[count - 1].
  DCHECK(count <= kMaxCount);
  compiler::Node* bytes[kMaxCount];
  for (int i = 0; i < count; i++) {
    MachineType machine_type = (i == 0) ? msb_type : MachineType::Uint8();
    Node* offset = IntPtrConstant(relative_offset + msb_offset + i * kStep);
    Node* array_offset = IntPtrAdd(BytecodeOffset(), offset);
    bytes[i] = Load(machine_type, BytecodeArrayTaggedPointer(), array_offset);
  }

  // Pack LSB to MSB.
  Node* result = bytes[--count];
  for (int i = 1; --count >= 0; i++) {
    Node* shift = Int32Constant(i * kBitsPerByte);
    Node* value = Word32Shl(bytes[count], shift);
    result = Word32Or(value, result);
  }
  return result;
}

Node* InterpreterAssembler::BytecodeOperandUnsignedShort(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(
      OperandSize::kShort,
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  if (TargetSupportsUnalignedAccess()) {
    return Load(MachineType::Uint16(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint16());
  }
}

Node* InterpreterAssembler::BytecodeOperandSignedShort(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(
      OperandSize::kShort,
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  Node* load;
  if (TargetSupportsUnalignedAccess()) {
    load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int16());
  }

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}

Node* InterpreterAssembler::BytecodeOperandUnsignedQuad(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  if (TargetSupportsUnalignedAccess()) {
    return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint32());
  }
}

Node* InterpreterAssembler::BytecodeOperandSignedQuad(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  Node* load;
  if (TargetSupportsUnalignedAccess()) {
    load = Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
  } else {
    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int32());
  }

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}

Node* InterpreterAssembler::BytecodeSignedOperand(int operand_index,
                                                  OperandSize operand_size) {
  DCHECK(!Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandSignedByte(operand_index);
    case OperandSize::kShort:
      return BytecodeOperandSignedShort(operand_index);
    case OperandSize::kQuad:
      return BytecodeOperandSignedQuad(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}

Node* InterpreterAssembler::BytecodeUnsignedOperand(int operand_index,
                                                    OperandSize operand_size) {
  DCHECK(Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandUnsignedByte(operand_index);
    case OperandSize::kShort:
      return BytecodeOperandUnsignedShort(operand_index);
    case OperandSize::kQuad:
      return BytecodeOperandUnsignedQuad(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}

Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
  DCHECK_EQ(OperandType::kRegCount,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandFlag(int operand_index) {
  DCHECK_EQ(OperandType::kFlag8,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kByte);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandUImm(int operand_index) {
  DCHECK_EQ(OperandType::kUImm,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
  DCHECK_EQ(OperandType::kImm,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeSignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
  DCHECK(OperandType::kIdx ==
         Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) {
  DCHECK(Bytecodes::IsRegisterOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeSignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandRuntimeId(int operand_index) {
  DCHECK(OperandType::kRuntimeId ==
         Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kShort);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandIntrinsicId(int operand_index) {
  DCHECK(OperandType::kIntrinsicId ==
         Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kByte);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
  Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
                                        BytecodeArray::kConstantPoolOffset);
  Node* entry_offset =
      IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                WordShl(index, kPointerSizeLog2));
  return Load(MachineType::AnyTagged(), constant_pool, entry_offset);
}
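
// Loads a Smi from the constant pool and returns it untagged. On 64-bit
// targets the Smi payload occupies the upper 32 bits of the word, so only
// that half is loaded (with the offset adjusted on little-endian) and then
// sign-extended; 32-bit targets load the tagged value and untag it.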
Node* InterpreterAssembler::LoadAndUntagConstantPoolEntry(Node* index) {
  Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
                                        BytecodeArray::kConstantPoolOffset);
  int offset = FixedArray::kHeaderSize - kHeapObjectTag;
#if V8_TARGET_LITTLE_ENDIAN
  if (Is64()) {
    offset += kPointerSize / 2;
  }
#endif
  Node* entry_offset =
      IntPtrAdd(IntPtrConstant(offset), WordShl(index, kPointerSizeLog2));
  if (Is64()) {
    return ChangeInt32ToInt64(
        Load(MachineType::Int32(), constant_pool, entry_offset));
  } else {
    return SmiUntag(
        Load(MachineType::AnyTagged(), constant_pool, entry_offset));
  }
}

Node* InterpreterAssembler::LoadContextSlot(Node* context, int slot_index) {
  return Load(MachineType::AnyTagged(), context,
              IntPtrConstant(Context::SlotOffset(slot_index)));
}

Node* InterpreterAssembler::LoadContextSlot(Node* context, Node* slot_index) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
  return Load(MachineType::AnyTagged(), context, offset);
}

Node* InterpreterAssembler::StoreContextSlot(Node* context, Node* slot_index,
                                             Node* value) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
  return Store(MachineRepresentation::kTagged, context, offset, value);
}
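
// The type feedback vector is reached through the closure's LiteralsArray.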
Node* InterpreterAssembler::LoadTypeFeedbackVector() {
  Node* function = LoadRegister(Register::function_closure());
  Node* literals = LoadObjectField(function, JSFunction::kLiteralsOffset);
  Node* vector =
      LoadObjectField(literals, LiteralsArray::kFeedbackVectorOffset);
  return vector;
}
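
// Before a call, the current bytecode offset is saved into its frame register
// so it survives the call, and (with --debug-code) the stack pointer is
// recorded so CallEpilogue can verify it was restored.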
void InterpreterAssembler::CallPrologue() {
  StoreRegister(SmiTag(BytecodeOffset()), Register::bytecode_offset());

  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    DCHECK(stack_pointer_before_call_ == nullptr);
    stack_pointer_before_call_ = LoadStackPointer();
  }
  made_call_ = true;
}

void InterpreterAssembler::CallEpilogue() {
  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    Node* stack_pointer_after_call = LoadStackPointer();
    Node* stack_pointer_before_call = stack_pointer_before_call_;
    stack_pointer_before_call_ = nullptr;
    AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call,
                        kUnexpectedStackPointer);
  }
}
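
// The call count for a feedback slot is kept as a Smi in the element that
// immediately follows the slot in the feedback vector.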
Node* InterpreterAssembler::IncrementCallCount(Node* type_feedback_vector,
                                               Node* slot_id) {
  Comment("increment call count");
  Node* call_count_slot = IntPtrAdd(slot_id, IntPtrConstant(1));
  Node* call_count =
      LoadFixedArrayElement(type_feedback_vector, call_count_slot);
  Node* new_count = SmiAdd(call_count, SmiTag(Int32Constant(1)));
  // Count is Smi, so we don't need a write barrier.
  return StoreFixedArrayElement(type_feedback_vector, call_count_slot,
                                new_count, SKIP_WRITE_BARRIER);
}

Node* InterpreterAssembler::CallJSWithFeedback(Node* function, Node* context,
                                               Node* first_arg, Node* arg_count,
                                               Node* slot_id,
                                               Node* type_feedback_vector,
                                               TailCallMode tail_call_mode) {
  // Static checks to assert it is safe to examine the type feedback element.
  // We don't know that we have a weak cell. We might have a private symbol
  // or an AllocationSite, but the memory is safe to examine.
  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
  // FixedArray.
  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
  // computed, meaning that it can't appear to be a pointer. If the low bit is
  // 0, then hash is computed, but the 0 bit prevents the field from appearing
  // to be a pointer.
  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                    WeakCell::kValueOffset &&
                WeakCell::kValueOffset == Symbol::kHashFieldSlot);

  Variable return_value(this, MachineRepresentation::kTagged);
  Label handle_monomorphic(this), extra_checks(this), end(this), call(this),
      call_function(this), call_without_feedback(this);

  // A slot id of 0 indicates that no type feedback is available; in that case
  // call through the Call builtin without collecting feedback.
  STATIC_ASSERT(TypeFeedbackVector::kReservedIndexCount > 0);
  Node* is_feedback_unavailable = Word32Equal(slot_id, Int32Constant(0));
  GotoIf(is_feedback_unavailable, &call_without_feedback);

  // The checks. First, does function match the recorded monomorphic target?
  Node* feedback_element = LoadFixedArrayElement(type_feedback_vector, slot_id);
  Node* feedback_value = LoadWeakCellValue(feedback_element);
  Node* is_monomorphic = WordEqual(function, feedback_value);
  BranchIf(is_monomorphic, &handle_monomorphic, &extra_checks);

  Bind(&handle_monomorphic);
  {
    // The compare above could have been a SMI/SMI comparison. Guard against
    // this convincing us that we have a monomorphic JSFunction.
    Node* is_smi = WordIsSmi(function);
    GotoIf(is_smi, &extra_checks);

    // Increment the call count.
    IncrementCallCount(type_feedback_vector, slot_id);

    // Call using call function builtin.
    Callable callable = CodeFactory::InterpreterPushArgsAndCall(
        isolate(), tail_call_mode, CallableType::kJSFunction);
    Node* code_target = HeapConstant(callable.code());
    Node* ret_value = CallStub(callable.descriptor(), code_target, context,
                               arg_count, first_arg, function);
    return_value.Bind(ret_value);
    Goto(&end);
  }

  Bind(&extra_checks);
  {
    Label check_initialized(this, Label::kDeferred), mark_megamorphic(this),
        check_allocation_site(this),
        create_allocation_site(this, Label::kDeferred);
    // Check if it is a megamorphic target
    Node* is_megamorphic = WordEqual(
        feedback_element,
        HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())));
    BranchIf(is_megamorphic, &call, &check_allocation_site);

    Bind(&check_allocation_site);
    {
      Node* is_allocation_site =
          WordEqual(LoadMap(feedback_element),
                    LoadRoot(Heap::kAllocationSiteMapRootIndex));
      GotoUnless(is_allocation_site, &check_initialized);

      // If it is not the Array() function, mark megamorphic.
      Node* context_slot =
          LoadFixedArrayElement(LoadNativeContext(context),
                                Int32Constant(Context::ARRAY_FUNCTION_INDEX));
      Node* is_array_function = WordEqual(context_slot, function);
      GotoUnless(is_array_function, &mark_megamorphic);

      // It is a monomorphic Array function. Increment the call count.
      IncrementCallCount(type_feedback_vector, slot_id);

      // Call ArrayConstructorStub.
      Callable callable_call =
          CodeFactory::InterpreterPushArgsAndConstructArray(isolate());
      Node* code_target_call = HeapConstant(callable_call.code());
      Node* ret_value =
          CallStub(callable_call.descriptor(), code_target_call, context,
                   arg_count, function, feedback_element, first_arg);
      return_value.Bind(ret_value);
      Goto(&end);
    }

    Bind(&check_initialized);
    {
      Label possibly_monomorphic(this);
      // Check if it is uninitialized.
      Node* is_uninitialized = WordEqual(
          feedback_element,
          HeapConstant(TypeFeedbackVector::UninitializedSentinel(isolate())));
      GotoUnless(is_uninitialized, &mark_megamorphic);

      Node* is_smi = WordIsSmi(function);
      GotoIf(is_smi, &mark_megamorphic);

      // Check if function is an object of JSFunction type
      Node* instance_type = LoadInstanceType(function);
      Node* is_js_function =
          WordEqual(instance_type, Int32Constant(JS_FUNCTION_TYPE));
      GotoUnless(is_js_function, &mark_megamorphic);

      // Check if it is the Array() function.
      Node* context_slot =
          LoadFixedArrayElement(LoadNativeContext(context),
                                Int32Constant(Context::ARRAY_FUNCTION_INDEX));
      Node* is_array_function = WordEqual(context_slot, function);
      GotoIf(is_array_function, &create_allocation_site);

      // Check if the function belongs to the same native context
      Node* native_context = LoadNativeContext(
          LoadObjectField(function, JSFunction::kContextOffset));
      Node* is_same_native_context =
          WordEqual(native_context, LoadNativeContext(context));
      GotoUnless(is_same_native_context, &mark_megamorphic);

      CreateWeakCellInFeedbackVector(type_feedback_vector, SmiTag(slot_id),
                                     function);

      // Call using call function builtin.
      Goto(&call_function);
    }

    Bind(&create_allocation_site);
    {
      CreateAllocationSiteInFeedbackVector(type_feedback_vector,
                                           SmiTag(slot_id));

      // Call using CallFunction builtin. CallICs have a PREMONOMORPHIC state.
      // They start collecting feedback only when a call is executed the second
      // time. So, do not pass any feedback here.
      Goto(&call_function);
    }

    Bind(&mark_megamorphic);
    {
      // Mark it as a megamorphic.
      // MegamorphicSentinel is created as a part of Heap::InitialObjects
      // and will not move during a GC. So it is safe to skip write barrier.
      DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex));
      StoreFixedArrayElement(
          type_feedback_vector, slot_id,
          HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())),
          SKIP_WRITE_BARRIER);
      Goto(&call);
    }
  }

  Bind(&call_function);
  {
    // Increment the call count.
    IncrementCallCount(type_feedback_vector, slot_id);

    Callable callable_call = CodeFactory::InterpreterPushArgsAndCall(
        isolate(), tail_call_mode, CallableType::kJSFunction);
    Node* code_target_call = HeapConstant(callable_call.code());
    Node* ret_value = CallStub(callable_call.descriptor(), code_target_call,
                               context, arg_count, first_arg, function);
    return_value.Bind(ret_value);
    Goto(&end);
  }

  Bind(&call);
  {
    // Increment the call count.
    IncrementCallCount(type_feedback_vector, slot_id);

    // Call using call builtin.
    Callable callable_call = CodeFactory::InterpreterPushArgsAndCall(
        isolate(), tail_call_mode, CallableType::kAny);
    Node* code_target_call = HeapConstant(callable_call.code());
    Node* ret_value = CallStub(callable_call.descriptor(), code_target_call,
                               context, arg_count, first_arg, function);
    return_value.Bind(ret_value);
    Goto(&end);
  }

  Bind(&call_without_feedback);
  {
    // Call using call builtin.
    Callable callable_call = CodeFactory::InterpreterPushArgsAndCall(
        isolate(), tail_call_mode, CallableType::kAny);
    Node* code_target_call = HeapConstant(callable_call.code());
    Node* ret_value = CallStub(callable_call.descriptor(), code_target_call,
                               context, arg_count, first_arg, function);
    return_value.Bind(ret_value);
    Goto(&end);
  }

  Bind(&end);
  return return_value.value();
}
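
// Calls a JS callable without collecting type feedback, through the
// InterpreterPushArgsAndCall builtin.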
Node* InterpreterAssembler::CallJS(Node* function, Node* context,
                                   Node* first_arg, Node* arg_count,
                                   TailCallMode tail_call_mode) {
  Callable callable = CodeFactory::InterpreterPushArgsAndCall(
      isolate(), tail_call_mode, CallableType::kAny);
  Node* code_target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  first_arg, function);
}
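
// Implements the construct bytecode: collects weak-cell or allocation-site
// feedback for JSFunction constructors in the feedback vector slot, then
// constructs through the InterpreterPushArgsAndConstruct builtin.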
Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context,
                                          Node* new_target, Node* first_arg,
                                          Node* arg_count, Node* slot_id,
                                          Node* type_feedback_vector) {
  Label call_construct(this), js_function(this), end(this);
  Variable return_value(this, MachineRepresentation::kTagged);
  Variable allocation_feedback(this, MachineRepresentation::kTagged);
  allocation_feedback.Bind(UndefinedConstant());

  // Slot id of 0 is used to indicate no type feedback is available.
  STATIC_ASSERT(TypeFeedbackVector::kReservedIndexCount > 0);
  Node* is_feedback_unavailable = Word32Equal(slot_id, Int32Constant(0));
  GotoIf(is_feedback_unavailable, &call_construct);

  // Check that the constructor is not a smi.
  Node* is_smi = WordIsSmi(constructor);
  GotoIf(is_smi, &call_construct);

  // Check that constructor is a JSFunction.
  Node* instance_type = LoadInstanceType(constructor);
  Node* is_js_function =
      WordEqual(instance_type, Int32Constant(JS_FUNCTION_TYPE));
  BranchIf(is_js_function, &js_function, &call_construct);

  Bind(&js_function);
  {
    // Cache the called function in a feedback vector slot. Cache states
    // are uninitialized, monomorphic (indicated by a JSFunction), and
    // megamorphic.
    // TODO(mythria/v8:5210): Check if it is better to mark extra_checks as a
    // deferred block so that call_construct_function will be scheduled.
    Label extra_checks(this), call_construct_function(this);

    Node* feedback_element =
        LoadFixedArrayElement(type_feedback_vector, slot_id);
    Node* feedback_value = LoadWeakCellValue(feedback_element);
    Node* is_monomorphic = WordEqual(constructor, feedback_value);
    BranchIf(is_monomorphic, &call_construct_function, &extra_checks);

    Bind(&extra_checks);
    {
      Label mark_megamorphic(this), initialize(this),
          check_allocation_site(this), check_initialized(this),
          set_alloc_feedback_and_call(this);
      {
        // Check if it is a megamorphic target
        Comment("check if megamorphic");
        Node* is_megamorphic = WordEqual(
            feedback_element,
            HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())));
        GotoIf(is_megamorphic, &call_construct_function);

        Comment("check if weak cell");
        Node* is_weak_cell = WordEqual(LoadMap(feedback_element),
                                       LoadRoot(Heap::kWeakCellMapRootIndex));
        GotoUnless(is_weak_cell, &check_allocation_site);
        // If the weak cell is cleared, we have a new chance to become
        // monomorphic.
        Comment("check if weak cell is cleared");
        Node* is_smi = WordIsSmi(feedback_value);
        BranchIf(is_smi, &initialize, &mark_megamorphic);
      }

      Bind(&check_allocation_site);
      {
        Comment("check if it is an allocation site");
        Node* is_allocation_site =
            WordEqual(LoadObjectField(feedback_element, 0),
                      LoadRoot(Heap::kAllocationSiteMapRootIndex));
        GotoUnless(is_allocation_site, &check_initialized);

        // Make sure the function is the Array() function
        Node* context_slot =
            LoadFixedArrayElement(LoadNativeContext(context),
                                  Int32Constant(Context::ARRAY_FUNCTION_INDEX));
        Node* is_array_function = WordEqual(context_slot, constructor);
        BranchIf(is_array_function, &set_alloc_feedback_and_call,
                 &mark_megamorphic);
      }

      Bind(&set_alloc_feedback_and_call);
      {
        allocation_feedback.Bind(feedback_element);
        Goto(&call_construct_function);
      }

      Bind(&check_initialized);
      {
        // Check if it is uninitialized.
        Comment("check if uninitialized");
        Node* is_uninitialized = WordEqual(
            feedback_element, LoadRoot(Heap::kuninitialized_symbolRootIndex));
        BranchIf(is_uninitialized, &initialize, &mark_megamorphic);
      }

      Bind(&initialize);
      {
        Label create_weak_cell(this), create_allocation_site(this);
        Comment("initialize the feedback element");
        // Check that it is the Array() function.
        Node* context_slot =
            LoadFixedArrayElement(LoadNativeContext(context),
                                  Int32Constant(Context::ARRAY_FUNCTION_INDEX));
        Node* is_array_function = WordEqual(context_slot, constructor);
        BranchIf(is_array_function, &create_allocation_site, &create_weak_cell);

        Bind(&create_allocation_site);
        {
          Node* site = CreateAllocationSiteInFeedbackVector(
              type_feedback_vector, SmiTag(slot_id));
          allocation_feedback.Bind(site);
          Goto(&call_construct_function);
        }

        Bind(&create_weak_cell);
        {
          CreateWeakCellInFeedbackVector(type_feedback_vector, SmiTag(slot_id),
                                         constructor);
          Goto(&call_construct_function);
        }
      }

      Bind(&mark_megamorphic);
      {
        // MegamorphicSentinel is an immortal immovable object so
        // write-barrier is not needed.
        Comment("transition to megamorphic");
        DCHECK(
            Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex));
        StoreFixedArrayElement(
            type_feedback_vector, slot_id,
            HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())),
            SKIP_WRITE_BARRIER);
        Goto(&call_construct_function);
      }
    }

    Bind(&call_construct_function);
    {
      Comment("call using callConstructFunction");
      IncrementCallCount(type_feedback_vector, slot_id);
      Callable callable_function = CodeFactory::InterpreterPushArgsAndConstruct(
          isolate(), CallableType::kJSFunction);
      return_value.Bind(CallStub(callable_function.descriptor(),
                                 HeapConstant(callable_function.code()),
                                 context, arg_count, new_target, constructor,
                                 allocation_feedback.value(), first_arg));
      Goto(&end);
    }
  }

  Bind(&call_construct);
  {
    Comment("call using callConstruct builtin");
    Callable callable = CodeFactory::InterpreterPushArgsAndConstruct(
        isolate(), CallableType::kAny);
    Node* code_target = HeapConstant(callable.code());
    return_value.Bind(CallStub(callable.descriptor(), code_target, context,
                               arg_count, new_target, constructor,
                               UndefinedConstant(), first_arg));
    Goto(&end);
  }

  Bind(&end);
  return return_value.value();
}
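
// Calls a runtime function through the interpreter's CEntry stub; the entry
// point is looked up in the runtime function table from the function id.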
Node* InterpreterAssembler::CallRuntimeN(Node* function_id, Node* context,
                                         Node* first_arg, Node* arg_count,
                                         int result_size) {
  Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size);
  Node* code_target = HeapConstant(callable.code());

  // Get the function entry from the function id.
  Node* function_table = ExternalConstant(
      ExternalReference::runtime_function_table_address(isolate()));
  Node* function_offset =
      Int32Mul(function_id, Int32Constant(sizeof(Runtime::Function)));
  Node* function = IntPtrAdd(function_table, function_offset);
  Node* function_entry =
      Load(MachineType::Pointer(), function,
           IntPtrConstant(offsetof(Runtime::Function, entry)));

  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  first_arg, function_entry, result_size);
}
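
// Adds |weight| to the interrupt budget stored in the BytecodeArray header;
// if the budget drops below zero, the interrupt runtime function is called
// and the budget is reset.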
void InterpreterAssembler::UpdateInterruptBudget(Node* weight) {
  // TODO(rmcilroy): It might be worthwhile to only update the budget for
  // backwards branches. Those are distinguishable by the {JumpLoop} bytecode.

  Label ok(this), interrupt_check(this, Label::kDeferred), end(this);
  Node* budget_offset =
      IntPtrConstant(BytecodeArray::kInterruptBudgetOffset - kHeapObjectTag);

  // Update budget by |weight| and check if it reaches zero.
  Variable new_budget(this, MachineRepresentation::kWord32);
  Node* old_budget =
      Load(MachineType::Int32(), BytecodeArrayTaggedPointer(), budget_offset);
  new_budget.Bind(Int32Add(old_budget, weight));
  Node* condition =
      Int32GreaterThanOrEqual(new_budget.value(), Int32Constant(0));
  Branch(condition, &ok, &interrupt_check);

  // Perform interrupt and reset budget.
  Bind(&interrupt_check);
  {
    CallRuntime(Runtime::kInterrupt, GetContext());
    new_budget.Bind(Int32Constant(Interpreter::InterruptBudget()));
    Goto(&ok);
  }

  // Update budget.
  Bind(&ok);
  StoreNoWriteBarrier(MachineRepresentation::kWord32,
                      BytecodeArrayTaggedPointer(), budget_offset,
                      new_budget.value());
}

Node* InterpreterAssembler::Advance() {
  return Advance(Bytecodes::Size(bytecode_, operand_scale_));
}

Node* InterpreterAssembler::Advance(int delta) {
  return Advance(IntPtrConstant(delta));
}

Node* InterpreterAssembler::Advance(Node* delta) {
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
  }
  Node* next_offset = IntPtrAdd(BytecodeOffset(), delta);
  bytecode_offset_.Bind(next_offset);
  return next_offset;
}
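
// A jump updates the interrupt budget with the (possibly negative) delta,
// advances the bytecode offset by it, and dispatches straight to the handler
// of the bytecode found at the target offset.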
Node* InterpreterAssembler::Jump(Node* delta) {
  DCHECK(!Bytecodes::IsStarLookahead(bytecode_, operand_scale_));

  UpdateInterruptBudget(delta);
  Node* new_bytecode_offset = Advance(delta);
  Node* target_bytecode = LoadBytecode(new_bytecode_offset);
  return DispatchToBytecode(target_bytecode, new_bytecode_offset);
}

void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) {
  Label match(this), no_match(this);

  BranchIf(condition, &match, &no_match);
  Bind(&match);
  Jump(delta);
  Bind(&no_match);
  Dispatch();
}

void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
  JumpConditional(WordEqual(lhs, rhs), delta);
}

void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
                                              Node* delta) {
  JumpConditional(WordNotEqual(lhs, rhs), delta);
}

Node* InterpreterAssembler::LoadBytecode(compiler::Node* bytecode_offset) {
  Node* bytecode =
      Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), bytecode_offset);
  if (kPointerSize == 8) {
    bytecode = ChangeUint32ToUint64(bytecode);
  }
  return bytecode;
}
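
// Peeks at the bytecode that follows the current one; if it is a Star, the
// Star is executed inline and the bytecode after it is returned, saving a
// full dispatch for the common accumulator-to-register move.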
Node* InterpreterAssembler::StarDispatchLookahead(Node* target_bytecode) {
  Label do_inline_star(this), done(this);

  Variable var_bytecode(this, MachineRepresentation::kWord8);
  var_bytecode.Bind(target_bytecode);

  Node* star_bytecode = IntPtrConstant(static_cast<int>(Bytecode::kStar));
  Node* is_star = WordEqual(target_bytecode, star_bytecode);
  BranchIf(is_star, &do_inline_star, &done);

  Bind(&do_inline_star);
  {
    InlineStar();
    var_bytecode.Bind(LoadBytecode(BytecodeOffset()));
    Goto(&done);
  }
  Bind(&done);
  return var_bytecode.value();
}

void InterpreterAssembler::InlineStar() {
  Bytecode previous_bytecode = bytecode_;
  AccumulatorUse previous_acc_use = accumulator_use_;

  bytecode_ = Bytecode::kStar;
  accumulator_use_ = AccumulatorUse::kNone;

  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
  }
  StoreRegister(GetAccumulator(), BytecodeOperandReg(0));

  DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));

  Advance();
  bytecode_ = previous_bytecode;
  accumulator_use_ = previous_acc_use;
}

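// Dispatch advances past the current bytecode, loads the next one (applying
// the Star lookahead when it is enabled for this bytecode and operand scale),
// and tail-calls the handler for that bytecode.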
Node* InterpreterAssembler::Dispatch() {
  Node* target_offset = Advance();
  Node* target_bytecode = LoadBytecode(target_offset);

  if (Bytecodes::IsStarLookahead(bytecode_, operand_scale_)) {
    target_bytecode = StarDispatchLookahead(target_bytecode);
  }
  return DispatchToBytecode(target_bytecode, BytecodeOffset());
}

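// DispatchToBytecode looks up the handler for |target_bytecode| in the
// dispatch table and tail-calls it. The table is an array of code entry
// pointers indexed by bytecode value, so the load offset is
// target_bytecode << kPointerSizeLog2 (for example, bytecode 0x0b on a
// 64-bit target reads the pointer at byte offset 0x0b * 8 = 0x58; values are
// illustrative only).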
Node* InterpreterAssembler::DispatchToBytecode(Node* target_bytecode,
                                               Node* new_bytecode_offset) {
  if (FLAG_trace_ignition_dispatches) {
    TraceBytecodeDispatch(target_bytecode);
  }

  Node* target_code_entry =
      Load(MachineType::Pointer(), DispatchTableRawPointer(),
           WordShl(target_bytecode, IntPtrConstant(kPointerSizeLog2)));

  return DispatchToBytecodeHandlerEntry(target_code_entry, new_bytecode_offset);
}

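// DispatchToBytecodeHandler receives a tagged Code object rather than a raw
// entry address; the entry point is computed by skipping the Code header
// (Code::kHeaderSize) and removing the heap-object tag before tail-calling.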
Node* InterpreterAssembler::DispatchToBytecodeHandler(Node* handler,
                                                      Node* bytecode_offset) {
  Node* handler_entry =
      IntPtrAdd(handler, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
  return DispatchToBytecodeHandlerEntry(handler_entry, bytecode_offset);
}

Node* InterpreterAssembler::DispatchToBytecodeHandlerEntry(
    Node* handler_entry, Node* bytecode_offset) {
  InterpreterDispatchDescriptor descriptor(isolate());
  Node* args[] = {GetAccumulatorUnchecked(), bytecode_offset,
                  BytecodeArrayTaggedPointer(), DispatchTableRawPointer()};
  return TailCallBytecodeDispatch(descriptor, handler_entry, args);
}

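// DispatchWide handles the bytecode that follows a wide prefix. The prefix
// selects one of the scaled sections of the dispatch table: base index
// 1 << kBitsPerByte (256) for OperandScale::kDouble and 2 << kBitsPerByte
// (512) for OperandScale::kQuadruple. As a worked example, a following
// bytecode with value 0x0b under kDouble scaling dispatches through table
// slot 256 + 0x0b = 267 (illustrative; the layout is spelled out below).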
void InterpreterAssembler::DispatchWide(OperandScale operand_scale) {
  // Dispatching a wide bytecode requires treating the prefix
  // bytecode as a base pointer into the dispatch table and dispatching
  // the bytecode that follows relative to this base.
  //
  // Indices 0-255 correspond to bytecodes with operand_scale == 0
  // Indices 256-511 correspond to bytecodes with operand_scale == 1
  // Indices 512-767 correspond to bytecodes with operand_scale == 2
  Node* next_bytecode_offset = Advance(1);
  Node* next_bytecode = LoadBytecode(next_bytecode_offset);

  if (FLAG_trace_ignition_dispatches) {
    TraceBytecodeDispatch(next_bytecode);
  }

  Node* base_index;
  switch (operand_scale) {
    case OperandScale::kDouble:
      base_index = IntPtrConstant(1 << kBitsPerByte);
      break;
    case OperandScale::kQuadruple:
      base_index = IntPtrConstant(2 << kBitsPerByte);
      break;
    default:
      UNREACHABLE();
      base_index = nullptr;
  }
  Node* target_index = IntPtrAdd(base_index, next_bytecode);
  Node* target_code_entry =
      Load(MachineType::Pointer(), DispatchTableRawPointer(),
           WordShl(target_index, kPointerSizeLog2));

  DispatchToBytecodeHandlerEntry(target_code_entry, next_bytecode_offset);
}

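// TruncateTaggedToWord32WithFeedback converts a tagged |value| to an untagged
// 32-bit integer while recording type feedback in |var_type_feedback|:
// kSignedSmall is OR-ed in for Smis, kNumber for HeapNumbers, and the
// feedback becomes kAny once a non-number has to go through the
// NonNumberToNumber conversion. The conversion result is fed back into the
// loop, which should then terminate on the next pass.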
Node* InterpreterAssembler::TruncateTaggedToWord32WithFeedback(
    Node* context, Node* value, Variable* var_type_feedback) {
  // We might need to loop once due to ToNumber conversion.
  Variable var_value(this, MachineRepresentation::kTagged),
      var_result(this, MachineRepresentation::kWord32);
  Variable* loop_vars[] = {&var_value, var_type_feedback};
  Label loop(this, 2, loop_vars), done_loop(this, &var_result);
  var_value.Bind(value);
  var_type_feedback->Bind(Int32Constant(BinaryOperationFeedback::kNone));
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    Label if_valueissmi(this), if_valueisnotsmi(this);
    Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

    Bind(&if_valueissmi);
    {
      // Convert the Smi {value}.
      var_result.Bind(SmiToWord32(value));
      var_type_feedback->Bind(
          Word32Or(var_type_feedback->value(),
                   Int32Constant(BinaryOperationFeedback::kSignedSmall)));
      Goto(&done_loop);
    }

    Bind(&if_valueisnotsmi);
    {
      // Check if {value} is a HeapNumber.
      Label if_valueisheapnumber(this),
          if_valueisnotheapnumber(this, Label::kDeferred);
      Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
             &if_valueisheapnumber, &if_valueisnotheapnumber);

      Bind(&if_valueisheapnumber);
      {
        // Truncate the floating point value.
        var_result.Bind(TruncateHeapNumberValueToWord32(value));
        var_type_feedback->Bind(
            Word32Or(var_type_feedback->value(),
                     Int32Constant(BinaryOperationFeedback::kNumber)));
        Goto(&done_loop);
      }

      Bind(&if_valueisnotheapnumber);
      {
        // Convert the {value} to a Number first.
        Callable callable = CodeFactory::NonNumberToNumber(isolate());
        var_value.Bind(CallStub(callable, context, value));
        var_type_feedback->Bind(Int32Constant(BinaryOperationFeedback::kAny));
        Goto(&loop);
      }
    }
  }
  Bind(&done_loop);
  return var_result.value();
}

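// On return, the interrupt budget is charged with a negative weight derived
// from the current bytecode offset, approximating a backedge over the whole
// function body so that loop-free functions still consume budget and can
// trigger interrupts.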
void InterpreterAssembler::UpdateInterruptBudgetOnReturn() {
  // TODO(rmcilroy): Investigate whether it is worth supporting self
  // optimization of primitive functions like FullCodegen.

  // Update profiling count by -BytecodeOffset to simulate backedge to start of
  // function.
  Node* profiling_weight =
      Int32Sub(Int32Constant(kHeapObjectTag + BytecodeArray::kHeaderSize),
               BytecodeOffset());
  UpdateInterruptBudget(profiling_weight);
}

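// StackCheckTriggeredInterrupt compares the machine stack pointer against the
// isolate's stack limit. The stack guard adjusts this limit to force the
// comparison to fail when an interrupt is requested, so the same check covers
// both genuine stack overflow and pending interrupts.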
Node* InterpreterAssembler::StackCheckTriggeredInterrupt() {
  Node* sp = LoadStackPointer();
  Node* stack_limit = Load(
      MachineType::Pointer(),
      ExternalConstant(ExternalReference::address_of_stack_limit(isolate())));
  return UintPtrLessThan(sp, stack_limit);
}

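// LoadOSRNestingLevel reads the int8 OSR nesting-level field straight out of
// the BytecodeArray header; kHeapObjectTag is subtracted because the load
// goes through the tagged pointer with a raw byte offset.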
Node* InterpreterAssembler::LoadOSRNestingLevel() {
  Node* offset =
      IntPtrConstant(BytecodeArray::kOSRNestingLevelOffset - kHeapObjectTag);
  return Load(MachineType::Int8(), BytecodeArrayTaggedPointer(), offset);
}

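// Abort boxes |bailout_reason| as a Smi and calls into the runtime. The
// stack-pointer check across the call is suppressed, presumably because the
// abort path does not return to normal bytecode execution.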
void InterpreterAssembler::Abort(BailoutReason bailout_reason) {
  disable_stack_check_across_call_ = true;
  Node* abort_id = SmiTag(Int32Constant(bailout_reason));
  CallRuntime(Runtime::kAbort, GetContext(), abort_id);
  disable_stack_check_across_call_ = false;
}

void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs,
                                               BailoutReason bailout_reason) {
  Label ok(this), abort(this, Label::kDeferred);
  BranchIfWordEqual(lhs, rhs, &ok, &abort);

  Bind(&abort);
  Abort(bailout_reason);
  Goto(&ok);

  Bind(&ok);
}

void InterpreterAssembler::TraceBytecode(Runtime::FunctionId function_id) {
  CallRuntime(function_id, GetContext(), BytecodeArrayTaggedPointer(),
              SmiTag(BytecodeOffset()), GetAccumulatorUnchecked());
}

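// TraceBytecodeDispatch bumps a (source bytecode, target bytecode) counter in
// a flat table of (kLast + 1) * (kLast + 1) pointer-sized slots: the row base
// is source * (kLast + 1), the column is the target bytecode, and the byte
// offset is (row_base + target) << kPointerSizeLog2. Counters saturate at the
// maximum uintptr_t value instead of wrapping.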
void InterpreterAssembler::TraceBytecodeDispatch(Node* target_bytecode) {
  Node* counters_table = ExternalConstant(
      ExternalReference::interpreter_dispatch_counters(isolate()));
  Node* source_bytecode_table_index = IntPtrConstant(
      static_cast<int>(bytecode_) * (static_cast<int>(Bytecode::kLast) + 1));

  Node* counter_offset =
      WordShl(IntPtrAdd(source_bytecode_table_index, target_bytecode),
              IntPtrConstant(kPointerSizeLog2));
  Node* old_counter =
      Load(MachineType::IntPtr(), counters_table, counter_offset);

  Label counter_ok(this), counter_saturated(this, Label::kDeferred);

  Node* counter_reached_max = WordEqual(
      old_counter, IntPtrConstant(std::numeric_limits<uintptr_t>::max()));
  BranchIf(counter_reached_max, &counter_saturated, &counter_ok);

  Bind(&counter_ok);
  {
    Node* new_counter = IntPtrAdd(old_counter, IntPtrConstant(1));
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), counters_table,
                        counter_offset, new_counter);
    Goto(&counter_saturated);
  }

  Bind(&counter_saturated);
}

// static
bool InterpreterAssembler::TargetSupportsUnalignedAccess() {
#if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
  return false;
#elif V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_X87 || \
    V8_TARGET_ARCH_S390 || V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 ||  \
    V8_TARGET_ARCH_PPC
  return true;
#else
#error "Unknown Architecture"
#endif
}

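// RegisterCount derives the number of interpreter registers from the frame
// size recorded in the BytecodeArray: frame_size is in bytes, so shifting
// right by kPointerSizeLog2 divides by the pointer size (e.g. a 40-byte frame
// on a 64-bit target corresponds to 5 registers; illustrative values).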
Node* InterpreterAssembler::RegisterCount() {
  Node* bytecode_array = LoadRegister(Register::bytecode_array());
  Node* frame_size = LoadObjectField(
      bytecode_array, BytecodeArray::kFrameSizeOffset, MachineType::Int32());
  return Word32Sar(frame_size, Int32Constant(kPointerSizeLog2));
}

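// ExportRegisterFile copies the live register file into |array|, as used when
// suspending a generator. Array index i maps to the register with operand
// Register(0).ToOperand() - i, i.e. registers are visited from r0 downwards
// through the frame; the importing side must use the same mapping.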
Node* InterpreterAssembler::ExportRegisterFile(Node* array) {
  if (FLAG_debug_code) {
    Node* array_size = LoadAndUntagFixedArrayBaseLength(array);
    AbortIfWordNotEqual(
        array_size, RegisterCount(), kInvalidRegisterFileInGenerator);
  }

  Variable var_index(this, MachineRepresentation::kWord32);
  var_index.Bind(Int32Constant(0));

  // Iterate over register file and write values into array.
  // The mapping of register to array index must match that used in
  // BytecodeGraphBuilder::VisitResumeGenerator.
  Label loop(this, &var_index), done_loop(this);
  Goto(&loop);
  Bind(&loop);
  {
    Node* index = var_index.value();
    Node* condition = Int32LessThan(index, RegisterCount());
    GotoUnless(condition, &done_loop);

    Node* reg_index =
        Int32Sub(Int32Constant(Register(0).ToOperand()), index);
    Node* value = LoadRegister(ChangeInt32ToIntPtr(reg_index));

    StoreFixedArrayElement(array, index, value);

    var_index.Bind(Int32Add(index, Int32Constant(1)));
    Goto(&loop);
  }
  Bind(&done_loop);

  return array;
}

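// ImportRegisterFile is the inverse of ExportRegisterFile: it writes |array|
// back into the register file using the same index mapping, and overwrites
// each array slot with the stale-register sentinel so the exported values are
// not kept alive artificially.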
Node* InterpreterAssembler::ImportRegisterFile(Node* array) {
  if (FLAG_debug_code) {
    Node* array_size = LoadAndUntagFixedArrayBaseLength(array);
    AbortIfWordNotEqual(
        array_size, RegisterCount(), kInvalidRegisterFileInGenerator);
  }

  Variable var_index(this, MachineRepresentation::kWord32);
  var_index.Bind(Int32Constant(0));

  // Iterate over array and write values into register file. Also erase the
  // array contents to not keep them alive artificially.
  Label loop(this, &var_index), done_loop(this);
  Goto(&loop);
  Bind(&loop);
  {
    Node* index = var_index.value();
    Node* condition = Int32LessThan(index, RegisterCount());
    GotoUnless(condition, &done_loop);

    Node* value = LoadFixedArrayElement(array, index);

    Node* reg_index =
        Int32Sub(Int32Constant(Register(0).ToOperand()), index);
    StoreRegister(value, ChangeInt32ToIntPtr(reg_index));

    StoreFixedArrayElement(array, index, StaleRegisterConstant());

    var_index.Bind(Int32Add(index, Int32Constant(1)));
    Goto(&loop);
  }
  Bind(&done_loop);

  return array;
}

}  // namespace interpreter
}  // namespace internal
}  // namespace v8