// Copyright 2015 the V8 project authors. All rights reserved.
| 2 | // Use of this source code is governed by a BSD-style license that can be |
| 3 | // found in the LICENSE file. |
| 4 | |
| 5 | #include "src/interpreter/interpreter-assembler.h" |
| 6 | |
| 7 | #include <ostream> |
| 8 | |
| 9 | #include "src/code-factory.h" |
| 10 | #include "src/frames.h" |
| 11 | #include "src/interface-descriptors.h" |
| 12 | #include "src/interpreter/bytecodes.h" |
| 13 | #include "src/interpreter/interpreter.h" |
| 14 | #include "src/machine-type.h" |
| 15 | #include "src/macro-assembler.h" |
| 16 | #include "src/zone.h" |
| 17 | |
| 18 | namespace v8 { |
| 19 | namespace internal { |
| 20 | namespace interpreter { |
| 21 | |
| 22 | using compiler::Node; |
| 23 | |
// Sets up the code-stub-assembler state for one bytecode handler and seeds
// the accumulator, context and bytecode-array variables from the interpreter
// dispatch calling convention's parameters.
InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone,
                                           Bytecode bytecode)
    : compiler::CodeStubAssembler(
          isolate, zone, InterpreterDispatchDescriptor(isolate),
          Code::ComputeFlags(Code::STUB), Bytecodes::ToString(bytecode), 0),
      bytecode_(bytecode),
      accumulator_(this, MachineRepresentation::kTagged),
      context_(this, MachineRepresentation::kTagged),
      bytecode_array_(this, MachineRepresentation::kTagged),
      disable_stack_check_across_call_(false),
      stack_pointer_before_call_(nullptr) {
  accumulator_.Bind(
      Parameter(InterpreterDispatchDescriptor::kAccumulatorParameter));
  context_.Bind(Parameter(InterpreterDispatchDescriptor::kContextParameter));
  bytecode_array_.Bind(
      Parameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter));
  if (FLAG_trace_ignition) {
    // Emit the entry trace call here so it executes before the handler body.
    TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
  }
}
| 44 | |
| 45 | InterpreterAssembler::~InterpreterAssembler() {} |
| 46 | |
| 47 | Node* InterpreterAssembler::GetAccumulator() { return accumulator_.value(); } |
| 48 | |
| 49 | void InterpreterAssembler::SetAccumulator(Node* value) { |
| 50 | accumulator_.Bind(value); |
| 51 | } |
| 52 | |
| 53 | Node* InterpreterAssembler::GetContext() { return context_.value(); } |
| 54 | |
// Updates the current context: mirrors |value| into the frame's
// current-context register, then rebinds the local context variable.
void InterpreterAssembler::SetContext(Node* value) {
  StoreRegister(value, Register::current_context());
  context_.Bind(value);
}
| 59 | |
| 60 | Node* InterpreterAssembler::BytecodeOffset() { |
| 61 | return Parameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter); |
| 62 | } |
| 63 | |
| 64 | Node* InterpreterAssembler::RegisterFileRawPointer() { |
| 65 | return Parameter(InterpreterDispatchDescriptor::kRegisterFileParameter); |
| 66 | } |
| 67 | |
| 68 | Node* InterpreterAssembler::BytecodeArrayTaggedPointer() { |
| 69 | return bytecode_array_.value(); |
| 70 | } |
| 71 | |
| 72 | Node* InterpreterAssembler::DispatchTableRawPointer() { |
| 73 | return Parameter(InterpreterDispatchDescriptor::kDispatchTableParameter); |
| 74 | } |
| 75 | |
| 76 | Node* InterpreterAssembler::RegisterLocation(Node* reg_index) { |
| 77 | return IntPtrAdd(RegisterFileRawPointer(), RegisterFrameOffset(reg_index)); |
| 78 | } |
| 79 | |
// Loads the tagged value at byte |offset| from the register file base.
Node* InterpreterAssembler::LoadRegister(int offset) {
  return Load(MachineType::AnyTagged(), RegisterFileRawPointer(),
              Int32Constant(offset));
}
| 84 | |
| 85 | Node* InterpreterAssembler::LoadRegister(Register reg) { |
| 86 | return LoadRegister(reg.ToOperand() << kPointerSizeLog2); |
| 87 | } |
| 88 | |
// Scales a register index into a byte offset (index * kPointerSize).
Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
  return WordShl(index, kPointerSizeLog2);
}
| 92 | |
// Loads the tagged value of the register at dynamic index |reg_index|.
Node* InterpreterAssembler::LoadRegister(Node* reg_index) {
  return Load(MachineType::AnyTagged(), RegisterFileRawPointer(),
              RegisterFrameOffset(reg_index));
}
| 97 | |
// Stores |value| at byte |offset| from the register file base. No write
// barrier: interpreter registers live on the stack, not in the heap.
Node* InterpreterAssembler::StoreRegister(Node* value, int offset) {
  return StoreNoWriteBarrier(MachineRepresentation::kTagged,
                             RegisterFileRawPointer(), Int32Constant(offset),
                             value);
}
| 103 | |
| 104 | Node* InterpreterAssembler::StoreRegister(Node* value, Register reg) { |
| 105 | return StoreRegister(value, reg.ToOperand() << kPointerSizeLog2); |
| 106 | } |
| 107 | |
// Stores |value| into the register at dynamic index |reg_index|. No write
// barrier: interpreter registers live on the stack, not in the heap.
Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) {
  return StoreNoWriteBarrier(MachineRepresentation::kTagged,
                             RegisterFileRawPointer(),
                             RegisterFrameOffset(reg_index), value);
}
| 113 | |
// Returns the index of the register following |reg_index|.
Node* InterpreterAssembler::NextRegister(Node* reg_index) {
  // Register indexes are negative, so the next index is minus one.
  return IntPtrAdd(reg_index, Int32Constant(-1));
}
| 118 | |
// Loads operand |operand_index| of the current bytecode as an unsigned byte
// (zero-extended). Only valid for byte-sized operands.
Node* InterpreterAssembler::BytecodeOperand(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte,
            Bytecodes::GetOperandSize(bytecode_, operand_index));
  return Load(
      MachineType::Uint8(), BytecodeArrayTaggedPointer(),
      IntPtrAdd(BytecodeOffset(), Int32Constant(Bytecodes::GetOperandOffset(
                                      bytecode_, operand_index))));
}
| 128 | |
// Loads operand |operand_index| of the current bytecode as a signed byte,
// sign-extended to pointer width. Only valid for byte-sized operands.
Node* InterpreterAssembler::BytecodeOperandSignExtended(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte,
            Bytecodes::GetOperandSize(bytecode_, operand_index));
  Node* load = Load(
      MachineType::Int8(), BytecodeArrayTaggedPointer(),
      IntPtrAdd(BytecodeOffset(), Int32Constant(Bytecodes::GetOperandOffset(
                                      bytecode_, operand_index))));
  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}
| 143 | |
// Loads operand |operand_index| of the current bytecode as an unsigned
// 16-bit value. On targets without unaligned access support the two bytes
// are loaded separately and recombined according to target endianness.
Node* InterpreterAssembler::BytecodeOperandShort(int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kShort,
            Bytecodes::GetOperandSize(bytecode_, operand_index));
  if (TargetSupportsUnalignedAccess()) {
    return Load(
        MachineType::Uint16(), BytecodeArrayTaggedPointer(),
        IntPtrAdd(BytecodeOffset(), Int32Constant(Bytecodes::GetOperandOffset(
                                        bytecode_, operand_index))));
  } else {
    int offset = Bytecodes::GetOperandOffset(bytecode_, operand_index);
    Node* first_byte = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
                            IntPtrAdd(BytecodeOffset(), Int32Constant(offset)));
    Node* second_byte =
        Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
             IntPtrAdd(BytecodeOffset(), Int32Constant(offset + 1)));
// On little-endian the second (higher-addressed) byte is the high byte;
// on big-endian it is the first byte.
#if V8_TARGET_LITTLE_ENDIAN
    return WordOr(WordShl(second_byte, kBitsPerByte), first_byte);
#elif V8_TARGET_BIG_ENDIAN
    return WordOr(WordShl(first_byte, kBitsPerByte), second_byte);
#else
#error "Unknown Architecture"
#endif
  }
}
| 169 | |
// Loads operand |operand_index| of the current bytecode as a signed 16-bit
// value, sign-extended to pointer width. On targets without unaligned access
// support the high byte is loaded signed and the low byte unsigned, then
// recombined according to target endianness.
Node* InterpreterAssembler::BytecodeOperandShortSignExtended(
    int operand_index) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kShort,
            Bytecodes::GetOperandSize(bytecode_, operand_index));
  int operand_offset = Bytecodes::GetOperandOffset(bytecode_, operand_index);
  Node* load;
  if (TargetSupportsUnalignedAccess()) {
    load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), Int32Constant(operand_offset)));
  } else {
#if V8_TARGET_LITTLE_ENDIAN
    Node* hi_byte_offset = Int32Constant(operand_offset + 1);
    Node* lo_byte_offset = Int32Constant(operand_offset);
#elif V8_TARGET_BIG_ENDIAN
    Node* hi_byte_offset = Int32Constant(operand_offset);
    Node* lo_byte_offset = Int32Constant(operand_offset + 1);
#else
#error "Unknown Architecture"
#endif
    // The high byte carries the sign (Int8 load); the low byte must be
    // zero-extended (Uint8 load) before being or-ed in.
    Node* hi_byte = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
                         IntPtrAdd(BytecodeOffset(), hi_byte_offset));
    Node* lo_byte = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
                         IntPtrAdd(BytecodeOffset(), lo_byte_offset));
    hi_byte = Word32Shl(hi_byte, Int32Constant(kBitsPerByte));
    load = Word32Or(hi_byte, lo_byte);
  }

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = ChangeInt32ToInt64(load);
  }
  return load;
}
| 204 | |
// Loads a register-count operand, choosing the 8- or 16-bit load based on
// the operand's declared size.
Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
  switch (Bytecodes::GetOperandSize(bytecode_, operand_index)) {
    case OperandSize::kByte:
      DCHECK_EQ(OperandType::kRegCount8,
                Bytecodes::GetOperandType(bytecode_, operand_index));
      return BytecodeOperand(operand_index);
    case OperandSize::kShort:
      DCHECK_EQ(OperandType::kRegCount16,
                Bytecodes::GetOperandType(bytecode_, operand_index));
      return BytecodeOperandShort(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  // Unreachable; keeps the compiler happy about a missing return.
  return nullptr;
}
| 220 | |
// Loads an 8-bit signed immediate operand, sign-extended to pointer width.
Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
  DCHECK_EQ(OperandType::kImm8,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  return BytecodeOperandSignExtended(operand_index);
}
| 226 | |
// Loads an unsigned index operand, choosing the 8- or 16-bit load based on
// the operand's declared size.
Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
  switch (Bytecodes::GetOperandSize(bytecode_, operand_index)) {
    case OperandSize::kByte:
      DCHECK_EQ(OperandType::kIdx8,
                Bytecodes::GetOperandType(bytecode_, operand_index));
      return BytecodeOperand(operand_index);
    case OperandSize::kShort:
      DCHECK_EQ(OperandType::kIdx16,
                Bytecodes::GetOperandType(bytecode_, operand_index));
      return BytecodeOperandShort(operand_index);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  // Unreachable; keeps the compiler happy about a missing return.
  return nullptr;
}
| 242 | |
// Loads a register operand (a signed register index), sign-extended to
// pointer width, choosing the 8- or 16-bit load from the operand type.
Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) {
  OperandType operand_type =
      Bytecodes::GetOperandType(bytecode_, operand_index);
  if (Bytecodes::IsRegisterOperandType(operand_type)) {
    OperandSize operand_size = Bytecodes::SizeOfOperand(operand_type);
    if (operand_size == OperandSize::kByte) {
      return BytecodeOperandSignExtended(operand_index);
    } else if (operand_size == OperandSize::kShort) {
      return BytecodeOperandShortSignExtended(operand_index);
    }
  }
  UNREACHABLE();
  return nullptr;
}
| 257 | |
// Loads entry |index| from the bytecode array's constant pool (a
// FixedArray); the offset computation untags the heap pointer.
Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
  Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
                                        BytecodeArray::kConstantPoolOffset);
  Node* entry_offset =
      IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                WordShl(index, kPointerSizeLog2));
  return Load(MachineType::AnyTagged(), constant_pool, entry_offset);
}
| 266 | |
// Loads element |index| (a compile-time constant) from |fixed_array|,
// adjusting for the FixedArray header and heap-object tag.
Node* InterpreterAssembler::LoadFixedArrayElement(Node* fixed_array,
                                                  int index) {
  Node* entry_offset =
      IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                WordShl(Int32Constant(index), kPointerSizeLog2));
  return Load(MachineType::AnyTagged(), fixed_array, entry_offset);
}
| 274 | |
// Loads the tagged field at |offset| from heap object |object|, removing
// the heap-object tag from the address.
Node* InterpreterAssembler::LoadObjectField(Node* object, int offset) {
  return Load(MachineType::AnyTagged(), object,
              IntPtrConstant(offset - kHeapObjectTag));
}
| 279 | |
// Loads slot |slot_index| (a compile-time constant) from |context|.
Node* InterpreterAssembler::LoadContextSlot(Node* context, int slot_index) {
  return Load(MachineType::AnyTagged(), context,
              IntPtrConstant(Context::SlotOffset(slot_index)));
}
| 284 | |
// Loads the slot at dynamic index |slot_index| from |context|, adjusting
// for the Context header and heap-object tag.
Node* InterpreterAssembler::LoadContextSlot(Node* context, Node* slot_index) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                Int32Constant(Context::kHeaderSize - kHeapObjectTag));
  return Load(MachineType::AnyTagged(), context, offset);
}
| 291 | |
// Stores |value| into the slot at dynamic index |slot_index| of |context|.
// Uses a full Store (with write barrier) since contexts are heap objects.
Node* InterpreterAssembler::StoreContextSlot(Node* context, Node* slot_index,
                                             Node* value) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                Int32Constant(Context::kHeaderSize - kHeapObjectTag));
  return Store(MachineRepresentation::kTagged, context, offset, value);
}
| 299 | |
// Loads the type feedback vector of the current function: reads the
// JSFunction from its fixed frame slot, then follows
// function -> shared function info -> feedback vector.
Node* InterpreterAssembler::LoadTypeFeedbackVector() {
  Node* function = Load(
      MachineType::AnyTagged(), RegisterFileRawPointer(),
      IntPtrConstant(InterpreterFrameConstants::kFunctionFromRegisterPointer));
  Node* shared_info =
      LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset);
  Node* vector =
      LoadObjectField(shared_info, SharedFunctionInfo::kFeedbackVectorOffset);
  return vector;
}
| 310 | |
// Spills interpreter state that must survive an outgoing call: the current
// bytecode offset (as a Smi) and the bytecode array pointer are written to
// their fixed frame slots. In debug builds, also records the stack pointer
// so CallEpilogue can verify the call balanced the stack.
void InterpreterAssembler::CallPrologue() {
  StoreRegister(SmiTag(BytecodeOffset()),
                InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer);
  StoreRegister(BytecodeArrayTaggedPointer(),
                InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer);

  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    // A previous prologue must have been matched by an epilogue.
    DCHECK(stack_pointer_before_call_ == nullptr);
    stack_pointer_before_call_ = LoadStackPointer();
  }
}
| 322 | |
// Restores interpreter state after an outgoing call. In debug builds,
// verifies the stack pointer is unchanged across the call (aborting with
// kUnexpectedStackPointer otherwise).
void InterpreterAssembler::CallEpilogue() {
  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    Node* stack_pointer_after_call = LoadStackPointer();
    Node* stack_pointer_before_call = stack_pointer_before_call_;
    stack_pointer_before_call_ = nullptr;
    AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call,
                        kUnexpectedStackPointer);
  }

  // Restore bytecode array from stack frame in case the debugger has swapped us
  // to the patched debugger bytecode array.
  bytecode_array_.Bind(LoadRegister(
      InterpreterFrameConstants::kBytecodeArrayFromRegisterPointer));
}
| 337 | |
// Calls a JS function via the InterpreterPushArgsAndCall builtin.
// |first_arg| is the location of the first argument in the register file
// and |arg_count| the number of arguments (excluding the receiver —
// TODO confirm against the builtin's convention).
Node* InterpreterAssembler::CallJS(Node* function, Node* context,
                                   Node* first_arg, Node* arg_count,
                                   TailCallMode tail_call_mode) {
  Callable callable =
      CodeFactory::InterpreterPushArgsAndCall(isolate(), tail_call_mode);
  Node* code_target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  first_arg, function);
}
| 347 | |
// Calls |constructor| as a constructor with |new_target| via the
// InterpreterPushArgsAndConstruct builtin. |first_arg| is the location of
// the first argument in the register file.
Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context,
                                          Node* new_target, Node* first_arg,
                                          Node* arg_count) {
  Callable callable = CodeFactory::InterpreterPushArgsAndConstruct(isolate());
  Node* code_target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  new_target, constructor, first_arg);
}
| 356 | |
// Calls a runtime function identified by the dynamic |function_id| via the
// InterpreterCEntry stub. The C entry point is looked up in the runtime
// function table: entry address = table + id * sizeof(Runtime::Function),
// then the |entry| field is loaded from that record.
Node* InterpreterAssembler::CallRuntimeN(Node* function_id, Node* context,
                                         Node* first_arg, Node* arg_count,
                                         int result_size) {
  Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size);
  Node* code_target = HeapConstant(callable.code());

  // Get the function entry from the function id.
  Node* function_table = ExternalConstant(
      ExternalReference::runtime_function_table_address(isolate()));
  Node* function_offset =
      Int32Mul(function_id, Int32Constant(sizeof(Runtime::Function)));
  Node* function = IntPtrAdd(function_table, function_offset);
  Node* function_entry =
      Load(MachineType::Pointer(), function,
           Int32Constant(offsetof(Runtime::Function, entry)));

  return CallStub(callable.descriptor(), code_target, context, arg_count,
                  first_arg, function_entry, result_size);
}
| 376 | |
// Adds |weight| to the bytecode array's interrupt budget. If the budget
// drops below zero, calls Runtime::kInterrupt and resets the budget;
// otherwise just stores the updated value. Budget accesses use raw 32-bit
// loads/stores on the BytecodeArray field (no write barrier needed for a
// plain int field).
void InterpreterAssembler::UpdateInterruptBudget(Node* weight) {
  CodeStubAssembler::Label ok(this);
  CodeStubAssembler::Label interrupt_check(this);
  CodeStubAssembler::Label end(this);
  Node* budget_offset =
      IntPtrConstant(BytecodeArray::kInterruptBudgetOffset - kHeapObjectTag);

  // Update budget by |weight| and check if it reaches zero.
  Node* old_budget =
      Load(MachineType::Int32(), BytecodeArrayTaggedPointer(), budget_offset);
  Node* new_budget = Int32Add(old_budget, weight);
  Node* condition = Int32GreaterThanOrEqual(new_budget, Int32Constant(0));
  Branch(condition, &ok, &interrupt_check);

  // Perform interrupt and reset budget.
  Bind(&interrupt_check);
  CallRuntime(Runtime::kInterrupt, GetContext());
  StoreNoWriteBarrier(MachineRepresentation::kWord32,
                      BytecodeArrayTaggedPointer(), budget_offset,
                      Int32Constant(Interpreter::InterruptBudget()));
  Goto(&end);

  // Update budget.
  Bind(&ok);
  StoreNoWriteBarrier(MachineRepresentation::kWord32,
                      BytecodeArrayTaggedPointer(), budget_offset, new_budget);
  Goto(&end);
  Bind(&end);
}
| 406 | |
// Returns the bytecode offset advanced by the constant |delta| bytes.
Node* InterpreterAssembler::Advance(int delta) {
  return IntPtrAdd(BytecodeOffset(), Int32Constant(delta));
}
| 410 | |
// Returns the bytecode offset advanced by the dynamic |delta| bytes.
Node* InterpreterAssembler::Advance(Node* delta) {
  return IntPtrAdd(BytecodeOffset(), delta);
}
| 414 | |
// Performs an unconditional jump of |delta| bytes: charges the interrupt
// budget with the jump distance, then dispatches at the new offset.
void InterpreterAssembler::Jump(Node* delta) {
  UpdateInterruptBudget(delta);
  DispatchTo(Advance(delta));
}
| 419 | |
// Jumps by |delta| when |condition| holds; otherwise falls through to
// dispatching the next bytecode.
void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) {
  CodeStubAssembler::Label match(this);
  CodeStubAssembler::Label no_match(this);

  Branch(condition, &match, &no_match);
  Bind(&match);
  Jump(delta);
  Bind(&no_match);
  Dispatch();
}
| 430 | |
| 431 | void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) { |
| 432 | JumpConditional(WordEqual(lhs, rhs), delta); |
| 433 | } |
| 434 | |
| 435 | void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs, |
| 436 | Node* delta) { |
| 437 | JumpConditional(WordNotEqual(lhs, rhs), delta); |
| 438 | } |
| 439 | |
| 440 | void InterpreterAssembler::Dispatch() { |
| 441 | DispatchTo(Advance(Bytecodes::Size(bytecode_))); |
| 442 | } |
| 443 | |
// Loads the bytecode at |new_bytecode_offset|, looks up its handler code
// object in the dispatch table (indexed by bytecode value * kPointerSize),
// and tail-calls into it.
void InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) {
  Node* target_bytecode = Load(
      MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset);

  // TODO(rmcilroy): Create a code target dispatch table to avoid conversion
  // from code object on every dispatch.
  Node* target_code_object =
      Load(MachineType::Pointer(), DispatchTableRawPointer(),
           Word32Shl(target_bytecode, Int32Constant(kPointerSizeLog2)));

  DispatchToBytecodeHandler(target_code_object, new_bytecode_offset);
}
| 456 | |
// Tail-calls |handler| with the full interpreter dispatch parameter set,
// continuing execution at |bytecode_offset|. Emits the exit trace call
// first when --trace-ignition is on.
void InterpreterAssembler::DispatchToBytecodeHandler(Node* handler,
                                                     Node* bytecode_offset) {
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
  }

  // Argument order must match InterpreterDispatchDescriptor.
  InterpreterDispatchDescriptor descriptor(isolate());
  Node* args[] = {GetAccumulator(),          RegisterFileRawPointer(),
                  bytecode_offset,           BytecodeArrayTaggedPointer(),
                  DispatchTableRawPointer(), GetContext()};
  TailCall(descriptor, handler, args, 0);
}
| 469 | |
// Returns from the interpreted function by dispatching to the exit
// trampoline builtin. First charges the interrupt budget with a negative
// weight equal to the distance from the start of the bytecode, simulating
// a back edge to the function start for profiling purposes.
void InterpreterAssembler::InterpreterReturn() {
  // TODO(rmcilroy): Investigate whether it is worth supporting self
  // optimization of primitive functions like FullCodegen.

  // Update profiling count by -BytecodeOffset to simulate backedge to start of
  // function.
  Node* profiling_weight =
      Int32Sub(Int32Constant(kHeapObjectTag + BytecodeArray::kHeaderSize),
               BytecodeOffset());
  UpdateInterruptBudget(profiling_weight);

  Node* exit_trampoline_code_object =
      HeapConstant(isolate()->builtins()->InterpreterExitTrampoline());
  // Uses the default bytecode_offset argument (declared in the header).
  DispatchToBytecodeHandler(exit_trampoline_code_object);
}
| 485 | |
// Emits a stack-overflow check: compares the stack pointer against the
// isolate's stack limit and calls Runtime::kStackGuard when the limit has
// been crossed. Both branches rejoin at |end|.
void InterpreterAssembler::StackCheck() {
  CodeStubAssembler::Label end(this);
  CodeStubAssembler::Label ok(this);
  CodeStubAssembler::Label stack_guard(this);

  Node* sp = LoadStackPointer();
  Node* stack_limit = Load(
      MachineType::Pointer(),
      ExternalConstant(ExternalReference::address_of_stack_limit(isolate())));
  // sp >= limit means there is still stack space available.
  Node* condition = UintPtrGreaterThanOrEqual(sp, stack_limit);
  Branch(condition, &ok, &stack_guard);
  Bind(&stack_guard);
  CallRuntime(Runtime::kStackGuard, GetContext());
  Goto(&end);
  Bind(&ok);
  Goto(&end);
  Bind(&end);
}
| 504 | |
// Aborts execution with |bailout_reason| via Runtime::kAbort. The
// cross-call stack check is disabled around the call because the abort
// runtime call does not return normally.
void InterpreterAssembler::Abort(BailoutReason bailout_reason) {
  disable_stack_check_across_call_ = true;
  Node* abort_id = SmiTag(Int32Constant(bailout_reason));
  Node* ret_value = CallRuntime(Runtime::kAbort, GetContext(), abort_id);
  disable_stack_check_across_call_ = false;
  // Unreached, but keeps turbofan happy.
  Return(ret_value);
}
| 513 | |
// Aborts with |bailout_reason| when |lhs| != |rhs|; otherwise continues.
void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs,
                                               BailoutReason bailout_reason) {
  CodeStubAssembler::Label match(this);
  CodeStubAssembler::Label no_match(this);

  Node* condition = WordEqual(lhs, rhs);
  Branch(condition, &match, &no_match);
  Bind(&no_match);
  Abort(bailout_reason);
  Bind(&match);
}
| 525 | |
// Calls the given tracing runtime function (entry or exit trace) with the
// bytecode array, the current offset (as a Smi) and the accumulator.
void InterpreterAssembler::TraceBytecode(Runtime::FunctionId function_id) {
  CallRuntime(function_id, GetContext(), BytecodeArrayTaggedPointer(),
              SmiTag(BytecodeOffset()), GetAccumulator());
}
| 530 | |
// static
// Whether the compilation target can perform unaligned 16-bit loads;
// decided per architecture at compile time (with a runtime CPU-feature
// check on ARM/ARM64/PPC).
bool InterpreterAssembler::TargetSupportsUnalignedAccess() {
#if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
  return false;
#elif V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_PPC
  return CpuFeatures::IsSupported(UNALIGNED_ACCESSES);
#elif V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_X87
  return true;
#else
#error "Unknown Architecture"
#endif
}
| 543 | |
| 544 | } // namespace interpreter |
| 545 | } // namespace internal |
| 546 | } // namespace v8 |