// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
| 4 | |
| 5 | #include "src/compiler/interpreter-assembler.h" |
| 6 | |
| 7 | #include <ostream> |
| 8 | |
| 9 | #include "src/code-factory.h" |
| 10 | #include "src/compiler/graph.h" |
| 11 | #include "src/compiler/instruction-selector.h" |
| 12 | #include "src/compiler/linkage.h" |
| 13 | #include "src/compiler/pipeline.h" |
| 14 | #include "src/compiler/raw-machine-assembler.h" |
| 15 | #include "src/compiler/schedule.h" |
| 16 | #include "src/frames.h" |
| 17 | #include "src/interface-descriptors.h" |
| 18 | #include "src/interpreter/bytecodes.h" |
| 19 | #include "src/machine-type.h" |
| 20 | #include "src/macro-assembler.h" |
| 21 | #include "src/zone.h" |
| 22 | |
| 23 | namespace v8 { |
| 24 | namespace internal { |
| 25 | namespace compiler { |
| 26 | |
| 27 | |
// Creates an assembler for the handler of |bytecode|. The underlying
// RawMachineAssembler is configured with the interpreter dispatch call
// descriptor; the accumulator, bytecode offset and context are seeded
// from the corresponding interpreter linkage parameters.
InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone,
                                           interpreter::Bytecode bytecode)
    : bytecode_(bytecode),
      raw_assembler_(new RawMachineAssembler(
          isolate, new (zone) Graph(zone),
          Linkage::GetInterpreterDispatchDescriptor(zone),
          MachineType::PointerRepresentation(),
          InstructionSelector::SupportedMachineOperatorFlags())),
      accumulator_(
          raw_assembler_->Parameter(Linkage::kInterpreterAccumulatorParameter)),
      bytecode_offset_(raw_assembler_->Parameter(
          Linkage::kInterpreterBytecodeOffsetParameter)),
      context_(
          raw_assembler_->Parameter(Linkage::kInterpreterContextParameter)),
      code_generated_(false) {}


InterpreterAssembler::~InterpreterAssembler() {}
| 46 | |
| 47 | |
| 48 | Handle<Code> InterpreterAssembler::GenerateCode() { |
| 49 | DCHECK(!code_generated_); |
| 50 | |
| 51 | // Disallow empty handlers that never return. |
| 52 | DCHECK_NE(0, graph()->end()->InputCount()); |
| 53 | |
| 54 | const char* bytecode_name = interpreter::Bytecodes::ToString(bytecode_); |
| 55 | Schedule* schedule = raw_assembler_->Export(); |
| 56 | Handle<Code> code = Pipeline::GenerateCodeForCodeStub( |
| 57 | isolate(), raw_assembler_->call_descriptor(), graph(), schedule, |
| 58 | Code::STUB, bytecode_name); |
| 59 | |
| 60 | #ifdef ENABLE_DISASSEMBLER |
| 61 | if (FLAG_trace_ignition_codegen) { |
| 62 | OFStream os(stdout); |
| 63 | code->Disassemble(bytecode_name, os); |
| 64 | os << std::flush; |
| 65 | } |
| 66 | #endif |
| 67 | |
| 68 | code_generated_ = true; |
| 69 | return code; |
| 70 | } |
| 71 | |
| 72 | |
// Accessors for the values threaded through the interpreter dispatch
// linkage: the accumulator, the context, and the bytecode offset.

Node* InterpreterAssembler::GetAccumulator() { return accumulator_; }


void InterpreterAssembler::SetAccumulator(Node* value) { accumulator_ = value; }


Node* InterpreterAssembler::GetContext() { return context_; }


void InterpreterAssembler::SetContext(Node* value) { context_ = value; }


// Current offset into the bytecode array.
Node* InterpreterAssembler::BytecodeOffset() { return bytecode_offset_; }
| 86 | |
| 87 | |
// Raw pointer to the interpreter register file, taken from the dispatch
// linkage parameters.
Node* InterpreterAssembler::RegisterFileRawPointer() {
  return raw_assembler_->Parameter(Linkage::kInterpreterRegisterFileParameter);
}


// Tagged pointer to the current BytecodeArray, taken from the dispatch
// linkage parameters.
Node* InterpreterAssembler::BytecodeArrayTaggedPointer() {
  return raw_assembler_->Parameter(Linkage::kInterpreterBytecodeArrayParameter);
}


// Raw pointer to the interpreter dispatch table, taken from the
// dispatch linkage parameters.
Node* InterpreterAssembler::DispatchTableRawPointer() {
  return raw_assembler_->Parameter(Linkage::kInterpreterDispatchTableParameter);
}


// Address of the register at |reg_index| within the register file.
Node* InterpreterAssembler::RegisterLocation(Node* reg_index) {
  return IntPtrAdd(RegisterFileRawPointer(), RegisterFrameOffset(reg_index));
}
| 106 | |
| 107 | |
// Loads the tagged value stored at a fixed byte |offset| from the start
// of the register file.
Node* InterpreterAssembler::LoadRegister(int offset) {
  return raw_assembler_->Load(MachineType::AnyTagged(),
                              RegisterFileRawPointer(), Int32Constant(offset));
}


// Loads interpreter register |reg|, converting its operand index into a
// byte offset within the register file.
Node* InterpreterAssembler::LoadRegister(interpreter::Register reg) {
  return LoadRegister(reg.ToOperand() << kPointerSizeLog2);
}


// Converts a register index into a byte offset within the register
// file.
Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
  return WordShl(index, kPointerSizeLog2);
}


// Loads the tagged value held in the register selected by the dynamic
// |reg_index|.
Node* InterpreterAssembler::LoadRegister(Node* reg_index) {
  return raw_assembler_->Load(MachineType::AnyTagged(),
                              RegisterFileRawPointer(),
                              RegisterFrameOffset(reg_index));
}


// Stores |value| at a fixed byte |offset| from the register file base.
// The store goes through the raw register-file pointer, with no write
// barrier.
Node* InterpreterAssembler::StoreRegister(Node* value, int offset) {
  return raw_assembler_->Store(MachineRepresentation::kTagged,
                               RegisterFileRawPointer(), Int32Constant(offset),
                               value, kNoWriteBarrier);
}


// Stores |value| into interpreter register |reg|.
Node* InterpreterAssembler::StoreRegister(Node* value,
                                          interpreter::Register reg) {
  return StoreRegister(value, reg.ToOperand() << kPointerSizeLog2);
}


// Stores |value| into the register selected by the dynamic |reg_index|.
Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) {
  return raw_assembler_->Store(
      MachineRepresentation::kTagged, RegisterFileRawPointer(),
      RegisterFrameOffset(reg_index), value, kNoWriteBarrier);
}


Node* InterpreterAssembler::NextRegister(Node* reg_index) {
  // Register indexes are negative, so the next index is minus one.
  return IntPtrAdd(reg_index, Int32Constant(-1));
}
| 155 | |
| 156 | |
// Loads the unsigned byte operand at |operand_index| of the current
// bytecode.
Node* InterpreterAssembler::BytecodeOperand(int operand_index) {
  DCHECK_LT(operand_index, interpreter::Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(interpreter::OperandSize::kByte,
            interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index));
  return raw_assembler_->Load(
      MachineType::Uint8(), BytecodeArrayTaggedPointer(),
      IntPtrAdd(BytecodeOffset(),
                Int32Constant(interpreter::Bytecodes::GetOperandOffset(
                    bytecode_, operand_index))));
}


// Loads the signed byte operand at |operand_index| of the current
// bytecode, sign-extended to full pointer width on 64-bit targets.
Node* InterpreterAssembler::BytecodeOperandSignExtended(int operand_index) {
  DCHECK_LT(operand_index, interpreter::Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(interpreter::OperandSize::kByte,
            interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index));
  Node* load = raw_assembler_->Load(
      MachineType::Int8(), BytecodeArrayTaggedPointer(),
      IntPtrAdd(BytecodeOffset(),
                Int32Constant(interpreter::Bytecodes::GetOperandOffset(
                    bytecode_, operand_index))));
  // Ensure that we sign extend to full pointer size.
  if (kPointerSize == 8) {
    load = raw_assembler_->ChangeInt32ToInt64(load);
  }
  return load;
}
| 184 | |
| 185 | |
// Loads the unsigned 16-bit operand at |operand_index| of the current
// bytecode. When the target does not support unaligned access, the two
// bytes are loaded separately and combined according to the target's
// endianness.
Node* InterpreterAssembler::BytecodeOperandShort(int operand_index) {
  DCHECK_LT(operand_index, interpreter::Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(interpreter::OperandSize::kShort,
            interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index));
  if (TargetSupportsUnalignedAccess()) {
    return raw_assembler_->Load(
        MachineType::Uint16(), BytecodeArrayTaggedPointer(),
        IntPtrAdd(BytecodeOffset(),
                  Int32Constant(interpreter::Bytecodes::GetOperandOffset(
                      bytecode_, operand_index))));
  } else {
    int offset =
        interpreter::Bytecodes::GetOperandOffset(bytecode_, operand_index);
    Node* first_byte = raw_assembler_->Load(
        MachineType::Uint8(), BytecodeArrayTaggedPointer(),
        IntPtrAdd(BytecodeOffset(), Int32Constant(offset)));
    Node* second_byte = raw_assembler_->Load(
        MachineType::Uint8(), BytecodeArrayTaggedPointer(),
        IntPtrAdd(BytecodeOffset(), Int32Constant(offset + 1)));
#if V8_TARGET_LITTLE_ENDIAN
    // Little endian: the second byte is the high byte.
    return raw_assembler_->WordOr(WordShl(second_byte, kBitsPerByte),
                                  first_byte);
#elif V8_TARGET_BIG_ENDIAN
    // Big endian: the first byte is the high byte.
    return raw_assembler_->WordOr(WordShl(first_byte, kBitsPerByte),
                                  second_byte);
#else
#error "Unknown Architecture"
#endif
  }
}
| 216 | |
| 217 | |
// Loads the signed 16-bit operand at |operand_index| of the current
// bytecode, sign-extended to full pointer width on 64-bit targets. When
// the target does not support unaligned access, the two bytes are
// loaded separately (sign-extending only the high byte) and combined.
Node* InterpreterAssembler::BytecodeOperandShortSignExtended(
    int operand_index) {
  DCHECK_LT(operand_index, interpreter::Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(interpreter::OperandSize::kShort,
            interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index));
  int operand_offset =
      interpreter::Bytecodes::GetOperandOffset(bytecode_, operand_index);
  Node* load;
  if (TargetSupportsUnalignedAccess()) {
    load = raw_assembler_->Load(
        MachineType::Int16(), BytecodeArrayTaggedPointer(),
        IntPtrAdd(BytecodeOffset(), Int32Constant(operand_offset)));
  } else {
#if V8_TARGET_LITTLE_ENDIAN
    Node* hi_byte_offset = Int32Constant(operand_offset + 1);
    Node* lo_byte_offset = Int32Constant(operand_offset);
#elif V8_TARGET_BIG_ENDIAN
    Node* hi_byte_offset = Int32Constant(operand_offset);
    Node* lo_byte_offset = Int32Constant(operand_offset + 1);
#else
#error "Unknown Architecture"
#endif
    // The high byte is loaded signed (Int8), the low byte unsigned
    // (Uint8), so the combined value carries the sign of the high byte.
    Node* hi_byte =
        raw_assembler_->Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
                             IntPtrAdd(BytecodeOffset(), hi_byte_offset));
    Node* lo_byte =
        raw_assembler_->Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
                             IntPtrAdd(BytecodeOffset(), lo_byte_offset));
    hi_byte = raw_assembler_->Word32Shl(hi_byte, Int32Constant(kBitsPerByte));
    load = raw_assembler_->Word32Or(hi_byte, lo_byte);
  }

  // Ensure that we sign extend to full pointer size.
  if (kPointerSize == 8) {
    load = raw_assembler_->ChangeInt32ToInt64(load);
  }
  return load;
}
| 256 | |
| 257 | |
// Loads a count operand (kCount8 or kCount16) at |operand_index|,
// selecting the 8- or 16-bit unsigned load from the bytecode's declared
// operand size.
Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
  switch (interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index)) {
    case interpreter::OperandSize::kByte:
      DCHECK_EQ(
          interpreter::OperandType::kCount8,
          interpreter::Bytecodes::GetOperandType(bytecode_, operand_index));
      return BytecodeOperand(operand_index);
    case interpreter::OperandSize::kShort:
      DCHECK_EQ(
          interpreter::OperandType::kCount16,
          interpreter::Bytecodes::GetOperandType(bytecode_, operand_index));
      return BytecodeOperandShort(operand_index);
    default:
      UNREACHABLE();
      return nullptr;
  }
}


// Loads an 8-bit signed immediate operand at |operand_index|.
Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
  DCHECK_EQ(interpreter::OperandType::kImm8,
            interpreter::Bytecodes::GetOperandType(bytecode_, operand_index));
  return BytecodeOperandSignExtended(operand_index);
}
| 282 | |
| 283 | |
// Loads an index operand (kIdx8 or kIdx16) at |operand_index|,
// selecting the 8- or 16-bit unsigned load from the bytecode's declared
// operand size.
Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
  switch (interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index)) {
    case interpreter::OperandSize::kByte:
      DCHECK_EQ(
          interpreter::OperandType::kIdx8,
          interpreter::Bytecodes::GetOperandType(bytecode_, operand_index));
      return BytecodeOperand(operand_index);
    case interpreter::OperandSize::kShort:
      DCHECK_EQ(
          interpreter::OperandType::kIdx16,
          interpreter::Bytecodes::GetOperandType(bytecode_, operand_index));
      return BytecodeOperandShort(operand_index);
    default:
      UNREACHABLE();
      return nullptr;
  }
}


// Loads a register operand at |operand_index| (sign-extended, since
// register indexes can be negative), selecting the 8- or 16-bit variant
// from the operand type.
Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) {
  switch (interpreter::Bytecodes::GetOperandType(bytecode_, operand_index)) {
    case interpreter::OperandType::kReg8:
    case interpreter::OperandType::kRegPair8:
    case interpreter::OperandType::kMaybeReg8:
      DCHECK_EQ(
          interpreter::OperandSize::kByte,
          interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index));
      return BytecodeOperandSignExtended(operand_index);
    case interpreter::OperandType::kReg16:
      DCHECK_EQ(
          interpreter::OperandSize::kShort,
          interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index));
      return BytecodeOperandShortSignExtended(operand_index);
    default:
      UNREACHABLE();
      return nullptr;
  }
}
| 322 | |
| 323 | |
// Constant-construction and arithmetic helpers; thin wrappers over the
// corresponding RawMachineAssembler node constructors.

Node* InterpreterAssembler::Int32Constant(int value) {
  return raw_assembler_->Int32Constant(value);
}


Node* InterpreterAssembler::IntPtrConstant(intptr_t value) {
  return raw_assembler_->IntPtrConstant(value);
}


Node* InterpreterAssembler::NumberConstant(double value) {
  return raw_assembler_->NumberConstant(value);
}


Node* InterpreterAssembler::HeapConstant(Handle<HeapObject> object) {
  return raw_assembler_->HeapConstant(object);
}


Node* InterpreterAssembler::BooleanConstant(bool value) {
  return raw_assembler_->BooleanConstant(value);
}


// Number of bits to shift when converting between a Smi and its
// untagged value.
Node* InterpreterAssembler::SmiShiftBitsConstant() {
  return Int32Constant(kSmiShiftSize + kSmiTagSize);
}


// Tags an untagged word value as a Smi (left shift).
Node* InterpreterAssembler::SmiTag(Node* value) {
  return raw_assembler_->WordShl(value, SmiShiftBitsConstant());
}


// Untags a Smi (arithmetic right shift preserves the sign).
Node* InterpreterAssembler::SmiUntag(Node* value) {
  return raw_assembler_->WordSar(value, SmiShiftBitsConstant());
}


Node* InterpreterAssembler::IntPtrAdd(Node* a, Node* b) {
  return raw_assembler_->IntPtrAdd(a, b);
}


Node* InterpreterAssembler::IntPtrSub(Node* a, Node* b) {
  return raw_assembler_->IntPtrSub(a, b);
}


// Left-shifts |value| by the immediate |shift| amount.
Node* InterpreterAssembler::WordShl(Node* value, int shift) {
  return raw_assembler_->WordShl(value, Int32Constant(shift));
}
| 377 | |
| 378 | |
// Loads the entry at |index| from the bytecode array's constant pool
// (a FixedArray).
Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
  Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
                                        BytecodeArray::kConstantPoolOffset);
  Node* entry_offset =
      IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                WordShl(index, kPointerSizeLog2));
  return raw_assembler_->Load(MachineType::AnyTagged(), constant_pool,
                              entry_offset);
}


// Loads element |index| (statically known) from |fixed_array|.
Node* InterpreterAssembler::LoadFixedArrayElement(Node* fixed_array,
                                                  int index) {
  Node* entry_offset =
      IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                WordShl(Int32Constant(index), kPointerSizeLog2));
  return raw_assembler_->Load(MachineType::AnyTagged(), fixed_array,
                              entry_offset);
}


// Loads the field at byte |offset| of tagged heap |object|; the
// kHeapObjectTag subtraction removes the pointer tag.
Node* InterpreterAssembler::LoadObjectField(Node* object, int offset) {
  return raw_assembler_->Load(MachineType::AnyTagged(), object,
                              IntPtrConstant(offset - kHeapObjectTag));
}
| 404 | |
| 405 | |
// Loads the value of the statically-known context slot |slot_index|.
Node* InterpreterAssembler::LoadContextSlot(Node* context, int slot_index) {
  return raw_assembler_->Load(MachineType::AnyTagged(), context,
                              IntPtrConstant(Context::SlotOffset(slot_index)));
}


// Loads the value of the context slot selected by the dynamic
// |slot_index|.
Node* InterpreterAssembler::LoadContextSlot(Node* context, Node* slot_index) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                Int32Constant(Context::kHeaderSize - kHeapObjectTag));
  return raw_assembler_->Load(MachineType::AnyTagged(), context, offset);
}


// Stores |value| into the context slot selected by the dynamic
// |slot_index|, with a full write barrier (the context is a tagged heap
// object).
Node* InterpreterAssembler::StoreContextSlot(Node* context, Node* slot_index,
                                             Node* value) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                Int32Constant(Context::kHeaderSize - kHeapObjectTag));
  return raw_assembler_->Store(MachineRepresentation::kTagged, context, offset,
                               value, kFullWriteBarrier);
}
| 428 | |
| 429 | |
| 430 | Node* InterpreterAssembler::LoadTypeFeedbackVector() { |
| 431 | Node* function = raw_assembler_->Load( |
| 432 | MachineType::AnyTagged(), RegisterFileRawPointer(), |
| 433 | IntPtrConstant(InterpreterFrameConstants::kFunctionFromRegisterPointer)); |
| 434 | Node* shared_info = |
| 435 | LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset); |
| 436 | Node* vector = |
| 437 | LoadObjectField(shared_info, SharedFunctionInfo::kFeedbackVectorOffset); |
| 438 | return vector; |
| 439 | } |
| 440 | |
| 441 | |
| 442 | Node* InterpreterAssembler::Projection(int index, Node* node) { |
| 443 | return raw_assembler_->Projection(index, node); |
| 444 | } |
| 445 | |
| 446 | |
| 447 | Node* InterpreterAssembler::CallConstruct(Node* new_target, Node* constructor, |
| 448 | Node* first_arg, Node* arg_count) { |
| 449 | Callable callable = CodeFactory::InterpreterPushArgsAndConstruct(isolate()); |
| 450 | CallDescriptor* descriptor = Linkage::GetStubCallDescriptor( |
| 451 | isolate(), zone(), callable.descriptor(), 0, CallDescriptor::kNoFlags); |
| 452 | |
| 453 | Node* code_target = HeapConstant(callable.code()); |
| 454 | |
| 455 | Node** args = zone()->NewArray<Node*>(5); |
| 456 | args[0] = arg_count; |
| 457 | args[1] = new_target; |
| 458 | args[2] = constructor; |
| 459 | args[3] = first_arg; |
| 460 | args[4] = GetContext(); |
| 461 | |
| 462 | return CallN(descriptor, code_target, args); |
| 463 | } |
| 464 | |
| 465 | |
| 466 | void InterpreterAssembler::CallPrologue() { |
| 467 | StoreRegister(SmiTag(bytecode_offset_), |
| 468 | InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer); |
| 469 | } |
| 470 | |
| 471 | |
| 472 | void InterpreterAssembler::CallEpilogue() { |
| 473 | // Restore the bytecode offset from the stack frame. |
| 474 | bytecode_offset_ = SmiUntag(LoadRegister( |
| 475 | InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer)); |
| 476 | } |
| 477 | |
| 478 | |
// Performs a call through |descriptor|, wrapped in the bytecode-offset
// spill/restore prologue and epilogue. With --debug-code, additionally
// verifies that the stack pointer is unchanged across the call.
Node* InterpreterAssembler::CallN(CallDescriptor* descriptor, Node* code_target,
                                  Node** args) {
  CallPrologue();

  Node* stack_pointer_before_call = nullptr;
  if (FLAG_debug_code) {
    stack_pointer_before_call = raw_assembler_->LoadStackPointer();
  }
  Node* return_val = raw_assembler_->CallN(descriptor, code_target, args);
  if (FLAG_debug_code) {
    Node* stack_pointer_after_call = raw_assembler_->LoadStackPointer();
    AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call,
                        kUnexpectedStackPointer);
  }

  CallEpilogue();
  return return_val;
}
| 497 | |
| 498 | |
// Calls JS |function| through the InterpreterPushArgsAndCall stub.
// |first_arg| is the address of the first argument in the register file
// and |arg_count| the number of arguments.
Node* InterpreterAssembler::CallJS(Node* function, Node* first_arg,
                                   Node* arg_count) {
  Callable callable = CodeFactory::InterpreterPushArgsAndCall(isolate());
  CallDescriptor* descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), callable.descriptor(), 0, CallDescriptor::kNoFlags);

  Node* code_target = HeapConstant(callable.code());

  Node** args = zone()->NewArray<Node*>(4);
  args[0] = arg_count;
  args[1] = first_arg;
  args[2] = function;
  args[3] = GetContext();

  return CallN(descriptor, code_target, args);
}


// Calls IC |target| with a pre-built |args| array, using a stub call
// descriptor derived from |descriptor|.
Node* InterpreterAssembler::CallIC(CallInterfaceDescriptor descriptor,
                                   Node* target, Node** args) {
  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), descriptor, 0, CallDescriptor::kNoFlags);
  return CallN(call_descriptor, target, args);
}
| 523 | |
| 524 | |
| 525 | Node* InterpreterAssembler::CallIC(CallInterfaceDescriptor descriptor, |
| 526 | Node* target, Node* arg1, Node* arg2, |
| 527 | Node* arg3) { |
| 528 | Node** args = zone()->NewArray<Node*>(4); |
| 529 | args[0] = arg1; |
| 530 | args[1] = arg2; |
| 531 | args[2] = arg3; |
| 532 | args[3] = GetContext(); |
| 533 | return CallIC(descriptor, target, args); |
| 534 | } |
| 535 | |
| 536 | |
| 537 | Node* InterpreterAssembler::CallIC(CallInterfaceDescriptor descriptor, |
| 538 | Node* target, Node* arg1, Node* arg2, |
| 539 | Node* arg3, Node* arg4) { |
| 540 | Node** args = zone()->NewArray<Node*>(5); |
| 541 | args[0] = arg1; |
| 542 | args[1] = arg2; |
| 543 | args[2] = arg3; |
| 544 | args[3] = arg4; |
| 545 | args[4] = GetContext(); |
| 546 | return CallIC(descriptor, target, args); |
| 547 | } |
| 548 | |
| 549 | |
| 550 | Node* InterpreterAssembler::CallIC(CallInterfaceDescriptor descriptor, |
| 551 | Node* target, Node* arg1, Node* arg2, |
| 552 | Node* arg3, Node* arg4, Node* arg5) { |
| 553 | Node** args = zone()->NewArray<Node*>(6); |
| 554 | args[0] = arg1; |
| 555 | args[1] = arg2; |
| 556 | args[2] = arg3; |
| 557 | args[3] = arg4; |
| 558 | args[4] = arg5; |
| 559 | args[5] = GetContext(); |
| 560 | return CallIC(descriptor, target, args); |
| 561 | } |
| 562 | |
| 563 | |
// Calls a runtime function whose |function_id| is a dynamic value, via
// the InterpreterCEntry stub. The C entry address is looked up in the
// runtime function table at index |function_id|.
Node* InterpreterAssembler::CallRuntime(Node* function_id, Node* first_arg,
                                        Node* arg_count, int result_size) {
  Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size);
  CallDescriptor* descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), callable.descriptor(), 0, CallDescriptor::kNoFlags,
      Operator::kNoProperties, MachineType::AnyTagged(), result_size);
  Node* code_target = HeapConstant(callable.code());

  // Get the function entry from the function id.
  Node* function_table = raw_assembler_->ExternalConstant(
      ExternalReference::runtime_function_table_address(isolate()));
  Node* function_offset = raw_assembler_->Int32Mul(
      function_id, Int32Constant(sizeof(Runtime::Function)));
  Node* function = IntPtrAdd(function_table, function_offset);
  Node* function_entry =
      raw_assembler_->Load(MachineType::Pointer(), function,
                           Int32Constant(offsetof(Runtime::Function, entry)));

  Node** args = zone()->NewArray<Node*>(4);
  args[0] = arg_count;
  args[1] = first_arg;
  args[2] = function_entry;
  args[3] = GetContext();

  return CallN(descriptor, code_target, args);
}
| 590 | |
| 591 | |
// CallRuntime overloads for statically-known runtime functions; each
// wraps the RawMachineAssembler runtime call in the bytecode-offset
// spill/restore prologue and epilogue.

Node* InterpreterAssembler::CallRuntime(Runtime::FunctionId function_id,
                                        Node* arg1) {
  CallPrologue();
  Node* return_val =
      raw_assembler_->CallRuntime1(function_id, arg1, GetContext());
  CallEpilogue();
  return return_val;
}


Node* InterpreterAssembler::CallRuntime(Runtime::FunctionId function_id,
                                        Node* arg1, Node* arg2) {
  CallPrologue();
  Node* return_val =
      raw_assembler_->CallRuntime2(function_id, arg1, arg2, GetContext());
  CallEpilogue();
  return return_val;
}


Node* InterpreterAssembler::CallRuntime(Runtime::FunctionId function_id,
                                        Node* arg1, Node* arg2, Node* arg3,
                                        Node* arg4) {
  CallPrologue();
  Node* return_val = raw_assembler_->CallRuntime4(function_id, arg1, arg2, arg3,
                                                  arg4, GetContext());
  CallEpilogue();
  return return_val;
}
| 621 | |
| 622 | |
// Returns from the bytecode handler by tail-calling the
// InterpreterExitTrampoline, forwarding the dispatch parameters.
void InterpreterAssembler::Return() {
  Node* exit_trampoline_code_object =
      HeapConstant(isolate()->builtins()->InterpreterExitTrampoline());
  // If the order of the parameters changes, the call signature below
  // needs to change too.
  STATIC_ASSERT(0 == Linkage::kInterpreterAccumulatorParameter);
  STATIC_ASSERT(1 == Linkage::kInterpreterRegisterFileParameter);
  STATIC_ASSERT(2 == Linkage::kInterpreterBytecodeOffsetParameter);
  STATIC_ASSERT(3 == Linkage::kInterpreterBytecodeArrayParameter);
  STATIC_ASSERT(4 == Linkage::kInterpreterDispatchTableParameter);
  STATIC_ASSERT(5 == Linkage::kInterpreterContextParameter);
  Node* args[] = { GetAccumulator(),
                   RegisterFileRawPointer(),
                   BytecodeOffset(),
                   BytecodeArrayTaggedPointer(),
                   DispatchTableRawPointer(),
                   GetContext() };
  raw_assembler_->TailCallN(call_descriptor(), exit_trampoline_code_object,
                            args);
}
| 642 | |
| 643 | |
// Returns the bytecode offset advanced by the static |delta|.
Node* InterpreterAssembler::Advance(int delta) {
  return IntPtrAdd(BytecodeOffset(), Int32Constant(delta));
}


// Returns the bytecode offset advanced by the dynamic |delta|.
Node* InterpreterAssembler::Advance(Node* delta) {
  return raw_assembler_->IntPtrAdd(BytecodeOffset(), delta);
}


// Unconditional relative jump: dispatches to the bytecode at the
// current offset plus |delta|.
void InterpreterAssembler::Jump(Node* delta) { DispatchTo(Advance(delta)); }


// Relative jump by |delta| if |lhs| equals |rhs|; otherwise dispatches
// to the next bytecode.
void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
  RawMachineLabel match, no_match;
  Node* condition = raw_assembler_->WordEqual(lhs, rhs);
  raw_assembler_->Branch(condition, &match, &no_match);
  raw_assembler_->Bind(&match);
  DispatchTo(Advance(delta));
  raw_assembler_->Bind(&no_match);
  Dispatch();
}


// Dispatches to the bytecode immediately following the current one.
void InterpreterAssembler::Dispatch() {
  DispatchTo(Advance(interpreter::Bytecodes::Size(bytecode_)));
}
| 671 | |
| 672 | |
// Loads the bytecode at |new_bytecode_offset|, looks up its handler in
// the dispatch table, and tail-calls it with the dispatch parameters.
void InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) {
  Node* target_bytecode = raw_assembler_->Load(
      MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset);

  // TODO(rmcilroy): Create a code target dispatch table to avoid conversion
  // from code object on every dispatch.
  Node* target_code_object = raw_assembler_->Load(
      MachineType::Pointer(), DispatchTableRawPointer(),
      raw_assembler_->Word32Shl(target_bytecode,
                                Int32Constant(kPointerSizeLog2)));

  // If the order of the parameters changes, the call signature below
  // needs to change too.
  STATIC_ASSERT(0 == Linkage::kInterpreterAccumulatorParameter);
  STATIC_ASSERT(1 == Linkage::kInterpreterRegisterFileParameter);
  STATIC_ASSERT(2 == Linkage::kInterpreterBytecodeOffsetParameter);
  STATIC_ASSERT(3 == Linkage::kInterpreterBytecodeArrayParameter);
  STATIC_ASSERT(4 == Linkage::kInterpreterDispatchTableParameter);
  STATIC_ASSERT(5 == Linkage::kInterpreterContextParameter);
  Node* args[] = { GetAccumulator(),
                   RegisterFileRawPointer(),
                   new_bytecode_offset,
                   BytecodeArrayTaggedPointer(),
                   DispatchTableRawPointer(),
                   GetContext() };
  raw_assembler_->TailCallN(call_descriptor(), target_code_object, args);
}
| 699 | |
| 700 | |
| 701 | void InterpreterAssembler::Abort(BailoutReason bailout_reason) { |
| 702 | Node* abort_id = SmiTag(Int32Constant(bailout_reason)); |
| 703 | Node* ret_value = CallRuntime(Runtime::kAbort, abort_id); |
| 704 | // Unreached, but keeps turbofan happy. |
| 705 | raw_assembler_->Return(ret_value); |
| 706 | } |
| 707 | |
| 708 | |
| 709 | void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs, |
| 710 | BailoutReason bailout_reason) { |
| 711 | RawMachineLabel match, no_match; |
| 712 | Node* condition = raw_assembler_->WordEqual(lhs, rhs); |
| 713 | raw_assembler_->Branch(condition, &match, &no_match); |
| 714 | raw_assembler_->Bind(&no_match); |
| 715 | Abort(bailout_reason); |
| 716 | raw_assembler_->Bind(&match); |
| 717 | } |
| 718 | |
| 719 | |
// static
// Whether the compilation target allows unaligned 16-bit loads; used to
// choose between a single Uint16 load and two byte loads for short
// bytecode operands.
bool InterpreterAssembler::TargetSupportsUnalignedAccess() {
#if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
  return false;
#elif V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_PPC
  return CpuFeatures::IsSupported(UNALIGNED_ACCESSES);
#elif V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_X87
  return true;
#else
#error "Unknown Architecture"
#endif
}
| 732 | |
| 733 | |
// RawMachineAssembler delegate helpers:

Isolate* InterpreterAssembler::isolate() { return raw_assembler_->isolate(); }


Graph* InterpreterAssembler::graph() { return raw_assembler_->graph(); }


CallDescriptor* InterpreterAssembler::call_descriptor() const {
  return raw_assembler_->call_descriptor();
}


Zone* InterpreterAssembler::zone() { return raw_assembler_->zone(); }
| 747 | |
| 748 | |
| 749 | } // namespace compiler |
| 750 | } // namespace internal |
| 751 | } // namespace v8 |