// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4
5#ifndef V8_COMPILER_INSTRUCTION_H_
6#define V8_COMPILER_INSTRUCTION_H_
7
8#include <deque>
9#include <map>
10#include <set>
11
12#include "src/compiler/common-operator.h"
13#include "src/compiler/frame.h"
14#include "src/compiler/graph.h"
15#include "src/compiler/instruction-codes.h"
16#include "src/compiler/opcodes.h"
17#include "src/compiler/schedule.h"
18// TODO(titzer): don't include the macro-assembler?
19#include "src/macro-assembler.h"
20#include "src/zone-allocator.h"
21
22namespace v8 {
23namespace internal {
24
25// Forward declarations.
26class OStream;
27
28namespace compiler {
29
30// Forward declarations.
31class Linkage;
32
// A couple of reserved opcodes are used for internal use.
// They are negative so they can never collide with real architecture
// opcodes (Instruction::New DCHECKs that those are >= 0).
const InstructionCode kGapInstruction = -1;
const InstructionCode kBlockStartInstruction = -2;
const InstructionCode kSourcePositionInstruction = -3;
37
38
// X-macro over the concrete operand kinds:
// V(class name prefix, InstructionOperand::Kind value, cache size used by
//   SubKindOperand::Create).
#define INSTRUCTION_OPERAND_LIST(V)          \
  V(Constant, CONSTANT, 128)                 \
  V(Immediate, IMMEDIATE, 128)               \
  V(StackSlot, STACK_SLOT, 128)              \
  V(DoubleStackSlot, DOUBLE_STACK_SLOT, 128) \
  V(Register, REGISTER, Register::kNumRegisters) \
  V(DoubleRegister, DOUBLE_REGISTER, DoubleRegister::kMaxNumRegisters)
46
// Base class for all operands. An operand packs its Kind (low 3 bits) and
// a signed index into the single word value_; subclasses layer further
// bit fields on top (see UnallocatedOperand).
class InstructionOperand : public ZoneObject {
 public:
  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT,
    IMMEDIATE,
    STACK_SLOT,
    DOUBLE_STACK_SLOT,
    REGISTER,
    DOUBLE_REGISTER
  };

  InstructionOperand() : value_(KindField::encode(INVALID)) {}
  InstructionOperand(Kind kind, int index) { ConvertTo(kind, index); }

  Kind kind() const { return KindField::decode(value_); }
  // Arithmetic right shift on the signed cast, so negative indices
  // (e.g. stack slots below the frame base) round-trip correctly.
  int index() const { return static_cast<int>(value_) >> KindField::kSize; }
#define INSTRUCTION_OPERAND_PREDICATE(name, type, number) \
  bool Is##name() const { return kind() == type; }
  INSTRUCTION_OPERAND_LIST(INSTRUCTION_OPERAND_PREDICATE)
  INSTRUCTION_OPERAND_PREDICATE(Unallocated, UNALLOCATED, 0)
  INSTRUCTION_OPERAND_PREDICATE(Ignored, INVALID, 0)
#undef INSTRUCTION_OPERAND_PREDICATE
  // Equality is bitwise: same kind and same index (and, for unallocated
  // operands, the same policy bits).
  bool Equals(InstructionOperand* other) const {
    return value_ == other->value_;
  }

  // Re-encodes this operand in place as (kind, index).
  void ConvertTo(Kind kind, int index) {
    if (kind == REGISTER || kind == DOUBLE_REGISTER) DCHECK(index >= 0);
    value_ = KindField::encode(kind);
    value_ |= index << KindField::kSize;
    DCHECK(this->index() == index);  // Verify the index survived encoding.
  }

  // Calls SetUpCache()/TearDownCache() for each subclass.
  static void SetUpCaches();
  static void TearDownCaches();

 protected:
  typedef BitField<Kind, 0, 3> KindField;

  unsigned value_;
};
91
typedef ZoneVector<InstructionOperand*> InstructionOperandVector;

// Debug printing for operands.
OStream& operator<<(OStream& os, const InstructionOperand& op);
95
96class UnallocatedOperand : public InstructionOperand {
97 public:
98 enum BasicPolicy { FIXED_SLOT, EXTENDED_POLICY };
99
100 enum ExtendedPolicy {
101 NONE,
102 ANY,
103 FIXED_REGISTER,
104 FIXED_DOUBLE_REGISTER,
105 MUST_HAVE_REGISTER,
106 SAME_AS_FIRST_INPUT
107 };
108
109 // Lifetime of operand inside the instruction.
110 enum Lifetime {
111 // USED_AT_START operand is guaranteed to be live only at
112 // instruction start. Register allocator is free to assign the same register
113 // to some other operand used inside instruction (i.e. temporary or
114 // output).
115 USED_AT_START,
116
117 // USED_AT_END operand is treated as live until the end of
118 // instruction. This means that register allocator will not reuse it's
119 // register for any other operand inside instruction.
120 USED_AT_END
121 };
122
123 explicit UnallocatedOperand(ExtendedPolicy policy)
124 : InstructionOperand(UNALLOCATED, 0) {
125 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
126 value_ |= ExtendedPolicyField::encode(policy);
127 value_ |= LifetimeField::encode(USED_AT_END);
128 }
129
130 UnallocatedOperand(BasicPolicy policy, int index)
131 : InstructionOperand(UNALLOCATED, 0) {
132 DCHECK(policy == FIXED_SLOT);
133 value_ |= BasicPolicyField::encode(policy);
134 value_ |= index << FixedSlotIndexField::kShift;
135 DCHECK(this->fixed_slot_index() == index);
136 }
137
138 UnallocatedOperand(ExtendedPolicy policy, int index)
139 : InstructionOperand(UNALLOCATED, 0) {
140 DCHECK(policy == FIXED_REGISTER || policy == FIXED_DOUBLE_REGISTER);
141 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
142 value_ |= ExtendedPolicyField::encode(policy);
143 value_ |= LifetimeField::encode(USED_AT_END);
144 value_ |= FixedRegisterField::encode(index);
145 }
146
147 UnallocatedOperand(ExtendedPolicy policy, Lifetime lifetime)
148 : InstructionOperand(UNALLOCATED, 0) {
149 value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
150 value_ |= ExtendedPolicyField::encode(policy);
151 value_ |= LifetimeField::encode(lifetime);
152 }
153
154 UnallocatedOperand* CopyUnconstrained(Zone* zone) {
155 UnallocatedOperand* result = new (zone) UnallocatedOperand(ANY);
156 result->set_virtual_register(virtual_register());
157 return result;
158 }
159
160 static const UnallocatedOperand* cast(const InstructionOperand* op) {
161 DCHECK(op->IsUnallocated());
162 return static_cast<const UnallocatedOperand*>(op);
163 }
164
165 static UnallocatedOperand* cast(InstructionOperand* op) {
166 DCHECK(op->IsUnallocated());
167 return static_cast<UnallocatedOperand*>(op);
168 }
169
170 // The encoding used for UnallocatedOperand operands depends on the policy
171 // that is
172 // stored within the operand. The FIXED_SLOT policy uses a compact encoding
173 // because it accommodates a larger pay-load.
174 //
175 // For FIXED_SLOT policy:
176 // +------------------------------------------+
177 // | slot_index | vreg | 0 | 001 |
178 // +------------------------------------------+
179 //
180 // For all other (extended) policies:
181 // +------------------------------------------+
182 // | reg_index | L | PPP | vreg | 1 | 001 | L ... Lifetime
183 // +------------------------------------------+ P ... Policy
184 //
185 // The slot index is a signed value which requires us to decode it manually
186 // instead of using the BitField utility class.
187
188 // The superclass has a KindField.
189 STATIC_ASSERT(KindField::kSize == 3);
190
191 // BitFields for all unallocated operands.
192 class BasicPolicyField : public BitField<BasicPolicy, 3, 1> {};
193 class VirtualRegisterField : public BitField<unsigned, 4, 18> {};
194
195 // BitFields specific to BasicPolicy::FIXED_SLOT.
196 class FixedSlotIndexField : public BitField<int, 22, 10> {};
197
198 // BitFields specific to BasicPolicy::EXTENDED_POLICY.
199 class ExtendedPolicyField : public BitField<ExtendedPolicy, 22, 3> {};
200 class LifetimeField : public BitField<Lifetime, 25, 1> {};
201 class FixedRegisterField : public BitField<int, 26, 6> {};
202
203 static const int kMaxVirtualRegisters = VirtualRegisterField::kMax + 1;
204 static const int kFixedSlotIndexWidth = FixedSlotIndexField::kSize;
205 static const int kMaxFixedSlotIndex = (1 << (kFixedSlotIndexWidth - 1)) - 1;
206 static const int kMinFixedSlotIndex = -(1 << (kFixedSlotIndexWidth - 1));
207
208 // Predicates for the operand policy.
209 bool HasAnyPolicy() const {
210 return basic_policy() == EXTENDED_POLICY && extended_policy() == ANY;
211 }
212 bool HasFixedPolicy() const {
213 return basic_policy() == FIXED_SLOT ||
214 extended_policy() == FIXED_REGISTER ||
215 extended_policy() == FIXED_DOUBLE_REGISTER;
216 }
217 bool HasRegisterPolicy() const {
218 return basic_policy() == EXTENDED_POLICY &&
219 extended_policy() == MUST_HAVE_REGISTER;
220 }
221 bool HasSameAsInputPolicy() const {
222 return basic_policy() == EXTENDED_POLICY &&
223 extended_policy() == SAME_AS_FIRST_INPUT;
224 }
225 bool HasFixedSlotPolicy() const { return basic_policy() == FIXED_SLOT; }
226 bool HasFixedRegisterPolicy() const {
227 return basic_policy() == EXTENDED_POLICY &&
228 extended_policy() == FIXED_REGISTER;
229 }
230 bool HasFixedDoubleRegisterPolicy() const {
231 return basic_policy() == EXTENDED_POLICY &&
232 extended_policy() == FIXED_DOUBLE_REGISTER;
233 }
234
235 // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
236 BasicPolicy basic_policy() const { return BasicPolicyField::decode(value_); }
237
238 // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
239 ExtendedPolicy extended_policy() const {
240 DCHECK(basic_policy() == EXTENDED_POLICY);
241 return ExtendedPolicyField::decode(value_);
242 }
243
244 // [fixed_slot_index]: Only for FIXED_SLOT.
245 int fixed_slot_index() const {
246 DCHECK(HasFixedSlotPolicy());
247 return static_cast<int>(value_) >> FixedSlotIndexField::kShift;
248 }
249
250 // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_DOUBLE_REGISTER.
251 int fixed_register_index() const {
252 DCHECK(HasFixedRegisterPolicy() || HasFixedDoubleRegisterPolicy());
253 return FixedRegisterField::decode(value_);
254 }
255
256 // [virtual_register]: The virtual register ID for this operand.
257 int virtual_register() const { return VirtualRegisterField::decode(value_); }
258 void set_virtual_register(unsigned id) {
259 value_ = VirtualRegisterField::update(value_, id);
260 }
261
262 // [lifetime]: Only for non-FIXED_SLOT.
263 bool IsUsedAtStart() {
264 DCHECK(basic_policy() == EXTENDED_POLICY);
265 return LifetimeField::decode(value_) == USED_AT_START;
266 }
267};
268
269
// A (source, destination) operand pair, as collected in a ParallelMove and
// processed by the gap resolver.
class MoveOperands FINAL {
 public:
  MoveOperands(InstructionOperand* source, InstructionOperand* destination)
      : source_(source), destination_(destination) {}

  InstructionOperand* source() const { return source_; }
  void set_source(InstructionOperand* operand) { source_ = operand; }

  InstructionOperand* destination() const { return destination_; }
  void set_destination(InstructionOperand* operand) { destination_ = operand; }

  // The gap resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  bool IsPending() const { return destination_ == NULL && source_ != NULL; }

  // True if this move reads the given operand (note: compares against
  // source(), not destination()), i.e. |operand| must not be overwritten
  // before this move has been performed.
  bool Blocks(InstructionOperand* operand) const {
    return !IsEliminated() && source()->Equals(operand);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded or constant.
  bool IsRedundant() const {
    return IsEliminated() || source_->Equals(destination_) || IsIgnored() ||
           (destination_ != NULL && destination_->IsConstant());
  }

  bool IsIgnored() const {
    return destination_ != NULL && destination_->IsIgnored();
  }

  // We clear both operands to indicate a move that's been eliminated.
  void Eliminate() { source_ = destination_ = NULL; }
  bool IsEliminated() const {
    // Invariant: a cleared source implies a cleared destination — there is
    // no destination-only state.
    DCHECK(source_ != NULL || destination_ == NULL);
    return source_ == NULL;
  }

 private:
  InstructionOperand* source_;
  InstructionOperand* destination_;
};

OStream& operator<<(OStream& os, const MoveOperands& mo);
314
// Concrete operand for one fixed Kind. The first kNumCachedOperands
// indices share statically allocated instances (see SetUpCache()); larger
// indices are allocated in the zone.
template <InstructionOperand::Kind kOperandKind, int kNumCachedOperands>
class SubKindOperand FINAL : public InstructionOperand {
 public:
  // Returns the cached operand for small indices, otherwise a fresh
  // zone-allocated one.
  static SubKindOperand* Create(int index, Zone* zone) {
    DCHECK(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new (zone) SubKindOperand(index);
  }

  static SubKindOperand* cast(InstructionOperand* op) {
    DCHECK(op->kind() == kOperandKind);
    return reinterpret_cast<SubKindOperand*>(op);
  }

  static void SetUpCache();
  static void TearDownCache();

 private:
  // Array of kNumCachedOperands pre-built instances, one per index.
  static SubKindOperand* cache;

  SubKindOperand() : InstructionOperand() {}
  explicit SubKindOperand(int index)
      : InstructionOperand(kOperandKind, index) {}
};
339
340
// Declares ConstantOperand, ImmediateOperand, StackSlotOperand,
// DoubleStackSlotOperand, RegisterOperand and DoubleRegisterOperand.
#define INSTRUCTION_TYPEDEF_SUBKIND_OPERAND_CLASS(name, type, number) \
  typedef SubKindOperand<InstructionOperand::type, number> name##Operand;
INSTRUCTION_OPERAND_LIST(INSTRUCTION_TYPEDEF_SUBKIND_OPERAND_CLASS)
#undef INSTRUCTION_TYPEDEF_SUBKIND_OPERAND_CLASS
345
346
// A set of moves that conceptually happen simultaneously at one gap
// position.
class ParallelMove FINAL : public ZoneObject {
 public:
  explicit ParallelMove(Zone* zone) : move_operands_(4, zone) {}

  void AddMove(InstructionOperand* from, InstructionOperand* to, Zone* zone) {
    move_operands_.Add(MoveOperands(from, to), zone);
  }

  // True if every contained move is redundant (see MoveOperands::IsRedundant).
  bool IsRedundant() const;

  ZoneList<MoveOperands>* move_operands() { return &move_operands_; }
  const ZoneList<MoveOperands>* move_operands() const {
    return &move_operands_;
  }

 private:
  ZoneList<MoveOperands> move_operands_;
};

OStream& operator<<(OStream& os, const ParallelMove& pm);
367
// Records which operands hold tagged values at a given instruction
// position (the TODO below suggests the eventual name "ReferenceMap").
// Operands can also be recorded as untagged; normalization removes those
// from the pointer set.
class PointerMap FINAL : public ZoneObject {
 public:
  explicit PointerMap(Zone* zone)
      : pointer_operands_(8, zone),
        untagged_operands_(0, zone),
        instruction_position_(-1) {}

  // Removes every recorded untagged operand from the pointer set and
  // returns the remaining (tagged) operands. Clears the untagged list.
  const ZoneList<InstructionOperand*>* GetNormalizedOperands() {
    for (int i = 0; i < untagged_operands_.length(); ++i) {
      RemovePointer(untagged_operands_[i]);
    }
    untagged_operands_.Clear();
    return &pointer_operands_;
  }
  int instruction_position() const { return instruction_position_; }

  // May only be set once (the position starts out as -1).
  void set_instruction_position(int pos) {
    DCHECK(instruction_position_ == -1);
    instruction_position_ = pos;
  }

  void RecordPointer(InstructionOperand* op, Zone* zone);
  void RemovePointer(InstructionOperand* op);
  void RecordUntagged(InstructionOperand* op, Zone* zone);

 private:
  friend OStream& operator<<(OStream& os, const PointerMap& pm);

  ZoneList<InstructionOperand*> pointer_operands_;
  ZoneList<InstructionOperand*> untagged_operands_;
  int instruction_position_;
};

OStream& operator<<(OStream& os, const PointerMap& pm);
402
// TODO(titzer): s/PointerMap/ReferenceMap/
// A single machine instruction: an opcode plus a variable number of
// output, input and temp operands stored inline after the fixed header
// (see the size computation in New()).
class Instruction : public ZoneObject {
 public:
  size_t OutputCount() const { return OutputCountField::decode(bit_field_); }
  InstructionOperand* OutputAt(size_t i) const {
    DCHECK(i < OutputCount());
    return operands_[i];
  }

  bool HasOutput() const { return OutputCount() == 1; }
  InstructionOperand* Output() const { return OutputAt(0); }

  size_t InputCount() const { return InputCountField::decode(bit_field_); }
  InstructionOperand* InputAt(size_t i) const {
    DCHECK(i < InputCount());
    // Inputs are stored after all outputs in operands_.
    return operands_[OutputCount() + i];
  }

  size_t TempCount() const { return TempCountField::decode(bit_field_); }
  InstructionOperand* TempAt(size_t i) const {
    DCHECK(i < TempCount());
    // Temps are stored after outputs and inputs in operands_.
    return operands_[OutputCount() + InputCount() + i];
  }

  // The opcode word also encodes addressing mode, flags mode and flags
  // condition; see the *Field decoders below.
  InstructionCode opcode() const { return opcode_; }
  ArchOpcode arch_opcode() const { return ArchOpcodeField::decode(opcode()); }
  AddressingMode addressing_mode() const {
    return AddressingModeField::decode(opcode());
  }
  FlagsMode flags_mode() const { return FlagsModeField::decode(opcode()); }
  FlagsCondition flags_condition() const {
    return FlagsConditionField::decode(opcode());
  }

  // TODO(titzer): make control and call into flags.
  static Instruction* New(Zone* zone, InstructionCode opcode) {
    return New(zone, opcode, 0, NULL, 0, NULL, 0, NULL);
  }

  static Instruction* New(Zone* zone, InstructionCode opcode,
                          size_t output_count, InstructionOperand** outputs,
                          size_t input_count, InstructionOperand** inputs,
                          size_t temp_count, InstructionOperand** temps) {
    // Reserved (negative) opcodes must go through GapInstruction::New etc.
    DCHECK(opcode >= 0);
    DCHECK(output_count == 0 || outputs != NULL);
    DCHECK(input_count == 0 || inputs != NULL);
    DCHECK(temp_count == 0 || temps != NULL);
    InstructionOperand* none = NULL;
    USE(none);
    // Over-allocate so the trailing operands_ array can hold all operands.
    // The "- 1" accounts for the one operands_[1] element already counted
    // in sizeof(Instruction); when all counts are zero the size_t
    // wrap-around is well-defined and still yields the correct total.
    int size = static_cast<int>(RoundUp(sizeof(Instruction), kPointerSize) +
                                (output_count + input_count + temp_count - 1) *
                                    sizeof(none));
    return new (zone->New(size)) Instruction(
        opcode, output_count, outputs, input_count, inputs, temp_count, temps);
  }

  // TODO(titzer): another holdover from lithium days; register allocator
  // should not need to know about control instructions.
  Instruction* MarkAsControl() {
    bit_field_ = IsControlField::update(bit_field_, true);
    return this;
  }
  Instruction* MarkAsCall() {
    bit_field_ = IsCallField::update(bit_field_, true);
    return this;
  }
  bool IsControl() const { return IsControlField::decode(bit_field_); }
  bool IsCall() const { return IsCallField::decode(bit_field_); }
  bool NeedsPointerMap() const { return IsCall(); }
  bool HasPointerMap() const { return pointer_map_ != NULL; }

  // Block starts count as gap instructions too (BlockStartInstruction
  // derives from GapInstruction).
  bool IsGapMoves() const {
    return opcode() == kGapInstruction || opcode() == kBlockStartInstruction;
  }
  bool IsBlockStart() const { return opcode() == kBlockStartInstruction; }
  bool IsSourcePosition() const {
    return opcode() == kSourcePositionInstruction;
  }

  // Clobbering is currently synonymous with being a call.
  bool ClobbersRegisters() const { return IsCall(); }
  bool ClobbersTemps() const { return IsCall(); }
  bool ClobbersDoubleRegisters() const { return IsCall(); }
  PointerMap* pointer_map() const { return pointer_map_; }

  // May only be set once, and only on instructions that need one.
  void set_pointer_map(PointerMap* map) {
    DCHECK(NeedsPointerMap());
    DCHECK_EQ(NULL, pointer_map_);
    pointer_map_ = map;
  }

  // Placement new operator so that we can smash instructions into
  // zone-allocated memory.
  void* operator new(size_t, void* location) { return location; }

  // Matching placement delete; only invoked if a constructor throws.
  void operator delete(void* pointer, void* location) { UNREACHABLE(); }

 protected:
  explicit Instruction(InstructionCode opcode)
      : opcode_(opcode),
        bit_field_(OutputCountField::encode(0) | InputCountField::encode(0) |
                   TempCountField::encode(0) | IsCallField::encode(false) |
                   IsControlField::encode(false)),
        pointer_map_(NULL) {}

  Instruction(InstructionCode opcode, size_t output_count,
              InstructionOperand** outputs, size_t input_count,
              InstructionOperand** inputs, size_t temp_count,
              InstructionOperand** temps)
      : opcode_(opcode),
        bit_field_(OutputCountField::encode(output_count) |
                   InputCountField::encode(input_count) |
                   TempCountField::encode(temp_count) |
                   IsCallField::encode(false) | IsControlField::encode(false)),
        pointer_map_(NULL) {
    // Copy operands into the trailing array in the layout OutputAt /
    // InputAt / TempAt assume: outputs, then inputs, then temps.
    for (size_t i = 0; i < output_count; ++i) {
      operands_[i] = outputs[i];
    }
    for (size_t i = 0; i < input_count; ++i) {
      operands_[output_count + i] = inputs[i];
    }
    for (size_t i = 0; i < temp_count; ++i) {
      operands_[output_count + input_count + i] = temps[i];
    }
  }

 protected:
  typedef BitField<size_t, 0, 8> OutputCountField;
  typedef BitField<size_t, 8, 16> InputCountField;
  typedef BitField<size_t, 24, 6> TempCountField;
  typedef BitField<bool, 30, 1> IsCallField;
  typedef BitField<bool, 31, 1> IsControlField;

  InstructionCode opcode_;
  uint32_t bit_field_;
  PointerMap* pointer_map_;
  // Variable-length trailing array ("struct hack"); the real length is
  // OutputCount() + InputCount() + TempCount(), allocated by New().
  InstructionOperand* operands_[1];
};

OStream& operator<<(OStream& os, const Instruction& instr);
542
// Represents moves inserted before an instruction due to register allocation.
// TODO(titzer): squash GapInstruction back into Instruction, since essentially
// every instruction can possibly have moves inserted before it.
class GapInstruction : public Instruction {
 public:
  // Positions within the gap at which a ParallelMove can be attached.
  enum InnerPosition {
    BEFORE,
    START,
    END,
    AFTER,
    FIRST_INNER_POSITION = BEFORE,
    LAST_INNER_POSITION = AFTER
  };

  // Returns the parallel move at |pos|, lazily allocating it in |zone|.
  ParallelMove* GetOrCreateParallelMove(InnerPosition pos, Zone* zone) {
    if (parallel_moves_[pos] == NULL) {
      parallel_moves_[pos] = new (zone) ParallelMove(zone);
    }
    return parallel_moves_[pos];
  }

  // Returns the parallel move at |pos|, or NULL if none was created.
  ParallelMove* GetParallelMove(InnerPosition pos) {
    return parallel_moves_[pos];
  }

  static GapInstruction* New(Zone* zone) {
    void* buffer = zone->New(sizeof(GapInstruction));
    return new (buffer) GapInstruction(kGapInstruction);
  }

  static GapInstruction* cast(Instruction* instr) {
    DCHECK(instr->IsGapMoves());
    return static_cast<GapInstruction*>(instr);
  }

  static const GapInstruction* cast(const Instruction* instr) {
    DCHECK(instr->IsGapMoves());
    return static_cast<const GapInstruction*>(instr);
  }

 protected:
  explicit GapInstruction(InstructionCode opcode) : Instruction(opcode) {
    parallel_moves_[BEFORE] = NULL;
    parallel_moves_[START] = NULL;
    parallel_moves_[END] = NULL;
    parallel_moves_[AFTER] = NULL;
  }

 private:
  friend OStream& operator<<(OStream& os, const Instruction& instr);
  ParallelMove* parallel_moves_[LAST_INNER_POSITION + 1];
};
595
596
// This special kind of gap move instruction represents the beginning of a
// block of code.
// TODO(titzer): move code_start and code_end from BasicBlock to here.
class BlockStartInstruction FINAL : public GapInstruction {
 public:
  BasicBlock* block() const { return block_; }
  // The assembler label associated with this block.
  Label* label() { return &label_; }

  static BlockStartInstruction* New(Zone* zone, BasicBlock* block) {
    void* buffer = zone->New(sizeof(BlockStartInstruction));
    return new (buffer) BlockStartInstruction(block);
  }

  static BlockStartInstruction* cast(Instruction* instr) {
    DCHECK(instr->IsBlockStart());
    return static_cast<BlockStartInstruction*>(instr);
  }

 private:
  explicit BlockStartInstruction(BasicBlock* block)
      : GapInstruction(kBlockStartInstruction), block_(block) {}

  BasicBlock* block_;
  Label label_;
};
622
623
// A marker instruction carrying a source position. Must never be created
// with an invalid or unknown position (see the constructor DCHECKs).
class SourcePositionInstruction FINAL : public Instruction {
 public:
  static SourcePositionInstruction* New(Zone* zone, SourcePosition position) {
    void* buffer = zone->New(sizeof(SourcePositionInstruction));
    return new (buffer) SourcePositionInstruction(position);
  }

  SourcePosition source_position() const { return source_position_; }

  static SourcePositionInstruction* cast(Instruction* instr) {
    DCHECK(instr->IsSourcePosition());
    return static_cast<SourcePositionInstruction*>(instr);
  }

  static const SourcePositionInstruction* cast(const Instruction* instr) {
    DCHECK(instr->IsSourcePosition());
    return static_cast<const SourcePositionInstruction*>(instr);
  }

 private:
  explicit SourcePositionInstruction(SourcePosition source_position)
      : Instruction(kSourcePositionInstruction),
        source_position_(source_position) {
    // Callers are expected to filter out invalid/unknown positions.
    DCHECK(!source_position_.IsInvalid());
    DCHECK(!source_position_.IsUnknown());
  }

  SourcePosition source_position_;
};
653
654
655class Constant FINAL {
656 public:
657 enum Type { kInt32, kInt64, kFloat64, kExternalReference, kHeapObject };
658
659 explicit Constant(int32_t v) : type_(kInt32), value_(v) {}
660 explicit Constant(int64_t v) : type_(kInt64), value_(v) {}
661 explicit Constant(double v) : type_(kFloat64), value_(bit_cast<int64_t>(v)) {}
662 explicit Constant(ExternalReference ref)
663 : type_(kExternalReference), value_(bit_cast<intptr_t>(ref)) {}
664 explicit Constant(Handle<HeapObject> obj)
665 : type_(kHeapObject), value_(bit_cast<intptr_t>(obj)) {}
666
667 Type type() const { return type_; }
668
669 int32_t ToInt32() const {
670 DCHECK_EQ(kInt32, type());
671 return static_cast<int32_t>(value_);
672 }
673
674 int64_t ToInt64() const {
675 if (type() == kInt32) return ToInt32();
676 DCHECK_EQ(kInt64, type());
677 return value_;
678 }
679
680 double ToFloat64() const {
681 if (type() == kInt32) return ToInt32();
682 DCHECK_EQ(kFloat64, type());
683 return bit_cast<double>(value_);
684 }
685
686 ExternalReference ToExternalReference() const {
687 DCHECK_EQ(kExternalReference, type());
688 return bit_cast<ExternalReference>(static_cast<intptr_t>(value_));
689 }
690
691 Handle<HeapObject> ToHeapObject() const {
692 DCHECK_EQ(kHeapObject, type());
693 return bit_cast<Handle<HeapObject> >(static_cast<intptr_t>(value_));
694 }
695
696 private:
697 Type type_;
698 int64_t value_;
699};
700
701
702class FrameStateDescriptor : public ZoneObject {
703 public:
704 FrameStateDescriptor(const FrameStateCallInfo& state_info,
705 size_t parameters_count, size_t locals_count,
706 size_t stack_count,
707 FrameStateDescriptor* outer_state = NULL)
708 : type_(state_info.type()),
709 bailout_id_(state_info.bailout_id()),
710 frame_state_combine_(state_info.state_combine()),
711 parameters_count_(parameters_count),
712 locals_count_(locals_count),
713 stack_count_(stack_count),
714 outer_state_(outer_state),
715 jsfunction_(state_info.jsfunction()) {}
716
717 FrameStateType type() const { return type_; }
718 BailoutId bailout_id() const { return bailout_id_; }
719 OutputFrameStateCombine state_combine() const { return frame_state_combine_; }
720 size_t parameters_count() const { return parameters_count_; }
721 size_t locals_count() const { return locals_count_; }
722 size_t stack_count() const { return stack_count_; }
723 FrameStateDescriptor* outer_state() const { return outer_state_; }
724 MaybeHandle<JSFunction> jsfunction() const { return jsfunction_; }
725
726 size_t size() const {
727 return parameters_count_ + locals_count_ + stack_count_ +
728 (HasContext() ? 1 : 0);
729 }
730
731 size_t GetTotalSize() const {
732 size_t total_size = 0;
733 for (const FrameStateDescriptor* iter = this; iter != NULL;
734 iter = iter->outer_state_) {
735 total_size += iter->size();
736 }
737 return total_size;
738 }
739
740 size_t GetHeight(OutputFrameStateCombine override) const {
741 size_t height = size() - parameters_count();
742 switch (override) {
743 case kPushOutput:
744 ++height;
745 break;
746 case kIgnoreOutput:
747 break;
748 }
749 return height;
750 }
751
752 size_t GetFrameCount() const {
753 size_t count = 0;
754 for (const FrameStateDescriptor* iter = this; iter != NULL;
755 iter = iter->outer_state_) {
756 ++count;
757 }
758 return count;
759 }
760
761 size_t GetJSFrameCount() const {
762 size_t count = 0;
763 for (const FrameStateDescriptor* iter = this; iter != NULL;
764 iter = iter->outer_state_) {
765 if (iter->type_ == JS_FRAME) {
766 ++count;
767 }
768 }
769 return count;
770 }
771
772 bool HasContext() const { return type_ == JS_FRAME; }
773
774 private:
775 FrameStateType type_;
776 BailoutId bailout_id_;
777 OutputFrameStateCombine frame_state_combine_;
778 size_t parameters_count_;
779 size_t locals_count_;
780 size_t stack_count_;
781 FrameStateDescriptor* outer_state_;
782 MaybeHandle<JSFunction> jsfunction_;
783};
784
OStream& operator<<(OStream& os, const Constant& constant);

// Zone-allocated containers used by InstructionSequence.
typedef ZoneDeque<Constant> ConstantDeque;
typedef std::map<int, Constant, std::less<int>,
                 zone_allocator<std::pair<int, Constant> > > ConstantMap;

typedef ZoneDeque<Instruction*> InstructionDeque;
typedef ZoneDeque<PointerMap*> PointerMapDeque;
typedef ZoneVector<FrameStateDescriptor*> DeoptimizationVector;
794
// Represents architecture-specific generated code before, during, and after
// register allocation.
// TODO(titzer): s/IsDouble/IsFloat64/
class InstructionSequence FINAL {
 public:
  InstructionSequence(Linkage* linkage, Graph* graph, Schedule* schedule)
      : graph_(graph),
        linkage_(linkage),
        schedule_(schedule),
        constants_(ConstantMap::key_compare(),
                   ConstantMap::allocator_type(zone())),
        immediates_(zone()),
        instructions_(zone()),
        // Fresh virtual registers start past the graph's node count;
        // node ids double as virtual registers (see GetVirtualRegister).
        next_virtual_register_(graph->NodeCount()),
        pointer_maps_(zone()),
        doubles_(std::less<int>(), VirtualRegisterSet::allocator_type(zone())),
        references_(std::less<int>(),
                    VirtualRegisterSet::allocator_type(zone())),
        deoptimization_entries_(zone()) {}

  int NextVirtualRegister() { return next_virtual_register_++; }
  int VirtualRegisterCount() const { return next_virtual_register_; }

  int ValueCount() const { return graph_->NodeCount(); }

  int BasicBlockCount() const {
    return static_cast<int>(schedule_->rpo_order()->size());
  }

  BasicBlock* BlockAt(int rpo_number) const {
    return (*schedule_->rpo_order())[rpo_number];
  }

  BasicBlock* GetContainingLoop(BasicBlock* block) {
    return block->loop_header_;
  }

  int GetLoopEnd(BasicBlock* block) const { return block->loop_end_; }

  BasicBlock* GetBasicBlock(int instruction_index);

  // Node ids are used directly as virtual register numbers.
  int GetVirtualRegister(Node* node) const { return node->id(); }

  bool IsReference(int virtual_register) const;
  bool IsDouble(int virtual_register) const;

  void MarkAsReference(int virtual_register);
  void MarkAsDouble(int virtual_register);

  void AddGapMove(int index, InstructionOperand* from, InstructionOperand* to);

  Label* GetLabel(BasicBlock* block);
  BlockStartInstruction* GetBlockStart(BasicBlock* block);

  typedef InstructionDeque::const_iterator const_iterator;
  const_iterator begin() const { return instructions_.begin(); }
  const_iterator end() const { return instructions_.end(); }

  GapInstruction* GapAt(int index) const {
    return GapInstruction::cast(InstructionAt(index));
  }
  bool IsGapAt(int index) const { return InstructionAt(index)->IsGapMoves(); }
  Instruction* InstructionAt(int index) const {
    DCHECK(index >= 0);
    DCHECK(index < static_cast<int>(instructions_.size()));
    return instructions_[index];
  }

  Frame* frame() { return &frame_; }
  Graph* graph() const { return graph_; }
  Isolate* isolate() const { return zone()->isolate(); }
  Linkage* linkage() const { return linkage_; }
  Schedule* schedule() const { return schedule_; }
  const PointerMapDeque* pointer_maps() const { return &pointer_maps_; }
  // All containers in this sequence allocate from the graph's zone.
  Zone* zone() const { return graph_->zone(); }

  // Used by the code generator while adding instructions.
  int AddInstruction(Instruction* instr, BasicBlock* block);
  void StartBlock(BasicBlock* block);
  void EndBlock(BasicBlock* block);

  // Binds |virtual_register| to a constant; each vreg may only be bound
  // once.
  void AddConstant(int virtual_register, Constant constant) {
    DCHECK(constants_.find(virtual_register) == constants_.end());
    constants_.insert(std::make_pair(virtual_register, constant));
  }
  Constant GetConstant(int virtual_register) const {
    ConstantMap::const_iterator it = constants_.find(virtual_register);
    DCHECK(it != constants_.end());
    DCHECK_EQ(virtual_register, it->first);
    return it->second;
  }

  typedef ConstantDeque Immediates;
  const Immediates& immediates() const { return immediates_; }

  // Appends |constant| to the immediates and returns its index.
  int AddImmediate(Constant constant) {
    int index = static_cast<int>(immediates_.size());
    immediates_.push_back(constant);
    return index;
  }
  Constant GetImmediate(int index) const {
    DCHECK(index >= 0);
    DCHECK(index < static_cast<int>(immediates_.size()));
    return immediates_[index];
  }

  // Opaque identifier for a registered FrameStateDescriptor.
  class StateId {
   public:
    static StateId FromInt(int id) { return StateId(id); }
    int ToInt() const { return id_; }

   private:
    explicit StateId(int id) : id_(id) {}
    int id_;
  };

  StateId AddFrameStateDescriptor(FrameStateDescriptor* descriptor);
  FrameStateDescriptor* GetFrameStateDescriptor(StateId deoptimization_id);
  int GetFrameStateDescriptorCount();

 private:
  friend OStream& operator<<(OStream& os, const InstructionSequence& code);

  typedef std::set<int, std::less<int>, ZoneIntAllocator> VirtualRegisterSet;

  Graph* graph_;
  Linkage* linkage_;
  Schedule* schedule_;
  ConstantMap constants_;
  ConstantDeque immediates_;
  InstructionDeque instructions_;
  int next_virtual_register_;
  PointerMapDeque pointer_maps_;
  // Sets of vregs holding double / tagged-reference values.
  VirtualRegisterSet doubles_;
  VirtualRegisterSet references_;
  Frame frame_;
  DeoptimizationVector deoptimization_entries_;
};

OStream& operator<<(OStream& os, const InstructionSequence& code);
935
936} // namespace compiler
937} // namespace internal
938} // namespace v8
939
940#endif // V8_COMPILER_INSTRUCTION_H_