Update V8 to r6768 as required by WebKit r78450

Change-Id: Ib8868ff7147a76547a8d1d85f257ebe8546a3d3f
diff --git a/src/lithium.h b/src/lithium.h
index 5f7c92f..a2f9df0 100644
--- a/src/lithium.h
+++ b/src/lithium.h
@@ -29,12 +29,360 @@
 #define V8_LITHIUM_H_
 
 #include "hydrogen.h"
-#include "lithium-allocator.h"
 #include "safepoint-table.h"
 
 namespace v8 {
 namespace internal {
 
+class LOperand: public ZoneObject {
+ public:
+  enum Kind {
+    INVALID,
+    UNALLOCATED,
+    CONSTANT_OPERAND,
+    STACK_SLOT,
+    DOUBLE_STACK_SLOT,
+    REGISTER,
+    DOUBLE_REGISTER,
+    ARGUMENT
+  };
+
+  LOperand() : value_(KindField::encode(INVALID)) { }
+
+  Kind kind() const { return KindField::decode(value_); }
+  int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
+  bool IsConstantOperand() const { return kind() == CONSTANT_OPERAND; }
+  bool IsStackSlot() const { return kind() == STACK_SLOT; }
+  bool IsDoubleStackSlot() const { return kind() == DOUBLE_STACK_SLOT; }
+  bool IsRegister() const { return kind() == REGISTER; }
+  bool IsDoubleRegister() const { return kind() == DOUBLE_REGISTER; }
+  bool IsArgument() const { return kind() == ARGUMENT; }
+  bool IsUnallocated() const { return kind() == UNALLOCATED; }
+  bool Equals(LOperand* other) const { return value_ == other->value_; }
+  int VirtualRegister();
+
+  void PrintTo(StringStream* stream);
+  void ConvertTo(Kind kind, int index) {
+    value_ = KindField::encode(kind);
+    value_ |= index << kKindFieldWidth;
+    ASSERT(this->index() == index);
+  }
+
+ protected:
+  static const int kKindFieldWidth = 3;
+  class KindField : public BitField<Kind, 0, kKindFieldWidth> { };
+
+  LOperand(Kind kind, int index) { ConvertTo(kind, index); }
+
+  unsigned value_;
+};
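+
+
+// Worked example of the encoding above (the numbers follow directly from the
+// Kind enum and kKindFieldWidth): an operand built with kind REGISTER and
+// index 2 stores (2 << 3) | REGISTER == 21 in value_; kind() decodes the low
+// three bits back to REGISTER and index() shifts them out to recover 2.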
+
+
+class LUnallocated: public LOperand {
+ public:
+  enum Policy {
+    NONE,
+    ANY,
+    FIXED_REGISTER,
+    FIXED_DOUBLE_REGISTER,
+    FIXED_SLOT,
+    MUST_HAVE_REGISTER,
+    WRITABLE_REGISTER,
+    SAME_AS_FIRST_INPUT,
+    IGNORE
+  };
+
+  // Lifetime of operand inside the instruction.
+  enum Lifetime {
+    // A USED_AT_START operand is guaranteed to be live only at the start of
+    // the instruction. The register allocator is free to assign the same
+    // register to some other operand used inside the instruction (i.e. a
+    // temporary or the output).
+    USED_AT_START,
+
+    // A USED_AT_END operand is treated as live until the end of the
+    // instruction. This means the register allocator will not reuse its
+    // register for any other operand inside the instruction.
+    USED_AT_END
+  };
+
+  explicit LUnallocated(Policy policy) : LOperand(UNALLOCATED, 0) {
+    Initialize(policy, 0, USED_AT_END);
+  }
+
+  LUnallocated(Policy policy, int fixed_index) : LOperand(UNALLOCATED, 0) {
+    Initialize(policy, fixed_index, USED_AT_END);
+  }
+
+  LUnallocated(Policy policy, Lifetime lifetime) : LOperand(UNALLOCATED, 0) {
+    Initialize(policy, 0, lifetime);
+  }
+
+  // The superclass has a KindField.  Some policies have a signed fixed
+  // index in the upper bits.
+  static const int kPolicyWidth = 4;
+  static const int kLifetimeWidth = 1;
+  static const int kVirtualRegisterWidth = 17;
+
+  static const int kPolicyShift = kKindFieldWidth;
+  static const int kLifetimeShift = kPolicyShift + kPolicyWidth;
+  static const int kVirtualRegisterShift = kLifetimeShift + kLifetimeWidth;
+  static const int kFixedIndexShift =
+      kVirtualRegisterShift + kVirtualRegisterWidth;
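+
+  // Resulting layout of value_ (32 bits, least-significant bit first):
+  //   bits  0.. 2  kind             (KindField, inherited from LOperand)
+  //   bits  3.. 6  policy           (PolicyField)
+  //   bit   7      lifetime         (LifetimeField)
+  //   bits  8..24  virtual register (VirtualRegisterField)
+  //   bits 25..31  fixed index      (signed; see fixed_index() below)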
+
+  class PolicyField : public BitField<Policy, kPolicyShift, kPolicyWidth> { };
+
+  class LifetimeField
+      : public BitField<Lifetime, kLifetimeShift, kLifetimeWidth> {
+  };
+
+  class VirtualRegisterField
+      : public BitField<unsigned,
+                        kVirtualRegisterShift,
+                        kVirtualRegisterWidth> {
+  };
+
+  static const int kMaxVirtualRegisters = 1 << (kVirtualRegisterWidth + 1);
+  static const int kMaxFixedIndices = 128;
+
+  bool HasIgnorePolicy() const { return policy() == IGNORE; }
+  bool HasNoPolicy() const { return policy() == NONE; }
+  bool HasAnyPolicy() const {
+    return policy() == ANY;
+  }
+  bool HasFixedPolicy() const {
+    return policy() == FIXED_REGISTER ||
+        policy() == FIXED_DOUBLE_REGISTER ||
+        policy() == FIXED_SLOT;
+  }
+  bool HasRegisterPolicy() const {
+    return policy() == WRITABLE_REGISTER || policy() == MUST_HAVE_REGISTER;
+  }
+  bool HasSameAsInputPolicy() const {
+    return policy() == SAME_AS_FIRST_INPUT;
+  }
+  Policy policy() const { return PolicyField::decode(value_); }
+  void set_policy(Policy policy) {
+    value_ &= ~PolicyField::mask();
+    value_ |= PolicyField::encode(policy);
+  }
+  int fixed_index() const {
+    return static_cast<int>(value_) >> kFixedIndexShift;
+  }
+
+  unsigned virtual_register() const {
+    return VirtualRegisterField::decode(value_);
+  }
+
+  void set_virtual_register(unsigned id) {
+    value_ &= ~VirtualRegisterField::mask();
+    value_ |= VirtualRegisterField::encode(id);
+  }
+
+  LUnallocated* CopyUnconstrained() {
+    LUnallocated* result = new LUnallocated(ANY);
+    result->set_virtual_register(virtual_register());
+    return result;
+  }
+
+  static LUnallocated* cast(LOperand* op) {
+    ASSERT(op->IsUnallocated());
+    return reinterpret_cast<LUnallocated*>(op);
+  }
+
+  bool IsUsedAtStart() {
+    return LifetimeField::decode(value_) == USED_AT_START;
+  }
+
+ private:
+  void Initialize(Policy policy, int fixed_index, Lifetime lifetime) {
+    value_ |= PolicyField::encode(policy);
+    value_ |= LifetimeField::encode(lifetime);
+    value_ |= fixed_index << kFixedIndexShift;
+    ASSERT(this->fixed_index() == fixed_index);
+  }
+};
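+
+
+// Construction sketch (illustrative only; the builder helpers that create
+// these operands live outside this header): a value that merely needs some
+// register is typically described as
+//   new LUnallocated(LUnallocated::MUST_HAVE_REGISTER)
+// while a value pinned to a particular register uses
+//   new LUnallocated(LUnallocated::FIXED_REGISTER, <allocation index>)
+// and the register allocator later converts such operands into allocated
+// ones (LRegister, LStackSlot, ...).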
+
+
+class LMoveOperands BASE_EMBEDDED {
+ public:
+  LMoveOperands(LOperand* source, LOperand* destination)
+      : source_(source), destination_(destination) {
+  }
+
+  LOperand* source() const { return source_; }
+  void set_source(LOperand* operand) { source_ = operand; }
+
+  LOperand* destination() const { return destination_; }
+  void set_destination(LOperand* operand) { destination_ = operand; }
+
+  // The gap resolver marks moves as "in-progress" by clearing the
+  // destination (but not the source).
+  bool IsPending() const {
+    return destination_ == NULL && source_ != NULL;
+  }
+
+  // True if this move's source is the given operand, i.e. this move blocks
+  // a move into that operand until it has been performed.
+  bool Blocks(LOperand* operand) const {
+    return !IsEliminated() && source()->Equals(operand);
+  }
+
+  // A move is redundant if it's been eliminated, if its source and
+  // destination are the same, or if its destination is unneeded.
+  bool IsRedundant() const {
+    return IsEliminated() || source_->Equals(destination_) || IsIgnored();
+  }
+
+  bool IsIgnored() const {
+    return destination_ != NULL &&
+        destination_->IsUnallocated() &&
+        LUnallocated::cast(destination_)->HasIgnorePolicy();
+  }
+
+  // We clear both operands to indicate a move that has been eliminated.
+  void Eliminate() { source_ = destination_ = NULL; }
+  bool IsEliminated() const {
+    ASSERT(source_ != NULL || destination_ == NULL);
+    return source_ == NULL;
+  }
+
+ private:
+  LOperand* source_;
+  LOperand* destination_;
+};
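+
+
+// Summary of the (source_, destination_) states used by the gap resolver:
+//   source != NULL, destination != NULL   ordinary move
+//   source != NULL, destination == NULL   move in progress ("pending")
+//   source == NULL, destination == NULL   eliminated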
+
+
+class LConstantOperand: public LOperand {
+ public:
+  static LConstantOperand* Create(int index) {
+    ASSERT(index >= 0);
+    if (index < kNumCachedOperands) return &cache[index];
+    return new LConstantOperand(index);
+  }
+
+  static LConstantOperand* cast(LOperand* op) {
+    ASSERT(op->IsConstantOperand());
+    return reinterpret_cast<LConstantOperand*>(op);
+  }
+
+  static void SetupCache();
+
+ private:
+  static const int kNumCachedOperands = 128;
+  static LConstantOperand cache[];
+
+  LConstantOperand() : LOperand() { }
+  explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { }
+};
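+
+
+// Note on the caching pattern shared by LConstantOperand and the operand
+// classes below: Create() returns a shared, statically allocated instance for
+// indices below kNumCachedOperands and zone-allocates a fresh operand
+// otherwise. The shared entries are default-constructed as INVALID, so
+// SetupCache() is expected to initialize them before the first Create() call.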
+
+
+class LArgument: public LOperand {
+ public:
+  explicit LArgument(int index) : LOperand(ARGUMENT, index) { }
+
+  static LArgument* cast(LOperand* op) {
+    ASSERT(op->IsArgument());
+    return reinterpret_cast<LArgument*>(op);
+  }
+};
+
+
+class LStackSlot: public LOperand {
+ public:
+  static LStackSlot* Create(int index) {
+    ASSERT(index >= 0);
+    if (index < kNumCachedOperands) return &cache[index];
+    return new LStackSlot(index);
+  }
+
+  static LStackSlot* cast(LOperand* op) {
+    ASSERT(op->IsStackSlot());
+    return reinterpret_cast<LStackSlot*>(op);
+  }
+
+  static void SetupCache();
+
+ private:
+  static const int kNumCachedOperands = 128;
+  static LStackSlot cache[];
+
+  LStackSlot() : LOperand() { }
+  explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { }
+};
+
+
+class LDoubleStackSlot: public LOperand {
+ public:
+  static LDoubleStackSlot* Create(int index) {
+    ASSERT(index >= 0);
+    if (index < kNumCachedOperands) return &cache[index];
+    return new LDoubleStackSlot(index);
+  }
+
+  static LDoubleStackSlot* cast(LOperand* op) {
+    ASSERT(op->IsDoubleStackSlot());
+    return reinterpret_cast<LDoubleStackSlot*>(op);
+  }
+
+  static void SetupCache();
+
+ private:
+  static const int kNumCachedOperands = 128;
+  static LDoubleStackSlot cache[];
+
+  LDoubleStackSlot() : LOperand() { }
+  explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { }
+};
+
+
+class LRegister: public LOperand {
+ public:
+  static LRegister* Create(int index) {
+    ASSERT(index >= 0);
+    if (index < kNumCachedOperands) return &cache[index];
+    return new LRegister(index);
+  }
+
+  static LRegister* cast(LOperand* op) {
+    ASSERT(op->IsRegister());
+    return reinterpret_cast<LRegister*>(op);
+  }
+
+  static void SetupCache();
+
+ private:
+  static const int kNumCachedOperands = 16;
+  static LRegister cache[];
+
+  LRegister() : LOperand() { }
+  explicit LRegister(int index) : LOperand(REGISTER, index) { }
+};
+
+
+class LDoubleRegister: public LOperand {
+ public:
+  static LDoubleRegister* Create(int index) {
+    ASSERT(index >= 0);
+    if (index < kNumCachedOperands) return &cache[index];
+    return new LDoubleRegister(index);
+  }
+
+  static LDoubleRegister* cast(LOperand* op) {
+    ASSERT(op->IsDoubleRegister());
+    return reinterpret_cast<LDoubleRegister*>(op);
+  }
+
+  static void SetupCache();
+
+ private:
+  static const int kNumCachedOperands = 16;
+  static LDoubleRegister cache[];
+
+  LDoubleRegister() : LOperand() { }
+  explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { }
+};
+
+
 class LParallelMove : public ZoneObject {
  public:
   LParallelMove() : move_operands_(4) { }
@@ -161,6 +509,82 @@
   friend class LCodegen;
 };
 
+
+// Iterates over the non-null, non-constant operands in an environment.
+class ShallowIterator BASE_EMBEDDED {
+ public:
+  explicit ShallowIterator(LEnvironment* env)
+      : env_(env),
+        limit_(env != NULL ? env->values()->length() : 0),
+        current_(0) {
+    current_ = AdvanceToNext(0);
+  }
+
+  inline bool HasNext() {
+    return env_ != NULL && current_ < limit_;
+  }
+
+  inline LOperand* Next() {
+    ASSERT(HasNext());
+    return env_->values()->at(current_);
+  }
+
+  inline void Advance() {
+    current_ = AdvanceToNext(current_ + 1);
+  }
+
+  inline LEnvironment* env() { return env_; }
+
+ private:
+  inline int AdvanceToNext(int start) {
+    while (start < limit_ &&
+           (env_->values()->at(start) == NULL ||
+            env_->values()->at(start)->IsConstantOperand())) {
+      start++;
+    }
+    return start;
+  }
+
+  LEnvironment* env_;
+  int limit_;
+  int current_;
+};
+
+
+// Iterates over the non-null, non-constant operands of an environment and
+// of all its outer environments.
+class DeepIterator BASE_EMBEDDED {
+ public:
+  explicit DeepIterator(LEnvironment* env)
+      : current_iterator_(env) { }
+
+  inline bool HasNext() {
+    if (current_iterator_.HasNext()) return true;
+    if (current_iterator_.env() == NULL) return false;
+    AdvanceToOuter();
+    return current_iterator_.HasNext();
+  }
+
+  inline LOperand* Next() {
+    ASSERT(current_iterator_.HasNext());
+    return current_iterator_.Next();
+  }
+
+  inline void Advance() {
+    if (current_iterator_.HasNext()) {
+      current_iterator_.Advance();
+    } else {
+      AdvanceToOuter();
+    }
+  }
+
+ private:
+  inline void AdvanceToOuter() {
+    current_iterator_ = ShallowIterator(current_iterator_.env()->outer());
+  }
+
+  ShallowIterator current_iterator_;
+};
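+
+// Typical traversal of an environment chain (an illustrative sketch, not a
+// definition from this header); note that DeepIterator::HasNext() may advance
+// to an outer environment as a side effect:
+//
+//   for (DeepIterator it(env); it.HasNext(); it.Advance()) {
+//     LOperand* operand = it.Next();
+//     // ... inspect or rewrite *operand ...
+//   }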
+
 } }  // namespace v8::internal
 
 #endif  // V8_LITHIUM_H_