Upgrade V8 to 5.1.281.57  DO NOT MERGE

FPIIM-449

Change-Id: Id981b686b4d587ac31697662eb98bb34be42ad90
(cherry picked from commit 3b9bc31999c9787eb726ecdbfd5796bfdec32a18)
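
The test changes below track the interpreter's move from dedicated wide
bytecode variants (kLdaSmi8, kCallRuntimeWide, kJumpConstant, and friends) to
operand scaling via Wide/ExtraWide prefix bytecodes. As a rough orientation
for reading the diffs, here is a minimal sketch of how a bytecode stream is
walked under the new scheme. It mirrors the scorecard loop updated in
bytecode-array-builder-unittest.cc; the Bytecodes helpers are the ones the
tests call, and the surrounding scaffolding is illustrative only.

    // Illustrative walk over a prefix-scaled bytecode stream (sketch, not
    // production code). `array` stands in for a Handle<BytecodeArray>.
    int offset = 0;
    while (offset < array->length()) {
      Bytecode bytecode = Bytecodes::FromByte(array->get(offset));
      OperandScale operand_scale = OperandScale::kSingle;
      int prefix_size = 0;
      if (Bytecodes::IsPrefixScalingBytecode(bytecode)) {
        // kWide scales operands to 2 bytes, kExtraWide to 4; the actual
        // bytecode follows the prefix byte.
        operand_scale = Bytecodes::PrefixBytecodeToOperandScale(bytecode);
        prefix_size = 1;
        bytecode = Bytecodes::FromByte(array->get(offset + prefix_size));
      }
      // Bytecodes::Size() now takes the operand scale; the prefix byte is
      // accounted for separately.
      offset += prefix_size + Bytecodes::Size(bytecode, operand_scale);
    }
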
diff --git a/test/unittests/interpreter/bytecode-array-builder-unittest.cc b/test/unittests/interpreter/bytecode-array-builder-unittest.cc
index 839215f..255d836 100644
--- a/test/unittests/interpreter/bytecode-array-builder-unittest.cc
+++ b/test/unittests/interpreter/bytecode-array-builder-unittest.cc
@@ -139,7 +139,6 @@
   builder.CompareOperation(Token::Value::EQ, reg)
       .CompareOperation(Token::Value::NE, reg)
       .CompareOperation(Token::Value::EQ_STRICT, reg)
-      .CompareOperation(Token::Value::NE_STRICT, reg)
       .CompareOperation(Token::Value::LT, reg)
       .CompareOperation(Token::Value::GT, reg)
       .CompareOperation(Token::Value::LTE, reg)
@@ -161,6 +160,21 @@
       .JumpIfUndefined(&start)
       .JumpIfNotHole(&start);
 
+  // Longer jumps with constant operands
+  BytecodeLabel end[8];
+  builder.Jump(&end[0])
+      .LoadTrue()
+      .JumpIfTrue(&end[1])
+      .LoadTrue()
+      .JumpIfFalse(&end[2])
+      .LoadLiteral(Smi::FromInt(0))
+      .JumpIfTrue(&end[3])
+      .LoadLiteral(Smi::FromInt(0))
+      .JumpIfFalse(&end[4])
+      .JumpIfNull(&end[5])
+      .JumpIfUndefined(&end[6])
+      .JumpIfNotHole(&end[7]);
+
   // Perform an operation that returns a boolean value to
   // generate JumpIfTrue/False
   builder.CompareOperation(Token::Value::EQ, reg)
@@ -205,11 +219,11 @@
 
   builder.ForInPrepare(reg)
       .ForInDone(reg, reg)
-      .ForInNext(reg, reg, reg)
+      .ForInNext(reg, reg, reg, 1)
       .ForInStep(reg);
   builder.ForInPrepare(wide)
       .ForInDone(reg, other)
-      .ForInNext(wide, wide, wide)
+      .ForInNext(wide, wide, wide, 1024)
       .ForInStep(reg);
 
   // Wide constant pool loads
@@ -223,9 +237,13 @@
   // Emit wide global load / store operations.
   builder.LoadGlobal(name, 1024, TypeofMode::NOT_INSIDE_TYPEOF)
       .LoadGlobal(name, 1024, TypeofMode::INSIDE_TYPEOF)
+      .LoadGlobal(name, 1024, TypeofMode::INSIDE_TYPEOF)
       .StoreGlobal(name, 1024, LanguageMode::SLOPPY)
       .StoreGlobal(wide_name, 1, LanguageMode::STRICT);
 
+  // Emit extra wide global load.
+  builder.LoadGlobal(name, 1024 * 1024, TypeofMode::NOT_INSIDE_TYPEOF);
+
   // Emit wide load / store property operations.
   builder.LoadNamedProperty(reg, wide_name, 0)
       .LoadKeyedProperty(reg, 2056)
@@ -271,28 +289,44 @@
       .BinaryOperation(Token::Value::ADD, reg)
       .JumpIfFalse(&start);
 
-  builder.Debugger();
+  // Intrinsics handled by the interpreter.
+  builder.CallRuntime(Runtime::kInlineIsArray, reg, 1)
+      .CallRuntime(Runtime::kInlineIsArray, wide, 1);
 
+  builder.Debugger();
+  for (size_t i = 0; i < arraysize(end); i++) {
+    builder.Bind(&end[i]);
+  }
   builder.Return();
 
   // Generate BytecodeArray.
   Handle<BytecodeArray> the_array = builder.ToBytecodeArray();
   CHECK_EQ(the_array->frame_size(),
-           (builder.fixed_and_temporary_register_count() +
-            builder.translation_register_count()) *
-               kPointerSize);
+           builder.fixed_and_temporary_register_count() * kPointerSize);
 
   // Build scorecard of bytecodes encountered in the BytecodeArray.
   std::vector<int> scorecard(Bytecodes::ToByte(Bytecode::kLast) + 1);
+
   Bytecode final_bytecode = Bytecode::kLdaZero;
   int i = 0;
   while (i < the_array->length()) {
     uint8_t code = the_array->get(i);
     scorecard[code] += 1;
     final_bytecode = Bytecodes::FromByte(code);
-    i += Bytecodes::Size(Bytecodes::FromByte(code));
+    OperandScale operand_scale = OperandScale::kSingle;
+    int prefix_offset = 0;
+    if (Bytecodes::IsPrefixScalingBytecode(final_bytecode)) {
+      operand_scale = Bytecodes::PrefixBytecodeToOperandScale(final_bytecode);
+      prefix_offset = 1;
+      code = the_array->get(i + 1);
+      final_bytecode = Bytecodes::FromByte(code);
+    }
+    i += prefix_offset + Bytecodes::Size(final_bytecode, operand_scale);
   }
 
+  // Insert entry for illegal bytecode as this is never willingly emitted.
+  scorecard[Bytecodes::ToByte(Bytecode::kIllegal)] = 1;
+
   // Check return occurs at the end and only once in the BytecodeArray.
   CHECK_EQ(final_bytecode, Bytecode::kReturn);
   CHECK_EQ(scorecard[Bytecodes::ToByte(final_bytecode)], 1);
@@ -330,7 +364,7 @@
 
 TEST_F(BytecodeArrayBuilderTest, RegisterValues) {
   int index = 1;
-  uint8_t operand = static_cast<uint8_t>(-index);
+  int32_t operand = -index;
 
   Register the_register(index);
   CHECK_EQ(the_register.index(), index);
@@ -531,6 +565,12 @@
   for (int i = 0; i < 63; i++) {
     builder.Jump(&label4);
   }
+
+  // Add padding to force wide backwards jumps.
+  for (int i = 0; i < 256; i++) {
+    builder.LoadTrue();
+  }
+
   builder.BinaryOperation(Token::Value::ADD, reg).JumpIfFalse(&label4);
   builder.BinaryOperation(Token::Value::ADD, reg).JumpIfTrue(&label3);
   builder.CompareOperation(Token::Value::EQ, reg).JumpIfFalse(&label2);
@@ -546,51 +586,65 @@
   // Ignore compare operation.
   iterator.Advance();
   CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfTrue);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
   CHECK_EQ(iterator.GetImmediateOperand(0), -2);
   iterator.Advance();
   // Ignore compare operation.
   iterator.Advance();
   CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfFalse);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
   CHECK_EQ(iterator.GetImmediateOperand(0), -2);
   iterator.Advance();
   // Ignore binary operation.
   iterator.Advance();
   CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfToBooleanTrue);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
   CHECK_EQ(iterator.GetImmediateOperand(0), -2);
   iterator.Advance();
   // Ignore binary operation.
   iterator.Advance();
   CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfToBooleanFalse);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
   CHECK_EQ(iterator.GetImmediateOperand(0), -2);
   iterator.Advance();
   for (int i = 0; i < 63; i++) {
     CHECK_EQ(iterator.current_bytecode(), Bytecode::kJump);
+    CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
     CHECK_EQ(iterator.GetImmediateOperand(0), -i * 2 - 4);
     iterator.Advance();
   }
+  // Check padding to force wide backwards jumps.
+  for (int i = 0; i < 256; i++) {
+    CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaTrue);
+    iterator.Advance();
+  }
   // Ignore binary operation.
   iterator.Advance();
-  CHECK_EQ(iterator.current_bytecode(),
-           Bytecode::kJumpIfToBooleanFalseConstant);
-  CHECK_EQ(Smi::cast(*iterator.GetConstantForIndexOperand(0))->value(), -132);
+  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfToBooleanFalse);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kDouble);
+  CHECK_EQ(iterator.GetImmediateOperand(0), -389);
   iterator.Advance();
   // Ignore binary operation.
   iterator.Advance();
-  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfToBooleanTrueConstant);
-  CHECK_EQ(Smi::cast(*iterator.GetConstantForIndexOperand(0))->value(), -140);
+  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfToBooleanTrue);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kDouble);
+  CHECK_EQ(iterator.GetImmediateOperand(0), -399);
   iterator.Advance();
   // Ignore compare operation.
   iterator.Advance();
-  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfFalseConstant);
-  CHECK_EQ(Smi::cast(*iterator.GetConstantForIndexOperand(0))->value(), -148);
+  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfFalse);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kDouble);
+  CHECK_EQ(iterator.GetImmediateOperand(0), -409);
   iterator.Advance();
   // Ignore compare operation.
   iterator.Advance();
-  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfTrueConstant);
-  CHECK_EQ(Smi::cast(*iterator.GetConstantForIndexOperand(0))->value(), -156);
+  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfTrue);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kDouble);
+  CHECK_EQ(iterator.GetImmediateOperand(0), -419);
   iterator.Advance();
-  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpConstant);
-  CHECK_EQ(Smi::cast(*iterator.GetConstantForIndexOperand(0))->value(), -160);
+  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJump);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kDouble);
+  CHECK_EQ(iterator.GetImmediateOperand(0), -425);
   iterator.Advance();
   CHECK_EQ(iterator.current_bytecode(), Bytecode::kReturn);
   iterator.Advance();
@@ -652,6 +706,85 @@
   CHECK(iterator.done());
 }
 
+TEST_F(BytecodeArrayBuilderTest, OperandScales) {
+  CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(OperandSize::kByte),
+           OperandScale::kSingle);
+  CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(OperandSize::kShort),
+           OperandScale::kDouble);
+  CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(OperandSize::kQuad),
+           OperandScale::kQuadruple);
+  CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(
+               OperandSize::kShort, OperandSize::kShort, OperandSize::kShort,
+               OperandSize::kShort),
+           OperandScale::kDouble);
+  CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(
+               OperandSize::kQuad, OperandSize::kShort, OperandSize::kShort,
+               OperandSize::kShort),
+           OperandScale::kQuadruple);
+  CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(
+               OperandSize::kShort, OperandSize::kQuad, OperandSize::kShort,
+               OperandSize::kShort),
+           OperandScale::kQuadruple);
+  CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(
+               OperandSize::kShort, OperandSize::kShort, OperandSize::kQuad,
+               OperandSize::kShort),
+           OperandScale::kQuadruple);
+  CHECK_EQ(BytecodeArrayBuilder::OperandSizesToScale(
+               OperandSize::kShort, OperandSize::kShort, OperandSize::kShort,
+               OperandSize::kQuad),
+           OperandScale::kQuadruple);
+}
+
+TEST_F(BytecodeArrayBuilderTest, SizesForSignOperands) {
+  CHECK(BytecodeArrayBuilder::SizeForSignedOperand(0) == OperandSize::kByte);
+  CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMaxInt8) ==
+        OperandSize::kByte);
+  CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMinInt8) ==
+        OperandSize::kByte);
+  CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMaxInt8 + 1) ==
+        OperandSize::kShort);
+  CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMinInt8 - 1) ==
+        OperandSize::kShort);
+  CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMaxInt16) ==
+        OperandSize::kShort);
+  CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMinInt16) ==
+        OperandSize::kShort);
+  CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMaxInt16 + 1) ==
+        OperandSize::kQuad);
+  CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMinInt16 - 1) ==
+        OperandSize::kQuad);
+  CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMaxInt) ==
+        OperandSize::kQuad);
+  CHECK(BytecodeArrayBuilder::SizeForSignedOperand(kMinInt) ==
+        OperandSize::kQuad);
+}
+
+TEST_F(BytecodeArrayBuilderTest, SizesForUnsignOperands) {
+  // int overloads
+  CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(0) == OperandSize::kByte);
+  CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(kMaxUInt8) ==
+        OperandSize::kByte);
+  CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(kMaxUInt8 + 1) ==
+        OperandSize::kShort);
+  CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(kMaxUInt16) ==
+        OperandSize::kShort);
+  CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(kMaxUInt16 + 1) ==
+        OperandSize::kQuad);
+  // size_t overloads
+  CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(static_cast<size_t>(0)) ==
+        OperandSize::kByte);
+  CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(
+            static_cast<size_t>(kMaxUInt8)) == OperandSize::kByte);
+  CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(
+            static_cast<size_t>(kMaxUInt8 + 1)) == OperandSize::kShort);
+  CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(
+            static_cast<size_t>(kMaxUInt16)) == OperandSize::kShort);
+  CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(
+            static_cast<size_t>(kMaxUInt16 + 1)) == OperandSize::kQuad);
+  CHECK(BytecodeArrayBuilder::SizeForUnsignedOperand(
+            static_cast<size_t>(kMaxUInt32)) == OperandSize::kQuad);
+}
+
 }  // namespace interpreter
 }  // namespace internal
 }  // namespace v8
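
The new SizesForSignOperands / SizesForUnsignOperands tests above pin down the
operand width thresholds. Written out as a hypothetical helper, the rule the
CHECKs encode is (a sketch of the implied behaviour, not the builder's actual
implementation):

    // Operand-size selection rule implied by the CHECKs above (sketch).
    OperandSize SizeForSigned(int32_t value) {
      if (value >= kMinInt8 && value <= kMaxInt8) return OperandSize::kByte;
      if (value >= kMinInt16 && value <= kMaxInt16) return OperandSize::kShort;
      return OperandSize::kQuad;
    }

    OperandSize SizeForUnsigned(uint32_t value) {
      if (value <= kMaxUInt8) return OperandSize::kByte;
      if (value <= kMaxUInt16) return OperandSize::kShort;
      return OperandSize::kQuad;
    }
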
diff --git a/test/unittests/interpreter/bytecode-array-iterator-unittest.cc b/test/unittests/interpreter/bytecode-array-iterator-unittest.cc
index f2dcd71..43c6caa 100644
--- a/test/unittests/interpreter/bytecode-array-iterator-unittest.cc
+++ b/test/unittests/interpreter/bytecode-array-iterator-unittest.cc
@@ -22,7 +22,7 @@
 TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
   // Use a builder to create an array containing multiple bytecodes
   // with 0, 1 and 2 operands.
-  BytecodeArrayBuilder builder(isolate(), zone(), 3, 2, 0);
+  BytecodeArrayBuilder builder(isolate(), zone(), 3, 3, 0);
   Factory* factory = isolate()->factory();
   Handle<HeapObject> heap_num_0 = factory->NewHeapNumber(2.718);
   Handle<HeapObject> heap_num_1 = factory->NewHeapNumber(2147483647);
@@ -31,9 +31,9 @@
   Smi* smi_1 = Smi::FromInt(-65536);
   Register reg_0(0);
   Register reg_1(1);
-  Register reg_2 = Register::FromParameterIndex(2, builder.parameter_count());
+  Register param = Register::FromParameterIndex(2, builder.parameter_count());
   Handle<String> name = factory->NewStringFromStaticChars("abc");
-  int name_index = 3;
+  int name_index = 2;
   int feedback_slot = 97;
 
   builder.LoadLiteral(heap_num_0)
@@ -43,67 +43,139 @@
       .LoadLiteral(smi_1)
       .LoadAccumulatorWithRegister(reg_0)
       .LoadNamedProperty(reg_1, name, feedback_slot)
-      .StoreAccumulatorInRegister(reg_2)
+      .StoreAccumulatorInRegister(param)
+      .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, 1, reg_0)
+      .ForInPrepare(reg_0)
       .CallRuntime(Runtime::kLoadIC_Miss, reg_0, 1)
       .Debugger()
+      .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
       .Return();
 
   // Test iterator sees the expected output from the builder.
   BytecodeArrayIterator iterator(builder.ToBytecodeArray());
+  const int kPrefixByteSize = 1;
+  int offset = 0;
+
   CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
+  CHECK_EQ(iterator.current_offset(), offset);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
   CHECK(iterator.GetConstantForIndexOperand(0).is_identical_to(heap_num_0));
   CHECK(!iterator.done());
+  offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
   iterator.Advance();
 
   CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
+  CHECK_EQ(iterator.current_offset(), offset);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
   CHECK(iterator.GetConstantForIndexOperand(0).is_identical_to(heap_num_1));
   CHECK(!iterator.done());
+  offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
   iterator.Advance();
 
   CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaZero);
+  CHECK_EQ(iterator.current_offset(), offset);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
   CHECK(!iterator.done());
+  offset += Bytecodes::Size(Bytecode::kLdaZero, OperandScale::kSingle);
   iterator.Advance();
 
-  CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi8);
+  CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
+  CHECK_EQ(iterator.current_offset(), offset);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
   CHECK_EQ(Smi::FromInt(iterator.GetImmediateOperand(0)), smi_0);
   CHECK(!iterator.done());
+  offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kSingle);
   iterator.Advance();
 
-  CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
-  CHECK_EQ(*iterator.GetConstantForIndexOperand(0), smi_1);
+  CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
+  CHECK_EQ(iterator.current_offset(), offset);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kQuadruple);
+  CHECK_EQ(Smi::FromInt(iterator.GetImmediateOperand(0)), smi_1);
   CHECK(!iterator.done());
+  offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kQuadruple) +
+            kPrefixByteSize;
   iterator.Advance();
 
   CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdar);
+  CHECK_EQ(iterator.current_offset(), offset);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
   CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
   CHECK(!iterator.done());
+  offset += Bytecodes::Size(Bytecode::kLdar, OperandScale::kSingle);
   iterator.Advance();
 
   CHECK_EQ(iterator.current_bytecode(), Bytecode::kLoadIC);
+  CHECK_EQ(iterator.current_offset(), offset);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
   CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
   CHECK_EQ(iterator.GetIndexOperand(1), name_index);
   CHECK_EQ(iterator.GetIndexOperand(2), feedback_slot);
   CHECK(!iterator.done());
+  offset += Bytecodes::Size(Bytecode::kLoadIC, OperandScale::kSingle);
   iterator.Advance();
 
   CHECK_EQ(iterator.current_bytecode(), Bytecode::kStar);
-  CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_2.index());
+  CHECK_EQ(iterator.current_offset(), offset);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+  CHECK_EQ(iterator.GetRegisterOperand(0).index(), param.index());
+  CHECK_EQ(iterator.GetRegisterOperandRange(0), 1);
   CHECK(!iterator.done());
+  offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+  iterator.Advance();
+
+  CHECK_EQ(iterator.current_bytecode(), Bytecode::kCallRuntimeForPair);
+  CHECK_EQ(iterator.current_offset(), offset);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+  CHECK_EQ(iterator.GetRuntimeIdOperand(0), Runtime::kLoadLookupSlotForCall);
+  CHECK_EQ(iterator.GetRegisterOperand(1).index(), param.index());
+  CHECK_EQ(iterator.GetRegisterOperandRange(1), 1);
+  CHECK_EQ(iterator.GetRegisterCountOperand(2), 1);
+  CHECK_EQ(iterator.GetRegisterOperand(3).index(), reg_0.index());
+  CHECK_EQ(iterator.GetRegisterOperandRange(3), 2);
+  CHECK(!iterator.done());
+  offset +=
+      Bytecodes::Size(Bytecode::kCallRuntimeForPair, OperandScale::kSingle);
+  iterator.Advance();
+
+  CHECK_EQ(iterator.current_bytecode(), Bytecode::kForInPrepare);
+  CHECK_EQ(iterator.current_offset(), offset);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+  CHECK_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+  CHECK_EQ(iterator.GetRegisterOperandRange(0), 3);
+  CHECK(!iterator.done());
+  offset += Bytecodes::Size(Bytecode::kForInPrepare, OperandScale::kSingle);
   iterator.Advance();
 
   CHECK_EQ(iterator.current_bytecode(), Bytecode::kCallRuntime);
-  CHECK_EQ(static_cast<Runtime::FunctionId>(iterator.GetIndexOperand(0)),
+  CHECK_EQ(iterator.current_offset(), offset);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+  CHECK_EQ(static_cast<Runtime::FunctionId>(iterator.GetRuntimeIdOperand(0)),
            Runtime::kLoadIC_Miss);
   CHECK_EQ(iterator.GetRegisterOperand(1).index(), reg_0.index());
   CHECK_EQ(iterator.GetRegisterCountOperand(2), 1);
   CHECK(!iterator.done());
+  offset += Bytecodes::Size(Bytecode::kCallRuntime, OperandScale::kSingle);
   iterator.Advance();
 
   CHECK_EQ(iterator.current_bytecode(), Bytecode::kDebugger);
+  CHECK_EQ(iterator.current_offset(), offset);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
   CHECK(!iterator.done());
+  offset += Bytecodes::Size(Bytecode::kDebugger, OperandScale::kSingle);
+  iterator.Advance();
+
+  CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaGlobal);
+  CHECK_EQ(iterator.current_offset(), offset);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kQuadruple);
+  CHECK_EQ(iterator.current_bytecode_size(), 10);
+  CHECK_EQ(iterator.GetIndexOperand(1), 0x10000000);
+  offset += Bytecodes::Size(Bytecode::kLdaGlobal, OperandScale::kQuadruple) +
+            kPrefixByteSize;
   iterator.Advance();
 
   CHECK_EQ(iterator.current_bytecode(), Bytecode::kReturn);
+  CHECK_EQ(iterator.current_offset(), offset);
+  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
   CHECK(!iterator.done());
   iterator.Advance();
   CHECK(iterator.done());
diff --git a/test/unittests/interpreter/bytecode-register-allocator-unittest.cc b/test/unittests/interpreter/bytecode-register-allocator-unittest.cc
index ec29935..d4dc111 100644
--- a/test/unittests/interpreter/bytecode-register-allocator-unittest.cc
+++ b/test/unittests/interpreter/bytecode-register-allocator-unittest.cc
@@ -140,29 +140,6 @@
   }
 }
 
-TEST_F(TemporaryRegisterAllocatorTest, RangeAvoidsTranslationBoundary) {
-  int boundary = RegisterTranslator::DistanceToTranslationWindow(Register(0));
-  int limit = boundary + 64;
-
-  for (int run_length = 2; run_length < 32; run_length += 7) {
-    ZoneVector<int> run_starts(zone());
-    for (int start = 0; start < limit; start += run_length) {
-      int run_start =
-          allocator()->PrepareForConsecutiveTemporaryRegisters(run_length);
-      run_starts.push_back(run_start);
-      for (int i = 0; i < run_length; i++) {
-        allocator()->BorrowConsecutiveTemporaryRegister(run_start + i);
-      }
-      CHECK(run_start >= boundary || run_start + run_length <= boundary);
-    }
-    for (size_t batch = 0; batch < run_starts.size(); batch++) {
-      for (int i = run_starts[batch]; i < run_starts[batch] + run_length; i++) {
-        allocator()->ReturnTemporaryRegister(i);
-      }
-    }
-  }
-}
-
 TEST_F(TemporaryRegisterAllocatorTest, NotInRange) {
   for (int i = 0; i < 10; i++) {
     int reg = allocator()->BorrowTemporaryRegisterNotInRange(2, 5);
diff --git a/test/unittests/interpreter/bytecodes-unittest.cc b/test/unittests/interpreter/bytecodes-unittest.cc
index 212e029..b3554c3 100644
--- a/test/unittests/interpreter/bytecodes-unittest.cc
+++ b/test/unittests/interpreter/bytecodes-unittest.cc
@@ -14,28 +14,27 @@
 namespace interpreter {
 
 TEST(OperandConversion, Registers) {
-  int register_count = Register::MaxRegisterIndex() + 1;
+  int register_count = 128;
   int step = register_count / 7;
   for (int i = 0; i < register_count; i += step) {
     if (i <= kMaxInt8) {
-      uint8_t operand0 = Register(i).ToOperand();
+      uint32_t operand0 = Register(i).ToOperand();
       Register reg0 = Register::FromOperand(operand0);
       CHECK_EQ(i, reg0.index());
     }
 
-    uint16_t operand1 = Register(i).ToWideOperand();
-    Register reg1 = Register::FromWideOperand(operand1);
+    uint32_t operand1 = Register(i).ToOperand();
+    Register reg1 = Register::FromOperand(operand1);
     CHECK_EQ(i, reg1.index());
 
-    uint32_t operand2 = Register(i).ToRawOperand();
-    Register reg2 = Register::FromRawOperand(operand2);
+    uint32_t operand2 = Register(i).ToOperand();
+    Register reg2 = Register::FromOperand(operand2);
     CHECK_EQ(i, reg2.index());
   }
 
   for (int i = 0; i <= kMaxUInt8; i++) {
-    uint8_t operand = static_cast<uint8_t>(i);
-    Register reg = Register::FromOperand(operand);
-    if (i > 0 && i < -kMinInt8) {
+    Register reg = Register::FromOperand(i);
+    if (i > 0) {
       CHECK(reg.is_parameter());
     } else {
       CHECK(!reg.is_parameter());
@@ -51,7 +50,7 @@
     int parameter_count = parameter_counts[p];
     for (int i = 0; i < parameter_count; i++) {
       Register r = Register::FromParameterIndex(i, parameter_count);
-      uint8_t operand_value = r.ToOperand();
+      uint32_t operand_value = r.ToOperand();
       Register s = Register::FromOperand(operand_value);
       CHECK_EQ(i, s.ToParameterIndex(parameter_count));
     }
@@ -59,8 +58,8 @@
 }
 
 TEST(OperandConversion, RegistersParametersNoOverlap) {
-  int register_count = Register::MaxRegisterIndex() + 1;
-  int parameter_count = Register::MaxParameterIndex() + 1;
+  int register_count = 128;
+  int parameter_count = 100;
   int32_t register_space_size = base::bits::RoundUpToPowerOfTwo32(
       static_cast<uint32_t>(register_count + parameter_count));
   uint32_t range = static_cast<uint32_t>(register_space_size);
@@ -68,18 +67,33 @@
 
   for (int i = 0; i < register_count; i += 1) {
     Register r = Register(i);
-    uint32_t operand = r.ToWideOperand();
-    CHECK_LT(operand, operand_count.size());
-    operand_count[operand] += 1;
-    CHECK_EQ(operand_count[operand], 1);
+    int32_t operand = r.ToOperand();
+    uint8_t index = static_cast<uint8_t>(operand);
+    CHECK_LT(index, operand_count.size());
+    operand_count[index] += 1;
+    CHECK_EQ(operand_count[index], 1);
   }
 
   for (int i = 0; i < parameter_count; i += 1) {
     Register r = Register::FromParameterIndex(i, parameter_count);
-    uint32_t operand = r.ToWideOperand();
-    CHECK_LT(operand, operand_count.size());
-    operand_count[operand] += 1;
-    CHECK_EQ(operand_count[operand], 1);
+    uint32_t operand = r.ToOperand();
+    uint8_t index = static_cast<uint8_t>(operand);
+    CHECK_LT(index, operand_count.size());
+    operand_count[index] += 1;
+    CHECK_EQ(operand_count[index], 1);
+  }
+}
+
+TEST(OperandScaling, ScalableAndNonScalable) {
+  for (OperandScale operand_scale = OperandScale::kSingle;
+       operand_scale <= OperandScale::kMaxValid;
+       operand_scale = Bytecodes::NextOperandScale(operand_scale)) {
+    int scale = static_cast<int>(operand_scale);
+    CHECK_EQ(Bytecodes::Size(Bytecode::kCallRuntime, operand_scale),
+             1 + 2 + 2 * scale);
+    CHECK_EQ(Bytecodes::Size(Bytecode::kCreateObjectLiteral, operand_scale),
+             1 + 2 * scale + 1);
+    CHECK_EQ(Bytecodes::Size(Bytecode::kTestIn, operand_scale), 1 + scale);
   }
 }
 
@@ -87,16 +101,11 @@
   CHECK_EQ(Bytecodes::NumberOfRegisterOperands(Bytecode::kAdd), 1);
   CHECK_EQ(Bytecodes::NumberOfRegisterOperands(Bytecode::kCall), 2);
   CHECK_EQ(Bytecodes::NumberOfRegisterOperands(Bytecode::kCallRuntime), 1);
-  CHECK_EQ(Bytecodes::NumberOfRegisterOperands(Bytecode::kCallRuntimeWide), 1);
   CHECK_EQ(Bytecodes::NumberOfRegisterOperands(Bytecode::kCallRuntimeForPair),
            2);
-  CHECK_EQ(
-      Bytecodes::NumberOfRegisterOperands(Bytecode::kCallRuntimeForPairWide),
-      2);
   CHECK_EQ(Bytecodes::NumberOfRegisterOperands(Bytecode::kDeletePropertyStrict),
            1);
   CHECK_EQ(Bytecodes::NumberOfRegisterOperands(Bytecode::kForInPrepare), 1);
-  CHECK_EQ(Bytecodes::NumberOfRegisterOperands(Bytecode::kForInPrepareWide), 1);
   CHECK_EQ(Bytecodes::NumberOfRegisterOperands(Bytecode::kInc), 0);
   CHECK_EQ(Bytecodes::NumberOfRegisterOperands(Bytecode::kJumpIfTrue), 0);
   CHECK_EQ(Bytecodes::NumberOfRegisterOperands(Bytecode::kNew), 2);
@@ -116,11 +125,11 @@
 }
 
 TEST(Bytecodes, RegisterOperands) {
-  CHECK(Bytecodes::IsRegisterOperandType(OperandType::kReg8));
-  CHECK(Bytecodes::IsRegisterInputOperandType(OperandType::kReg8));
-  CHECK(!Bytecodes::IsRegisterOutputOperandType(OperandType::kReg8));
-  CHECK(!Bytecodes::IsRegisterInputOperandType(OperandType::kRegOut8));
-  CHECK(Bytecodes::IsRegisterOutputOperandType(OperandType::kRegOut8));
+  CHECK(Bytecodes::IsRegisterOperandType(OperandType::kReg));
+  CHECK(Bytecodes::IsRegisterInputOperandType(OperandType::kReg));
+  CHECK(!Bytecodes::IsRegisterOutputOperandType(OperandType::kReg));
+  CHECK(!Bytecodes::IsRegisterInputOperandType(OperandType::kRegOut));
+  CHECK(Bytecodes::IsRegisterOutputOperandType(OperandType::kRegOut));
 
 #define IS_REGISTER_OPERAND_TYPE(Name, _) \
   CHECK(Bytecodes::IsRegisterOperandType(OperandType::k##Name));
@@ -155,16 +164,155 @@
 #undef IS_NOT_REGISTER_INPUT_OPERAND_TYPE
 }
 
-TEST(Bytecodes, DebugBreak) {
-  for (uint32_t i = 0; i < Bytecodes::ToByte(Bytecode::kLast); i++) {
-    Bytecode bytecode = Bytecodes::FromByte(i);
-    Bytecode debugbreak = Bytecodes::GetDebugBreak(bytecode);
-    if (!Bytecodes::IsDebugBreak(debugbreak)) {
-      PrintF("Bytecode %s has no matching debug break with length %d\n",
-             Bytecodes::ToString(bytecode), Bytecodes::Size(bytecode));
-      CHECK(false);
-    }
+TEST(Bytecodes, DebugBreakExistForEachBytecode) {
+  static const OperandScale kOperandScale = OperandScale::kSingle;
+#define CHECK_DEBUG_BREAK_SIZE(Name, ...)                                  \
+  if (!Bytecodes::IsDebugBreak(Bytecode::k##Name) &&                       \
+      !Bytecodes::IsPrefixScalingBytecode(Bytecode::k##Name)) {            \
+    Bytecode debug_bytecode = Bytecodes::GetDebugBreak(Bytecode::k##Name); \
+    CHECK_EQ(Bytecodes::Size(Bytecode::k##Name, kOperandScale),            \
+             Bytecodes::Size(debug_bytecode, kOperandScale));              \
   }
+  BYTECODE_LIST(CHECK_DEBUG_BREAK_SIZE)
+#undef CHECK_DEBUG_BREAK_SIZE
+}
+
+TEST(Bytecodes, DecodeBytecodeAndOperands) {
+  struct BytecodesAndResult {
+    const uint8_t bytecode[32];
+    const size_t length;
+    int parameter_count;
+    const char* output;
+  };
+
+#define B(Name) static_cast<uint8_t>(Bytecode::k##Name)
+  const BytecodesAndResult cases[] = {
+      {{B(LdaSmi), 0x01}, 2, 0, "            LdaSmi [1]"},
+      {{B(Wide), B(LdaSmi), 0xe8, 0x03}, 4, 0, "      LdaSmi.Wide [1000]"},
+      {{B(ExtraWide), B(LdaSmi), 0xa0, 0x86, 0x01, 0x00},
+       6,
+       0,
+       "LdaSmi.ExtraWide [100000]"},
+      {{B(LdaSmi), 0xff}, 2, 0, "            LdaSmi [-1]"},
+      {{B(Wide), B(LdaSmi), 0x18, 0xfc}, 4, 0, "      LdaSmi.Wide [-1000]"},
+      {{B(ExtraWide), B(LdaSmi), 0x60, 0x79, 0xfe, 0xff},
+       6,
+       0,
+       "LdaSmi.ExtraWide [-100000]"},
+      {{B(Star), 0xfb}, 2, 0, "            Star r5"},
+      {{B(Wide), B(Star), 0x78, 0xff}, 4, 0, "      Star.Wide r136"},
+      {{B(Wide), B(Call), 0x7a, 0xff, 0x79, 0xff, 0x02, 0x00, 0xb1, 0x00},
+       10,
+       0,
+       "Call.Wide r134, r135, #2, [177]"},
+      {{B(Ldar),
+        static_cast<uint8_t>(Register::FromParameterIndex(2, 3).ToOperand())},
+       2,
+       3,
+       "            Ldar a1"},
+      {{B(Wide), B(CreateObjectLiteral), 0x01, 0x02, 0x03, 0x04, 0xa5},
+       7,
+       0,
+       "CreateObjectLiteral.Wide [513], [1027], #165"},
+      {{B(ExtraWide), B(JumpIfNull), 0x15, 0xcd, 0x5b, 0x07},
+       6,
+       0,
+       "JumpIfNull.ExtraWide [123456789]"},
+  };
+#undef B
+
+  for (size_t i = 0; i < arraysize(cases); ++i) {
+    // Generate reference string by prepending formatted bytes.
+    std::stringstream expected_ss;
+    std::ios default_format(nullptr);
+    default_format.copyfmt(expected_ss);
+    // Match format of Bytecodes::Decode() for byte representations.
+    expected_ss.fill('0');
+    expected_ss.flags(std::ios::right | std::ios::hex);
+    for (size_t b = 0; b < cases[i].length; b++) {
+      expected_ss << std::setw(2) << static_cast<uint32_t>(cases[i].bytecode[b])
+                  << ' ';
+    }
+    expected_ss.copyfmt(default_format);
+    expected_ss << cases[i].output;
+
+    // Generate decoded byte output.
+    std::stringstream actual_ss;
+    Bytecodes::Decode(actual_ss, cases[i].bytecode, cases[i].parameter_count);
+
+    // Compare.
+    CHECK_EQ(actual_ss.str(), expected_ss.str());
+  }
+}
+
+TEST(Bytecodes, DebugBreakForPrefixBytecodes) {
+  CHECK_EQ(Bytecode::kDebugBreakWide,
+           Bytecodes::GetDebugBreak(Bytecode::kWide));
+  CHECK_EQ(Bytecode::kDebugBreakExtraWide,
+           Bytecodes::GetDebugBreak(Bytecode::kExtraWide));
+}
+
+TEST(Bytecodes, PrefixMappings) {
+  Bytecode prefixes[] = {Bytecode::kWide, Bytecode::kExtraWide};
+  TRACED_FOREACH(Bytecode, prefix, prefixes) {
+    CHECK_EQ(prefix, Bytecodes::OperandScaleToPrefixBytecode(
+                         Bytecodes::PrefixBytecodeToOperandScale(prefix)));
+  }
+}
+
+TEST(OperandScale, PrefixesScale) {
+  CHECK(Bytecodes::NextOperandScale(OperandScale::kSingle) ==
+        OperandScale::kDouble);
+  CHECK(Bytecodes::NextOperandScale(OperandScale::kDouble) ==
+        OperandScale::kQuadruple);
+  CHECK(Bytecodes::NextOperandScale(OperandScale::kQuadruple) ==
+        OperandScale::kInvalid);
+}
+
+TEST(OperandScale, PrefixesRequired) {
+  CHECK(!Bytecodes::OperandScaleRequiresPrefixBytecode(OperandScale::kSingle));
+  CHECK(Bytecodes::OperandScaleRequiresPrefixBytecode(OperandScale::kDouble));
+  CHECK(
+      Bytecodes::OperandScaleRequiresPrefixBytecode(OperandScale::kQuadruple));
+  CHECK(Bytecodes::OperandScaleToPrefixBytecode(OperandScale::kDouble) ==
+        Bytecode::kWide);
+  CHECK(Bytecodes::OperandScaleToPrefixBytecode(OperandScale::kQuadruple) ==
+        Bytecode::kExtraWide);
+}
+
+TEST(AccumulatorUse, LogicalOperators) {
+  CHECK_EQ(AccumulatorUse::kNone | AccumulatorUse::kRead,
+           AccumulatorUse::kRead);
+  CHECK_EQ(AccumulatorUse::kRead | AccumulatorUse::kWrite,
+           AccumulatorUse::kReadWrite);
+  CHECK_EQ(AccumulatorUse::kRead & AccumulatorUse::kReadWrite,
+           AccumulatorUse::kRead);
+  CHECK_EQ(AccumulatorUse::kRead & AccumulatorUse::kWrite,
+           AccumulatorUse::kNone);
+}
+
+TEST(AccumulatorUse, SampleBytecodes) {
+  CHECK(Bytecodes::ReadsAccumulator(Bytecode::kStar));
+  CHECK(!Bytecodes::WritesAccumulator(Bytecode::kStar));
+  CHECK_EQ(Bytecodes::GetAccumulatorUse(Bytecode::kStar),
+           AccumulatorUse::kRead);
+  CHECK(!Bytecodes::ReadsAccumulator(Bytecode::kLdar));
+  CHECK(Bytecodes::WritesAccumulator(Bytecode::kLdar));
+  CHECK_EQ(Bytecodes::GetAccumulatorUse(Bytecode::kLdar),
+           AccumulatorUse::kWrite);
+  CHECK(Bytecodes::ReadsAccumulator(Bytecode::kAdd));
+  CHECK(Bytecodes::WritesAccumulator(Bytecode::kAdd));
+  CHECK_EQ(Bytecodes::GetAccumulatorUse(Bytecode::kAdd),
+           AccumulatorUse::kReadWrite);
+}
+
+TEST(AccumulatorUse, AccumulatorUseToString) {
+  std::set<std::string> names;
+  names.insert(Bytecodes::AccumulatorUseToString(AccumulatorUse::kNone));
+  names.insert(Bytecodes::AccumulatorUseToString(AccumulatorUse::kRead));
+  names.insert(Bytecodes::AccumulatorUseToString(AccumulatorUse::kWrite));
+  names.insert(Bytecodes::AccumulatorUseToString(AccumulatorUse::kReadWrite));
+  CHECK_EQ(names.size(), 4);
 }
 
 }  // namespace interpreter
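
A note on the DecodeBytecodeAndOperands cases above: the expected strings
follow from reading the operand bytes little-endian, e.g. Wide LdaSmi 0xe8
0x03 is 0x03e8 = 1000, and ExtraWide LdaSmi 0x60 0x79 0xfe 0xff is 0xfffe7960
= -100000. A small, hypothetical helper showing the reassembly those cases
imply:

    // Reassemble a little-endian scaled immediate (sketch only).
    // width is 1 for unscaled, 2 for Wide, and 4 for ExtraWide operands.
    int32_t ReadImmediate(const uint8_t* operand_start, int width) {
      uint32_t raw = 0;
      for (int i = 0; i < width; i++) {
        raw |= static_cast<uint32_t>(operand_start[i]) << (8 * i);
      }
      if (width == 1) return static_cast<int8_t>(raw);   // e.g. 0xff -> -1
      if (width == 2) return static_cast<int16_t>(raw);  // 0xfc18 -> -1000
      return static_cast<int32_t>(raw);
    }
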
diff --git a/test/unittests/interpreter/constant-array-builder-unittest.cc b/test/unittests/interpreter/constant-array-builder-unittest.cc
index b3ec5ff..7122437 100644
--- a/test/unittests/interpreter/constant-array-builder-unittest.cc
+++ b/test/unittests/interpreter/constant-array-builder-unittest.cc
@@ -19,79 +19,76 @@
   ConstantArrayBuilderTest() {}
   ~ConstantArrayBuilderTest() override {}
 
-  static const size_t kLowCapacity = ConstantArrayBuilder::kLowCapacity;
-  static const size_t kMaxCapacity = ConstantArrayBuilder::kMaxCapacity;
+  static const size_t k8BitCapacity = ConstantArrayBuilder::k8BitCapacity;
+  static const size_t k16BitCapacity = ConstantArrayBuilder::k16BitCapacity;
 };
 
-
 STATIC_CONST_MEMBER_DEFINITION const size_t
-    ConstantArrayBuilderTest::kMaxCapacity;
+    ConstantArrayBuilderTest::k16BitCapacity;
 STATIC_CONST_MEMBER_DEFINITION const size_t
-    ConstantArrayBuilderTest::kLowCapacity;
-
+    ConstantArrayBuilderTest::k8BitCapacity;
 
 TEST_F(ConstantArrayBuilderTest, AllocateAllEntries) {
   ConstantArrayBuilder builder(isolate(), zone());
-  for (size_t i = 0; i < kMaxCapacity; i++) {
+  for (size_t i = 0; i < k16BitCapacity; i++) {
     builder.Insert(handle(Smi::FromInt(static_cast<int>(i)), isolate()));
   }
-  CHECK_EQ(builder.size(), kMaxCapacity);
-  for (size_t i = 0; i < kMaxCapacity; i++) {
+  CHECK_EQ(builder.size(), k16BitCapacity);
+  for (size_t i = 0; i < k16BitCapacity; i++) {
     CHECK_EQ(Handle<Smi>::cast(builder.At(i))->value(), i);
   }
 }
 
-
 TEST_F(ConstantArrayBuilderTest, AllocateEntriesWithIdx8Reservations) {
-  for (size_t reserved = 1; reserved < kLowCapacity; reserved *= 3) {
+  for (size_t reserved = 1; reserved < k8BitCapacity; reserved *= 3) {
     ConstantArrayBuilder builder(isolate(), zone());
     for (size_t i = 0; i < reserved; i++) {
       OperandSize operand_size = builder.CreateReservedEntry();
       CHECK(operand_size == OperandSize::kByte);
     }
-    for (size_t i = 0; i < 2 * kLowCapacity; i++) {
+    for (size_t i = 0; i < 2 * k8BitCapacity; i++) {
       Handle<Object> object = isolate()->factory()->NewNumberFromSize(i);
       builder.Insert(object);
-      if (i + reserved < kLowCapacity) {
-        CHECK_LE(builder.size(), kLowCapacity);
+      if (i + reserved < k8BitCapacity) {
+        CHECK_LE(builder.size(), k8BitCapacity);
         CHECK_EQ(builder.size(), i + 1);
         CHECK(builder.At(i)->SameValue(*object));
       } else {
-        CHECK_GE(builder.size(), kLowCapacity);
+        CHECK_GE(builder.size(), k8BitCapacity);
         CHECK_EQ(builder.size(), i + reserved + 1);
         CHECK(builder.At(i + reserved)->SameValue(*object));
       }
     }
-    CHECK_EQ(builder.size(), 2 * kLowCapacity + reserved);
+    CHECK_EQ(builder.size(), 2 * k8BitCapacity + reserved);
 
     // Check reserved values represented by the hole.
     for (size_t i = 0; i < reserved; i++) {
-      Handle<Object> empty = builder.At(kLowCapacity - reserved + i);
+      Handle<Object> empty = builder.At(k8BitCapacity - reserved + i);
       CHECK(empty->SameValue(isolate()->heap()->the_hole_value()));
     }
 
     // Commit reserved entries with duplicates and check size does not change.
-    DCHECK_EQ(reserved + 2 * kLowCapacity, builder.size());
+    DCHECK_EQ(reserved + 2 * k8BitCapacity, builder.size());
     size_t duplicates_in_idx8_space =
-        std::min(reserved, kLowCapacity - reserved);
+        std::min(reserved, k8BitCapacity - reserved);
     for (size_t i = 0; i < duplicates_in_idx8_space; i++) {
       builder.CommitReservedEntry(OperandSize::kByte,
                                   isolate()->factory()->NewNumberFromSize(i));
-      DCHECK_EQ(reserved + 2 * kLowCapacity, builder.size());
+      DCHECK_EQ(reserved + 2 * k8BitCapacity, builder.size());
     }
 
     // Check all committed values match expected (holes where
     // duplicates_in_idx8_space allocated).
-    for (size_t i = 0; i < kLowCapacity - reserved; i++) {
+    for (size_t i = 0; i < k8BitCapacity - reserved; i++) {
       Smi* smi = Smi::FromInt(static_cast<int>(i));
       CHECK(Handle<Smi>::cast(builder.At(i))->SameValue(smi));
     }
-    for (size_t i = kLowCapacity; i < 2 * kLowCapacity + reserved; i++) {
+    for (size_t i = k8BitCapacity; i < 2 * k8BitCapacity + reserved; i++) {
       Smi* smi = Smi::FromInt(static_cast<int>(i - reserved));
       CHECK(Handle<Smi>::cast(builder.At(i))->SameValue(smi));
     }
     for (size_t i = 0; i < reserved; i++) {
-      size_t index = kLowCapacity - reserved + i;
+      size_t index = k8BitCapacity - reserved + i;
       CHECK(builder.At(index)->IsTheHole());
     }
 
@@ -102,20 +99,19 @@
     }
     for (size_t i = 0; i < duplicates_in_idx8_space; i++) {
       Handle<Object> object =
-          isolate()->factory()->NewNumberFromSize(2 * kLowCapacity + i);
+          isolate()->factory()->NewNumberFromSize(2 * k8BitCapacity + i);
       size_t index = builder.CommitReservedEntry(OperandSize::kByte, object);
-      CHECK_EQ(static_cast<int>(index), kLowCapacity - reserved + i);
+      CHECK_EQ(static_cast<int>(index), k8BitCapacity - reserved + i);
       CHECK(builder.At(static_cast<int>(index))->SameValue(*object));
     }
-    CHECK_EQ(builder.size(), 2 * kLowCapacity + reserved);
+    CHECK_EQ(builder.size(), 2 * k8BitCapacity + reserved);
   }
 }
 
-
-TEST_F(ConstantArrayBuilderTest, AllocateEntriesWithIdx16Reservations) {
-  for (size_t reserved = 1; reserved < kLowCapacity; reserved *= 3) {
+TEST_F(ConstantArrayBuilderTest, AllocateEntriesWithWideReservations) {
+  for (size_t reserved = 1; reserved < k8BitCapacity; reserved *= 3) {
     ConstantArrayBuilder builder(isolate(), zone());
-    for (size_t i = 0; i < kLowCapacity; i++) {
+    for (size_t i = 0; i < k8BitCapacity; i++) {
       Handle<Object> object = isolate()->factory()->NewNumberFromSize(i);
       builder.Insert(object);
       CHECK(builder.At(i)->SameValue(*object));
@@ -124,20 +120,20 @@
     for (size_t i = 0; i < reserved; i++) {
       OperandSize operand_size = builder.CreateReservedEntry();
       CHECK(operand_size == OperandSize::kShort);
-      CHECK_EQ(builder.size(), kLowCapacity);
+      CHECK_EQ(builder.size(), k8BitCapacity);
     }
     for (size_t i = 0; i < reserved; i++) {
       builder.DiscardReservedEntry(OperandSize::kShort);
-      CHECK_EQ(builder.size(), kLowCapacity);
+      CHECK_EQ(builder.size(), k8BitCapacity);
     }
     for (size_t i = 0; i < reserved; i++) {
       OperandSize operand_size = builder.CreateReservedEntry();
       CHECK(operand_size == OperandSize::kShort);
       Handle<Object> object = isolate()->factory()->NewNumberFromSize(i);
       builder.CommitReservedEntry(operand_size, object);
-      CHECK_EQ(builder.size(), kLowCapacity);
+      CHECK_EQ(builder.size(), k8BitCapacity);
     }
-    for (size_t i = kLowCapacity; i < kLowCapacity + reserved; i++) {
+    for (size_t i = k8BitCapacity; i < k8BitCapacity + reserved; i++) {
       OperandSize operand_size = builder.CreateReservedEntry();
       CHECK(operand_size == OperandSize::kShort);
       Handle<Object> object = isolate()->factory()->NewNumberFromSize(i);
@@ -163,26 +159,40 @@
   }
 }
 
+TEST_F(ConstantArrayBuilderTest, ToLargeFixedArray) {
+  ConstantArrayBuilder builder(isolate(), zone());
+  static const size_t kNumberOfElements = 37373;
+  for (size_t i = 0; i < kNumberOfElements; i++) {
+    Handle<Object> object = isolate()->factory()->NewNumberFromSize(i);
+    builder.Insert(object);
+    CHECK(builder.At(i)->SameValue(*object));
+  }
+  Handle<FixedArray> constant_array = builder.ToFixedArray();
+  CHECK_EQ(constant_array->length(), kNumberOfElements);
+  for (size_t i = 0; i < kNumberOfElements; i++) {
+    CHECK(constant_array->get(static_cast<int>(i))->SameValue(*builder.At(i)));
+  }
+}
 
 TEST_F(ConstantArrayBuilderTest, GapFilledWhenLowReservationCommitted) {
   ConstantArrayBuilder builder(isolate(), zone());
-  for (size_t i = 0; i < kLowCapacity; i++) {
+  for (size_t i = 0; i < k8BitCapacity; i++) {
     OperandSize operand_size = builder.CreateReservedEntry();
     CHECK(OperandSize::kByte == operand_size);
     CHECK_EQ(builder.size(), 0);
   }
-  for (size_t i = 0; i < kLowCapacity; i++) {
+  for (size_t i = 0; i < k8BitCapacity; i++) {
     Handle<Object> object = isolate()->factory()->NewNumberFromSize(i);
     builder.Insert(object);
-    CHECK_EQ(builder.size(), i + kLowCapacity + 1);
+    CHECK_EQ(builder.size(), i + k8BitCapacity + 1);
   }
-  for (size_t i = 0; i < kLowCapacity; i++) {
+  for (size_t i = 0; i < k8BitCapacity; i++) {
     builder.CommitReservedEntry(OperandSize::kByte,
-                                builder.At(i + kLowCapacity));
-    CHECK_EQ(builder.size(), 2 * kLowCapacity);
+                                builder.At(i + k8BitCapacity));
+    CHECK_EQ(builder.size(), 2 * k8BitCapacity);
   }
-  for (size_t i = 0; i < kLowCapacity; i++) {
-    Handle<Object> original = builder.At(kLowCapacity + i);
+  for (size_t i = 0; i < k8BitCapacity; i++) {
+    Handle<Object> original = builder.At(k8BitCapacity + i);
     Handle<Object> duplicate = builder.At(i);
     CHECK(original->SameValue(*duplicate));
     Handle<Object> reference = isolate()->factory()->NewNumberFromSize(i);
@@ -190,33 +200,89 @@
   }
 }
 
-
 TEST_F(ConstantArrayBuilderTest, GapNotFilledWhenLowReservationDiscarded) {
   ConstantArrayBuilder builder(isolate(), zone());
-  for (size_t i = 0; i < kLowCapacity; i++) {
+  for (size_t i = 0; i < k8BitCapacity; i++) {
     OperandSize operand_size = builder.CreateReservedEntry();
     CHECK(OperandSize::kByte == operand_size);
     CHECK_EQ(builder.size(), 0);
   }
-  for (size_t i = 0; i < kLowCapacity; i++) {
+  for (size_t i = 0; i < k8BitCapacity; i++) {
     Handle<Object> object = isolate()->factory()->NewNumberFromSize(i);
     builder.Insert(object);
-    CHECK_EQ(builder.size(), i + kLowCapacity + 1);
+    CHECK_EQ(builder.size(), i + k8BitCapacity + 1);
   }
-  for (size_t i = 0; i < kLowCapacity; i++) {
+  for (size_t i = 0; i < k8BitCapacity; i++) {
     builder.DiscardReservedEntry(OperandSize::kByte);
-    builder.Insert(builder.At(i + kLowCapacity));
-    CHECK_EQ(builder.size(), 2 * kLowCapacity);
+    builder.Insert(builder.At(i + k8BitCapacity));
+    CHECK_EQ(builder.size(), 2 * k8BitCapacity);
   }
-  for (size_t i = 0; i < kLowCapacity; i++) {
+  for (size_t i = 0; i < k8BitCapacity; i++) {
     Handle<Object> reference = isolate()->factory()->NewNumberFromSize(i);
-    Handle<Object> original = builder.At(kLowCapacity + i);
+    Handle<Object> original = builder.At(k8BitCapacity + i);
     CHECK(original->SameValue(*reference));
     Handle<Object> duplicate = builder.At(i);
     CHECK(duplicate->SameValue(*isolate()->factory()->the_hole_value()));
   }
 }
 
+TEST_F(ConstantArrayBuilderTest, HolesWithUnusedReservations) {
+  static int kNumberOfHoles = 128;
+  ConstantArrayBuilder builder(isolate(), zone());
+  for (int i = 0; i < kNumberOfHoles; ++i) {
+    CHECK_EQ(builder.CreateReservedEntry(), OperandSize::kByte);
+  }
+  for (int i = 0; i < 128; ++i) {
+    CHECK_EQ(builder.Insert(isolate()->factory()->NewNumber(i)), i);
+  }
+  CHECK_EQ(builder.Insert(isolate()->factory()->NewNumber(256)), 256);
+
+  Handle<FixedArray> constant_array = builder.ToFixedArray();
+  CHECK_EQ(constant_array->length(), 257);
+  for (int i = 128; i < 256; i++) {
+    CHECK(constant_array->get(i)->SameValue(
+        *isolate()->factory()->the_hole_value()));
+  }
+  CHECK(!constant_array->get(127)->SameValue(
+      *isolate()->factory()->the_hole_value()));
+  CHECK(!constant_array->get(256)->SameValue(
+      *isolate()->factory()->the_hole_value()));
+}
+
+TEST_F(ConstantArrayBuilderTest, ReservationsAtAllScales) {
+  ConstantArrayBuilder builder(isolate(), zone());
+  for (int i = 0; i < 256; i++) {
+    CHECK_EQ(builder.CreateReservedEntry(), OperandSize::kByte);
+  }
+  for (int i = 256; i < 65536; ++i) {
+    CHECK_EQ(builder.CreateReservedEntry(), OperandSize::kShort);
+  }
+  for (int i = 65536; i < 131072; ++i) {
+    CHECK_EQ(builder.CreateReservedEntry(), OperandSize::kQuad);
+  }
+  CHECK_EQ(builder.CommitReservedEntry(OperandSize::kByte,
+                                       isolate()->factory()->NewNumber(1)),
+           0);
+  CHECK_EQ(builder.CommitReservedEntry(OperandSize::kShort,
+                                       isolate()->factory()->NewNumber(2)),
+           256);
+  CHECK_EQ(builder.CommitReservedEntry(OperandSize::kQuad,
+                                       isolate()->factory()->NewNumber(3)),
+           65536);
+  Handle<FixedArray> constant_array = builder.ToFixedArray();
+  CHECK_EQ(constant_array->length(), 65537);
+  int count = 1;
+  for (int i = 0; i < constant_array->length(); ++i) {
+    Handle<Object> expected;
+    if (i == 0 || i == 256 || i == 65536) {
+      expected = isolate()->factory()->NewNumber(count++);
+    } else {
+      expected = isolate()->factory()->the_hole_value();
+    }
+    CHECK(constant_array->get(i)->SameValue(*expected));
+  }
+}
+
 }  // namespace interpreter
 }  // namespace internal
 }  // namespace v8
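
The new ReservationsAtAllScales test above captures the constant pool's
tiering: indices that fit in a byte operand get kByte reservations, the next
tier up to 16 bits gets kShort, and anything beyond gets kQuad, with unused
reservations left as holes. A rough sketch of that sizing rule (hypothetical
helper; the thresholds are the renamed k8BitCapacity/k16BitCapacity
constants):

    // Reservation sizing by next available constant-pool index (sketch).
    OperandSize ReservationSizeFor(size_t next_index) {
      if (next_index < k8BitCapacity) return OperandSize::kByte;    // 256
      if (next_index < k16BitCapacity) return OperandSize::kShort;  // 65536
      return OperandSize::kQuad;
    }
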
diff --git a/test/unittests/interpreter/interpreter-assembler-unittest.cc b/test/unittests/interpreter/interpreter-assembler-unittest.cc
index 3375a6b..0106c57 100644
--- a/test/unittests/interpreter/interpreter-assembler-unittest.cc
+++ b/test/unittests/interpreter/interpreter-assembler-unittest.cc
@@ -62,6 +62,18 @@
                            : IsWord32Or(lhs_matcher, rhs_matcher);
 }
 
+InterpreterAssemblerTest::InterpreterAssemblerForTest::
+    ~InterpreterAssemblerForTest() {
+  // Tests don't necessarily read and write the accumulator but
+  // InterpreterAssembler checks accumulator uses.
+  if (Bytecodes::ReadsAccumulator(bytecode())) {
+    GetAccumulator();
+  }
+  if (Bytecodes::WritesAccumulator(bytecode())) {
+    SetAccumulator(nullptr);
+  }
+}
+
 Matcher<Node*> InterpreterAssemblerTest::InterpreterAssemblerForTest::IsLoad(
     const Matcher<LoadRepresentation>& rep_matcher,
     const Matcher<Node*>& base_matcher, const Matcher<Node*>& index_matcher) {
@@ -77,24 +89,25 @@
 }
 
 Matcher<Node*>
-InterpreterAssemblerTest::InterpreterAssemblerForTest::IsBytecodeOperand(
+InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedByteOperand(
     int offset) {
   return IsLoad(
       MachineType::Uint8(),
       IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
       IsIntPtrAdd(
           IsParameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
-          IsInt32Constant(offset)));
+          IsIntPtrConstant(offset)));
 }
 
-Matcher<Node*> InterpreterAssemblerTest::InterpreterAssemblerForTest::
-    IsBytecodeOperandSignExtended(int offset) {
+Matcher<Node*>
+InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedByteOperand(
+    int offset) {
   Matcher<Node*> load_matcher = IsLoad(
       MachineType::Int8(),
       IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
       IsIntPtrAdd(
           IsParameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
-          IsInt32Constant(offset)));
+          IsIntPtrConstant(offset)));
   if (kPointerSize == 8) {
     load_matcher = IsChangeInt32ToInt64(load_matcher);
   }
@@ -102,7 +115,7 @@
 }
 
 Matcher<Node*>
-InterpreterAssemblerTest::InterpreterAssemblerForTest::IsBytecodeOperandShort(
+InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedShortOperand(
     int offset) {
   if (TargetSupportsUnalignedAccess()) {
     return IsLoad(
@@ -111,36 +124,35 @@
         IsIntPtrAdd(
             IsParameter(
                 InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
-            IsInt32Constant(offset)));
+            IsIntPtrConstant(offset)));
   } else {
-    Matcher<Node*> first_byte = IsLoad(
-        MachineType::Uint8(),
-        IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
-        IsIntPtrAdd(
-            IsParameter(
-                InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
-            IsInt32Constant(offset)));
-    Matcher<Node*> second_byte = IsLoad(
-        MachineType::Uint8(),
-        IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
-        IsIntPtrAdd(
-            IsParameter(
-                InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
-            IsInt32Constant(offset + 1)));
 #if V8_TARGET_LITTLE_ENDIAN
-    return IsWordOr(IsWordShl(second_byte, IsInt32Constant(kBitsPerByte)),
-                    first_byte);
+    const int kStep = -1;
+    const int kMsbOffset = 1;
 #elif V8_TARGET_BIG_ENDIAN
-    return IsWordOr(IsWordShl(first_byte, IsInt32Constant(kBitsPerByte)),
-                    second_byte);
+    const int kStep = 1;
+    const int kMsbOffset = 0;
 #else
 #error "Unknown Architecture"
 #endif
+    Matcher<Node*> bytes[2];
+    for (int i = 0; i < static_cast<int>(arraysize(bytes)); i++) {
+      bytes[i] = IsLoad(
+          MachineType::Uint8(),
+          IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
+          IsIntPtrAdd(
+              IsParameter(
+                  InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
+              IsIntPtrConstant(offset + kMsbOffset + kStep * i)));
+    }
+    return IsWord32Or(IsWord32Shl(bytes[0], IsInt32Constant(kBitsPerByte)),
+                      bytes[1]);
   }
 }
 
-Matcher<Node*> InterpreterAssemblerTest::InterpreterAssemblerForTest::
-    IsBytecodeOperandShortSignExtended(int offset) {
+Matcher<Node*>
+InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedShortOperand(
+    int offset) {
   Matcher<Node*> load_matcher;
   if (TargetSupportsUnalignedAccess()) {
     load_matcher = IsLoad(
@@ -149,34 +161,29 @@
         IsIntPtrAdd(
             IsParameter(
                 InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
-            IsInt32Constant(offset)));
+            IsIntPtrConstant(offset)));
   } else {
 #if V8_TARGET_LITTLE_ENDIAN
-    int hi_byte_offset = offset + 1;
-    int lo_byte_offset = offset;
-
+    const int kStep = -1;
+    const int kMsbOffset = 1;
 #elif V8_TARGET_BIG_ENDIAN
-    int hi_byte_offset = offset;
-    int lo_byte_offset = offset + 1;
+    const int kStep = 1;
+    const int kMsbOffset = 0;
 #else
 #error "Unknown Architecture"
 #endif
-    Matcher<Node*> hi_byte = IsLoad(
-        MachineType::Int8(),
-        IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
-        IsIntPtrAdd(
-            IsParameter(
-                InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
-            IsInt32Constant(hi_byte_offset)));
-    hi_byte = IsWord32Shl(hi_byte, IsInt32Constant(kBitsPerByte));
-    Matcher<Node*> lo_byte = IsLoad(
-        MachineType::Uint8(),
-        IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
-        IsIntPtrAdd(
-            IsParameter(
-                InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
-            IsInt32Constant(lo_byte_offset)));
-    load_matcher = IsWord32Or(hi_byte, lo_byte);
+    Matcher<Node*> bytes[2];
+    for (int i = 0; i < static_cast<int>(arraysize(bytes)); i++) {
+      bytes[i] = IsLoad(
+          (i == 0) ? MachineType::Int8() : MachineType::Uint8(),
+          IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
+          IsIntPtrAdd(
+              IsParameter(
+                  InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
+              IsIntPtrConstant(offset + kMsbOffset + kStep * i)));
+    }
+    load_matcher = IsWord32Or(
+        IsWord32Shl(bytes[0], IsInt32Constant(kBitsPerByte)), bytes[1]);
   }
 
   if (kPointerSize == 8) {
@@ -185,6 +192,124 @@
   return load_matcher;
 }
 
+Matcher<Node*>
+InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedQuadOperand(
+    int offset) {
+  if (TargetSupportsUnalignedAccess()) {
+    return IsLoad(
+        MachineType::Uint32(),
+        IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
+        IsIntPtrAdd(
+            IsParameter(
+                InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
+            IsIntPtrConstant(offset)));
+  } else {
+#if V8_TARGET_LITTLE_ENDIAN
+    const int kStep = -1;
+    const int kMsbOffset = 3;
+#elif V8_TARGET_BIG_ENDIAN
+    const int kStep = 1;
+    const int kMsbOffset = 0;
+#else
+#error "Unknown Architecture"
+#endif
+    Matcher<Node*> bytes[4];
+    for (int i = 0; i < static_cast<int>(arraysize(bytes)); i++) {
+      bytes[i] = IsLoad(
+          MachineType::Uint8(),
+          IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
+          IsIntPtrAdd(
+              IsParameter(
+                  InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
+              IsIntPtrConstant(offset + kMsbOffset + kStep * i)));
+    }
+    return IsWord32Or(
+        IsWord32Shl(bytes[0], IsInt32Constant(3 * kBitsPerByte)),
+        IsWord32Or(
+            IsWord32Shl(bytes[1], IsInt32Constant(2 * kBitsPerByte)),
+            IsWord32Or(IsWord32Shl(bytes[2], IsInt32Constant(1 * kBitsPerByte)),
+                       bytes[3])));
+  }
+}
+
+Matcher<Node*>
+InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedQuadOperand(
+    int offset) {
+  Matcher<Node*> load_matcher;
+  if (TargetSupportsUnalignedAccess()) {
+    load_matcher = IsLoad(
+        MachineType::Int32(),
+        IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
+        IsIntPtrAdd(
+            IsParameter(
+                InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
+            IsIntPtrConstant(offset)));
+  } else {
+#if V8_TARGET_LITTLE_ENDIAN
+    const int kStep = -1;
+    int kMsbOffset = 3;
+#elif V8_TARGET_BIG_ENDIAN
+    const int kStep = 1;
+    int kMsbOffset = 0;
+#else
+#error "Unknown Architecture"
+#endif
+    Matcher<Node*> bytes[4];
+    for (int i = 0; i < static_cast<int>(arraysize(bytes)); i++) {
+      bytes[i] = IsLoad(
+          (i == 0) ? MachineType::Int8() : MachineType::Uint8(),
+          IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
+          IsIntPtrAdd(
+              IsParameter(
+                  InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
+              IsIntPtrConstant(offset + kMsbOffset + kStep * i)));
+    }
+    load_matcher = IsWord32Or(
+        IsWord32Shl(bytes[0], IsInt32Constant(3 * kBitsPerByte)),
+        IsWord32Or(
+            IsWord32Shl(bytes[1], IsInt32Constant(2 * kBitsPerByte)),
+            IsWord32Or(IsWord32Shl(bytes[2], IsInt32Constant(1 * kBitsPerByte)),
+                       bytes[3])));
+  }
+
+  if (kPointerSize == 8) {
+    load_matcher = IsChangeInt32ToInt64(load_matcher);
+  }
+  return load_matcher;
+}
+
+Matcher<Node*>
+InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedOperand(
+    int offset, OperandSize operand_size) {
+  switch (operand_size) {
+    case OperandSize::kByte:
+      return IsSignedByteOperand(offset);
+    case OperandSize::kShort:
+      return IsSignedShortOperand(offset);
+    case OperandSize::kQuad:
+      return IsSignedQuadOperand(offset);
+    case OperandSize::kNone:
+      UNREACHABLE();
+  }
+  return nullptr;
+}
+
+Matcher<Node*>
+InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedOperand(
+    int offset, OperandSize operand_size) {
+  switch (operand_size) {
+    case OperandSize::kByte:
+      return IsUnsignedByteOperand(offset);
+    case OperandSize::kShort:
+      return IsUnsignedShortOperand(offset);
+    case OperandSize::kQuad:
+      return IsUnsignedQuadOperand(offset);
+    case OperandSize::kNone:
+      UNREACHABLE();
+  }
+  return nullptr;
+}
+
 TARGET_TEST_F(InterpreterAssemblerTest, Dispatch) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
     InterpreterAssemblerForTest m(this, bytecode);
@@ -195,18 +320,22 @@
     EXPECT_EQ(1, end->InputCount());
     Node* tail_call_node = end->InputAt(0);
 
+    OperandScale operand_scale = OperandScale::kSingle;
     Matcher<Node*> next_bytecode_offset_matcher = IsIntPtrAdd(
         IsParameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
-        IsInt32Constant(interpreter::Bytecodes::Size(bytecode)));
+        IsIntPtrConstant(
+            interpreter::Bytecodes::Size(bytecode, operand_scale)));
     Matcher<Node*> target_bytecode_matcher = m.IsLoad(
         MachineType::Uint8(),
         IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
         next_bytecode_offset_matcher);
+    if (kPointerSize == 8) {
+      target_bytecode_matcher = IsChangeUint32ToUint64(target_bytecode_matcher);
+    }
     Matcher<Node*> code_target_matcher = m.IsLoad(
         MachineType::Pointer(),
         IsParameter(InterpreterDispatchDescriptor::kDispatchTableParameter),
-        IsWord32Shl(target_bytecode_matcher,
-                    IsInt32Constant(kPointerSizeLog2)));
+        IsWordShl(target_bytecode_matcher, IsIntPtrConstant(kPointerSizeLog2)));
 
     EXPECT_THAT(
         tail_call_node,
@@ -230,7 +359,7 @@
   TRACED_FOREACH(int, jump_offset, jump_offsets) {
     TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
       InterpreterAssemblerForTest m(this, bytecode);
-      m.Jump(m.Int32Constant(jump_offset));
+      m.Jump(m.IntPtrConstant(jump_offset));
       Graph* graph = m.graph();
       Node* end = graph->end();
       EXPECT_EQ(1, end->InputCount());
@@ -238,14 +367,18 @@
 
       Matcher<Node*> next_bytecode_offset_matcher = IsIntPtrAdd(
           IsParameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
-          IsInt32Constant(jump_offset));
+          IsIntPtrConstant(jump_offset));
       Matcher<Node*> target_bytecode_matcher =
           m.IsLoad(MachineType::Uint8(), _, next_bytecode_offset_matcher);
+      if (kPointerSize == 8) {
+        target_bytecode_matcher =
+            IsChangeUint32ToUint64(target_bytecode_matcher);
+      }
       Matcher<Node*> code_target_matcher = m.IsLoad(
           MachineType::Pointer(),
           IsParameter(InterpreterDispatchDescriptor::kDispatchTableParameter),
-          IsWord32Shl(target_bytecode_matcher,
-                      IsInt32Constant(kPointerSizeLog2)));
+          IsWordShl(target_bytecode_matcher,
+                    IsIntPtrConstant(kPointerSizeLog2)));
 
       EXPECT_THAT(
           tail_call_node,
@@ -275,24 +408,29 @@
     InterpreterAssemblerForTest m(this, bytecode);
     Node* lhs = m.IntPtrConstant(0);
     Node* rhs = m.IntPtrConstant(1);
-    m.JumpIfWordEqual(lhs, rhs, m.Int32Constant(kJumpIfTrueOffset));
+    m.JumpIfWordEqual(lhs, rhs, m.IntPtrConstant(kJumpIfTrueOffset));
     Graph* graph = m.graph();
     Node* end = graph->end();
     EXPECT_EQ(2, end->InputCount());
 
-    int jump_offsets[] = {kJumpIfTrueOffset,
-                          interpreter::Bytecodes::Size(bytecode)};
+    OperandScale operand_scale = OperandScale::kSingle;
+    int jump_offsets[] = {kJumpIfTrueOffset, interpreter::Bytecodes::Size(
+                                                 bytecode, operand_scale)};
     for (int i = 0; i < static_cast<int>(arraysize(jump_offsets)); i++) {
       Matcher<Node*> next_bytecode_offset_matcher = IsIntPtrAdd(
           IsParameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
-          IsInt32Constant(jump_offsets[i]));
+          IsIntPtrConstant(jump_offsets[i]));
       Matcher<Node*> target_bytecode_matcher =
           m.IsLoad(MachineType::Uint8(), _, next_bytecode_offset_matcher);
+      if (kPointerSize == 8) {
+        target_bytecode_matcher =
+            IsChangeUint32ToUint64(target_bytecode_matcher);
+      }
       Matcher<Node*> code_target_matcher = m.IsLoad(
           MachineType::Pointer(),
           IsParameter(InterpreterDispatchDescriptor::kDispatchTableParameter),
-          IsWord32Shl(target_bytecode_matcher,
-                      IsInt32Constant(kPointerSizeLog2)));
+          IsWordShl(target_bytecode_matcher,
+                    IsIntPtrConstant(kPointerSizeLog2)));
       EXPECT_THAT(
           end->InputAt(i),
           IsTailCall(
@@ -342,51 +480,55 @@
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, BytecodeOperand) {
+  static const OperandScale kOperandScales[] = {
+      OperandScale::kSingle, OperandScale::kDouble, OperandScale::kQuadruple};
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerForTest m(this, bytecode);
-    int number_of_operands = interpreter::Bytecodes::NumberOfOperands(bytecode);
-    for (int i = 0; i < number_of_operands; i++) {
-      int offset = interpreter::Bytecodes::GetOperandOffset(bytecode, i);
-      switch (interpreter::Bytecodes::GetOperandType(bytecode, i)) {
-        case interpreter::OperandType::kRegCount8:
-          EXPECT_THAT(m.BytecodeOperandCount(i), m.IsBytecodeOperand(offset));
-          break;
-        case interpreter::OperandType::kIdx8:
-          EXPECT_THAT(m.BytecodeOperandIdx(i), m.IsBytecodeOperand(offset));
-          break;
-        case interpreter::OperandType::kImm8:
-          EXPECT_THAT(m.BytecodeOperandImm(i),
-                      m.IsBytecodeOperandSignExtended(offset));
-          break;
-        case interpreter::OperandType::kMaybeReg8:
-        case interpreter::OperandType::kReg8:
-        case interpreter::OperandType::kRegOut8:
-        case interpreter::OperandType::kRegOutPair8:
-        case interpreter::OperandType::kRegOutTriple8:
-        case interpreter::OperandType::kRegPair8:
-          EXPECT_THAT(m.BytecodeOperandReg(i),
-                      m.IsBytecodeOperandSignExtended(offset));
-          break;
-        case interpreter::OperandType::kRegCount16:
-          EXPECT_THAT(m.BytecodeOperandCount(i),
-                      m.IsBytecodeOperandShort(offset));
-          break;
-        case interpreter::OperandType::kIdx16:
-          EXPECT_THAT(m.BytecodeOperandIdx(i),
-                      m.IsBytecodeOperandShort(offset));
-          break;
-        case interpreter::OperandType::kMaybeReg16:
-        case interpreter::OperandType::kReg16:
-        case interpreter::OperandType::kRegOut16:
-        case interpreter::OperandType::kRegOutPair16:
-        case interpreter::OperandType::kRegOutTriple16:
-        case interpreter::OperandType::kRegPair16:
-          EXPECT_THAT(m.BytecodeOperandReg(i),
-                      m.IsBytecodeOperandShortSignExtended(offset));
-          break;
-        case interpreter::OperandType::kNone:
-          UNREACHABLE();
-          break;
+    TRACED_FOREACH(interpreter::OperandScale, operand_scale, kOperandScales) {
+      InterpreterAssemblerForTest m(this, bytecode, operand_scale);
+      int number_of_operands =
+          interpreter::Bytecodes::NumberOfOperands(bytecode);
+      for (int i = 0; i < number_of_operands; i++) {
+        int offset = interpreter::Bytecodes::GetOperandOffset(bytecode, i,
+                                                              operand_scale);
+        OperandType operand_type =
+            interpreter::Bytecodes::GetOperandType(bytecode, i);
+        OperandSize operand_size =
+            Bytecodes::SizeOfOperand(operand_type, operand_scale);
+        switch (operand_type) {
+          case interpreter::OperandType::kRegCount:
+            EXPECT_THAT(m.BytecodeOperandCount(i),
+                        m.IsUnsignedOperand(offset, operand_size));
+            break;
+          case interpreter::OperandType::kFlag8:
+            EXPECT_THAT(m.BytecodeOperandFlag(i),
+                        m.IsUnsignedOperand(offset, operand_size));
+            break;
+          case interpreter::OperandType::kIdx:
+            EXPECT_THAT(m.BytecodeOperandIdx(i),
+                        m.IsUnsignedOperand(offset, operand_size));
+            break;
+          case interpreter::OperandType::kImm: {
+            EXPECT_THAT(m.BytecodeOperandImm(i),
+                        m.IsSignedOperand(offset, operand_size));
+            break;
+          }
+          case interpreter::OperandType::kMaybeReg:
+          case interpreter::OperandType::kReg:
+          case interpreter::OperandType::kRegOut:
+          case interpreter::OperandType::kRegOutPair:
+          case interpreter::OperandType::kRegOutTriple:
+          case interpreter::OperandType::kRegPair:
+            EXPECT_THAT(m.BytecodeOperandReg(i),
+                        m.IsSignedOperand(offset, operand_size));
+            break;
+          case interpreter::OperandType::kRuntimeId:
+            EXPECT_THAT(m.BytecodeOperandRuntimeId(i),
+                        m.IsUnsignedOperand(offset, operand_size));
+            break;
+          case interpreter::OperandType::kNone:
+            UNREACHABLE();
+            break;
+        }
       }
     }
   }
@@ -394,12 +536,16 @@
 
 TARGET_TEST_F(InterpreterAssemblerTest, GetSetAccumulator) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
+    if (!interpreter::Bytecodes::ReadsAccumulator(bytecode) ||
+        !interpreter::Bytecodes::WritesAccumulator(bytecode)) {
+      continue;
+    }
+
     InterpreterAssemblerForTest m(this, bytecode);
     // Should be incoming accumulator if not set.
     EXPECT_THAT(
         m.GetAccumulator(),
         IsParameter(InterpreterDispatchDescriptor::kAccumulatorParameter));
-
     // Should be set by SetAccumulator.
     Node* accumulator_value_1 = m.Int32Constant(0xdeadbeef);
     m.SetAccumulator(accumulator_value_1);
@@ -433,27 +579,27 @@
 TARGET_TEST_F(InterpreterAssemblerTest, RegisterLocation) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
     InterpreterAssemblerForTest m(this, bytecode);
-    Node* reg_index_node = m.Int32Constant(44);
+    Node* reg_index_node = m.IntPtrConstant(44);
     Node* reg_location_node = m.RegisterLocation(reg_index_node);
     EXPECT_THAT(
         reg_location_node,
         IsIntPtrAdd(
             IsParameter(InterpreterDispatchDescriptor::kRegisterFileParameter),
-            IsWordShl(reg_index_node, IsInt32Constant(kPointerSizeLog2))));
+            IsWordShl(reg_index_node, IsIntPtrConstant(kPointerSizeLog2))));
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, LoadRegister) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
     InterpreterAssemblerForTest m(this, bytecode);
-    Node* reg_index_node = m.Int32Constant(44);
+    Node* reg_index_node = m.IntPtrConstant(44);
     Node* load_reg_node = m.LoadRegister(reg_index_node);
     EXPECT_THAT(
         load_reg_node,
         m.IsLoad(
             MachineType::AnyTagged(),
             IsParameter(InterpreterDispatchDescriptor::kRegisterFileParameter),
-            IsWordShl(reg_index_node, IsInt32Constant(kPointerSizeLog2))));
+            IsWordShl(reg_index_node, IsIntPtrConstant(kPointerSizeLog2))));
   }
 }
 
@@ -461,7 +607,7 @@
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
     InterpreterAssemblerForTest m(this, bytecode);
     Node* store_value = m.Int32Constant(0xdeadbeef);
-    Node* reg_index_node = m.Int32Constant(44);
+    Node* reg_index_node = m.IntPtrConstant(44);
     Node* store_reg_node = m.StoreRegister(store_value, reg_index_node);
     EXPECT_THAT(
         store_reg_node,
@@ -469,7 +615,7 @@
             StoreRepresentation(MachineRepresentation::kTagged,
                                 kNoWriteBarrier),
             IsParameter(InterpreterDispatchDescriptor::kRegisterFileParameter),
-            IsWordShl(reg_index_node, IsInt32Constant(kPointerSizeLog2)),
+            IsWordShl(reg_index_node, IsIntPtrConstant(kPointerSizeLog2)),
             store_value));
   }
 }
@@ -478,10 +624,12 @@
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
     InterpreterAssemblerForTest m(this, bytecode);
     Node* value = m.Int32Constant(44);
-    EXPECT_THAT(m.SmiTag(value),
-                IsWordShl(value, IsInt32Constant(kSmiShiftSize + kSmiTagSize)));
-    EXPECT_THAT(m.SmiUntag(value),
-                IsWordSar(value, IsInt32Constant(kSmiShiftSize + kSmiTagSize)));
+    EXPECT_THAT(
+        m.SmiTag(value),
+        IsWordShl(value, IsIntPtrConstant(kSmiShiftSize + kSmiTagSize)));
+    EXPECT_THAT(
+        m.SmiUntag(value),
+        IsWordSar(value, IsIntPtrConstant(kSmiShiftSize + kSmiTagSize)));
   }
 }
 
@@ -508,16 +656,16 @@
 TARGET_TEST_F(InterpreterAssemblerTest, WordShl) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
     InterpreterAssemblerForTest m(this, bytecode);
-    Node* a = m.Int32Constant(0);
+    Node* a = m.IntPtrConstant(0);
     Node* add = m.WordShl(a, 10);
-    EXPECT_THAT(add, IsWordShl(a, IsInt32Constant(10)));
+    EXPECT_THAT(add, IsWordShl(a, IsIntPtrConstant(10)));
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, LoadConstantPoolEntry) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
     InterpreterAssemblerForTest m(this, bytecode);
-    Node* index = m.Int32Constant(2);
+    Node* index = m.IntPtrConstant(2);
     Node* load_constant = m.LoadConstantPoolEntry(index);
     Matcher<Node*> constant_pool_matcher = m.IsLoad(
         MachineType::AnyTagged(),
@@ -528,23 +676,7 @@
         m.IsLoad(MachineType::AnyTagged(), constant_pool_matcher,
                  IsIntPtrAdd(
                      IsIntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
-                     IsWordShl(index, IsInt32Constant(kPointerSizeLog2)))));
-  }
-}
-
-TARGET_TEST_F(InterpreterAssemblerTest, LoadFixedArrayElement) {
-  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerForTest m(this, bytecode);
-    int index = 3;
-    Node* fixed_array = m.IntPtrConstant(0xdeadbeef);
-    Node* load_element = m.LoadFixedArrayElement(fixed_array, index);
-    EXPECT_THAT(
-        load_element,
-        m.IsLoad(MachineType::AnyTagged(), fixed_array,
-                 IsIntPtrAdd(
-                     IsIntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
-                     IsWordShl(IsInt32Constant(index),
-                               IsInt32Constant(kPointerSizeLog2)))));
+                     IsWordShl(index, IsIntPtrConstant(kPointerSizeLog2)))));
   }
 }
 
@@ -563,13 +695,13 @@
 TARGET_TEST_F(InterpreterAssemblerTest, LoadContextSlot) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
     InterpreterAssemblerForTest m(this, bytecode);
-    Node* context = m.Int32Constant(1);
-    Node* slot_index = m.Int32Constant(22);
+    Node* context = m.IntPtrConstant(1);
+    Node* slot_index = m.IntPtrConstant(22);
     Node* load_context_slot = m.LoadContextSlot(context, slot_index);
 
     Matcher<Node*> offset =
-        IsIntPtrAdd(IsWordShl(slot_index, IsInt32Constant(kPointerSizeLog2)),
-                    IsInt32Constant(Context::kHeaderSize - kHeapObjectTag));
+        IsIntPtrAdd(IsWordShl(slot_index, IsIntPtrConstant(kPointerSizeLog2)),
+                    IsIntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
     EXPECT_THAT(load_context_slot,
                 m.IsLoad(MachineType::AnyTagged(), context, offset));
   }
@@ -578,14 +710,14 @@
 TARGET_TEST_F(InterpreterAssemblerTest, StoreContextSlot) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
     InterpreterAssemblerForTest m(this, bytecode);
-    Node* context = m.Int32Constant(1);
-    Node* slot_index = m.Int32Constant(22);
-    Node* value = m.Int32Constant(100);
+    Node* context = m.IntPtrConstant(1);
+    Node* slot_index = m.IntPtrConstant(22);
+    Node* value = m.SmiConstant(Smi::FromInt(100));
     Node* store_context_slot = m.StoreContextSlot(context, slot_index, value);
 
     Matcher<Node*> offset =
-        IsIntPtrAdd(IsWordShl(slot_index, IsInt32Constant(kPointerSizeLog2)),
-                    IsInt32Constant(Context::kHeaderSize - kHeapObjectTag));
+        IsIntPtrAdd(IsWordShl(slot_index, IsIntPtrConstant(kPointerSizeLog2)),
+                    IsIntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
     EXPECT_THAT(store_context_slot,
                 m.IsStore(StoreRepresentation(MachineRepresentation::kTagged,
                                               kFullWriteBarrier),
@@ -629,7 +761,7 @@
           IsInt32Mul(function_id, IsInt32Constant(sizeof(Runtime::Function))));
       Matcher<Node*> function_entry =
           m.IsLoad(MachineType::Pointer(), function,
-                   IsInt32Constant(offsetof(Runtime::Function, entry)));
+                   IsIntPtrConstant(offsetof(Runtime::Function, entry)));
 
       Node* call_runtime = m.CallRuntimeN(function_id, context, first_arg,
                                           arg_count, result_size);
diff --git a/test/unittests/interpreter/interpreter-assembler-unittest.h b/test/unittests/interpreter/interpreter-assembler-unittest.h
index 321c724..1ebdc77 100644
--- a/test/unittests/interpreter/interpreter-assembler-unittest.h
+++ b/test/unittests/interpreter/interpreter-assembler-unittest.h
@@ -23,10 +23,12 @@
 
   class InterpreterAssemblerForTest final : public InterpreterAssembler {
    public:
-    InterpreterAssemblerForTest(InterpreterAssemblerTest* test,
-                                Bytecode bytecode)
-        : InterpreterAssembler(test->isolate(), test->zone(), bytecode) {}
-    ~InterpreterAssemblerForTest() override {}
+    InterpreterAssemblerForTest(
+        InterpreterAssemblerTest* test, Bytecode bytecode,
+        OperandScale operand_scale = OperandScale::kSingle)
+        : InterpreterAssembler(test->isolate(), test->zone(), bytecode,
+                               operand_scale) {}
+    ~InterpreterAssemblerForTest() override;
 
     Matcher<compiler::Node*> IsLoad(
         const Matcher<compiler::LoadRepresentation>& rep_matcher,
@@ -38,10 +40,17 @@
         const Matcher<compiler::Node*>& index_matcher,
         const Matcher<compiler::Node*>& value_matcher);
 
-    Matcher<compiler::Node*> IsBytecodeOperand(int offset);
-    Matcher<compiler::Node*> IsBytecodeOperandSignExtended(int offset);
-    Matcher<compiler::Node*> IsBytecodeOperandShort(int offset);
-    Matcher<compiler::Node*> IsBytecodeOperandShortSignExtended(int offset);
+    Matcher<compiler::Node*> IsUnsignedByteOperand(int offset);
+    Matcher<compiler::Node*> IsSignedByteOperand(int offset);
+    Matcher<compiler::Node*> IsUnsignedShortOperand(int offset);
+    Matcher<compiler::Node*> IsSignedShortOperand(int offset);
+    Matcher<compiler::Node*> IsUnsignedQuadOperand(int offset);
+    Matcher<compiler::Node*> IsSignedQuadOperand(int offset);
+
+    Matcher<compiler::Node*> IsSignedOperand(int offset,
+                                             OperandSize operand_size);
+    Matcher<compiler::Node*> IsUnsignedOperand(int offset,
+                                               OperandSize operand_size);
 
     using InterpreterAssembler::graph;
 
diff --git a/test/unittests/interpreter/register-translator-unittest.cc b/test/unittests/interpreter/register-translator-unittest.cc
deleted file mode 100644
index e9f65a6..0000000
--- a/test/unittests/interpreter/register-translator-unittest.cc
+++ /dev/null
@@ -1,260 +0,0 @@
-// Copyright 2014 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <stack>
-
-#include "src/v8.h"
-
-#include "src/interpreter/register-translator.h"
-#include "src/isolate.h"
-#include "test/unittests/test-utils.h"
-
-namespace v8 {
-namespace internal {
-namespace interpreter {
-
-class RegisterTranslatorTest : public TestWithIsolateAndZone,
-                               private RegisterMover {
- public:
-  RegisterTranslatorTest() : translator_(this), move_count_(0) {
-    window_start_ =
-        RegisterTranslator::DistanceToTranslationWindow(Register(0));
-    window_width_ =
-        Register::MaxRegisterIndexForByteOperand() - window_start_ + 1;
-  }
-
-  ~RegisterTranslatorTest() override {}
-
-  bool PopMoveAndMatch(Register from, Register to) {
-    if (!moves_.empty()) {
-      CHECK(from.is_valid() && to.is_valid());
-      const std::pair<Register, Register> top = moves_.top();
-      moves_.pop();
-      return top.first == from && top.second == to;
-    } else {
-      return false;
-    }
-  }
-
-  int move_count() const { return move_count_; }
-  RegisterTranslator* translator() { return &translator_; }
-
-  int window_start() const { return window_start_; }
-  int window_width() const { return window_width_; }
-  int window_limit() const { return window_start_ + window_width_; }
-
- protected:
-  static const char* const kBadOperandRegex;
-
- private:
-  void MoveRegisterUntranslated(Register from, Register to) override {
-    moves_.push(std::make_pair(from, to));
-    move_count_++;
-  }
-
-  RegisterTranslator translator_;
-  std::stack<std::pair<Register, Register>> moves_;
-  int move_count_;
-  int window_start_;
-  int window_width_;
-};
-
-const char* const RegisterTranslatorTest::kBadOperandRegex =
-    ".*OperandType::kReg8 \\|\\| .*OperandType::kRegOut8\\) && "
-    "RegisterIsMovableToWindow.*";
-
-TEST_F(RegisterTranslatorTest, TestFrameSizeAdjustmentsForTranslationWindow) {
-  EXPECT_EQ(0, RegisterTranslator::RegisterCountAdjustment(0, 0));
-  EXPECT_EQ(0, RegisterTranslator::RegisterCountAdjustment(10, 10));
-  EXPECT_EQ(window_width(),
-            RegisterTranslator::RegisterCountAdjustment(173, 0));
-  EXPECT_EQ(window_width(),
-            RegisterTranslator::RegisterCountAdjustment(173, 137));
-  EXPECT_EQ(window_width(),
-            RegisterTranslator::RegisterCountAdjustment(173, 137));
-  // TODO(oth): Add a kMaxParameters8 that derives this info from the frame.
-  int param_limit = FLAG_enable_embedded_constant_pool ? 119 : 120;
-  EXPECT_EQ(0, RegisterTranslator::RegisterCountAdjustment(0, param_limit));
-  EXPECT_EQ(window_limit(),
-            RegisterTranslator::RegisterCountAdjustment(0, 128));
-  EXPECT_EQ(window_limit(),
-            RegisterTranslator::RegisterCountAdjustment(0, 129));
-  EXPECT_EQ(window_limit() - 32,
-            RegisterTranslator::RegisterCountAdjustment(32, 129));
-}
-
-TEST_F(RegisterTranslatorTest, TestInTranslationWindow) {
-  EXPECT_GE(window_start(), 0);
-  EXPECT_FALSE(
-      RegisterTranslator::InTranslationWindow(Register(window_start() - 1)));
-  EXPECT_TRUE(RegisterTranslator::InTranslationWindow(
-      Register(Register::MaxRegisterIndexForByteOperand())));
-  EXPECT_FALSE(RegisterTranslator::InTranslationWindow(
-      Register(Register::MaxRegisterIndexForByteOperand() + 1)));
-  for (int index = window_start(); index < window_limit(); index += 1) {
-    EXPECT_TRUE(RegisterTranslator::InTranslationWindow(Register(index)));
-  }
-}
-
-TEST_F(RegisterTranslatorTest, FitsInReg8Operand) {
-  EXPECT_GT(window_start(), 0);
-  EXPECT_TRUE(RegisterTranslator::FitsInReg8Operand(
-      Register::FromParameterIndex(0, 3)));
-  EXPECT_TRUE(RegisterTranslator::FitsInReg8Operand(
-      Register::FromParameterIndex(2, 3)));
-  EXPECT_TRUE(RegisterTranslator::FitsInReg8Operand(Register(0)));
-  EXPECT_TRUE(
-      RegisterTranslator::FitsInReg8Operand(Register(window_start() - 1)));
-  EXPECT_FALSE(RegisterTranslator::FitsInReg8Operand(Register(kMaxInt8)));
-  EXPECT_FALSE(RegisterTranslator::FitsInReg8Operand(Register(kMaxInt8 + 1)));
-  for (int index = window_start(); index < window_limit(); index += 1) {
-    EXPECT_FALSE(RegisterTranslator::FitsInReg8Operand(Register(index)));
-  }
-}
-
-TEST_F(RegisterTranslatorTest, FitsInReg16Operand) {
-  EXPECT_GT(window_start(), 0);
-  EXPECT_TRUE(RegisterTranslator::FitsInReg16Operand(
-      Register::FromParameterIndex(0, 3)));
-  EXPECT_TRUE(RegisterTranslator::FitsInReg16Operand(
-      Register::FromParameterIndex(2, 3)));
-  EXPECT_TRUE(RegisterTranslator::FitsInReg16Operand(
-      Register::FromParameterIndex(0, 999)));
-  EXPECT_TRUE(RegisterTranslator::FitsInReg16Operand(
-      Register::FromParameterIndex(0, Register::MaxParameterIndex() + 1)));
-  EXPECT_TRUE(RegisterTranslator::FitsInReg16Operand(Register(0)));
-  EXPECT_TRUE(
-      RegisterTranslator::FitsInReg16Operand(Register(window_start() - 1)));
-  EXPECT_TRUE(RegisterTranslator::FitsInReg16Operand(Register(kMaxInt8 + 1)));
-  EXPECT_TRUE(RegisterTranslator::FitsInReg16Operand(Register(kMaxInt8 + 2)));
-  for (int index = 0; index <= kMaxInt16 - window_width(); index += 1) {
-    EXPECT_TRUE(RegisterTranslator::FitsInReg16Operand(Register(index)));
-  }
-  for (int index = Register::MaxRegisterIndex() - window_width() + 1;
-       index < Register::MaxRegisterIndex() + 2; index += 1) {
-    EXPECT_FALSE(RegisterTranslator::FitsInReg16Operand(Register(index)));
-  }
-}
-
-TEST_F(RegisterTranslatorTest, NoTranslationRequired) {
-  Register window_reg(window_start());
-  Register local_reg(57);
-  uint32_t operands[] = {local_reg.ToRawOperand()};
-  translator()->TranslateInputRegisters(Bytecode::kLdar, operands, 1);
-  translator()->TranslateOutputRegisters();
-  EXPECT_EQ(0, move_count());
-
-  Register param_reg = Register::FromParameterIndex(129, 130);
-  operands[0] = param_reg.ToRawOperand();
-  translator()->TranslateInputRegisters(Bytecode::kAdd, operands, 1);
-  translator()->TranslateOutputRegisters();
-  EXPECT_EQ(0, move_count());
-}
-
-TEST_F(RegisterTranslatorTest, TranslationRequired) {
-  Register window_reg(window_start());
-  Register local_reg(137);
-  Register local_reg_translated(local_reg.index() + window_width());
-
-  uint32_t operands[] = {local_reg.ToRawOperand()};
-  translator()->TranslateInputRegisters(Bytecode::kLdar, operands, 1);
-  EXPECT_EQ(1, move_count());
-  EXPECT_TRUE(PopMoveAndMatch(local_reg_translated, window_reg));
-  translator()->TranslateOutputRegisters();
-  EXPECT_EQ(1, move_count());
-  EXPECT_FALSE(PopMoveAndMatch(window_reg, local_reg_translated));
-
-  operands[0] = local_reg.ToRawOperand();
-  translator()->TranslateInputRegisters(Bytecode::kStar, operands, 1);
-  EXPECT_EQ(1, move_count());
-  EXPECT_FALSE(PopMoveAndMatch(local_reg_translated, window_reg));
-  translator()->TranslateOutputRegisters();
-  EXPECT_EQ(2, move_count());
-  EXPECT_TRUE(PopMoveAndMatch(window_reg, local_reg_translated));
-
-  Register param_reg = Register::FromParameterIndex(0, 130);
-  operands[0] = {param_reg.ToRawOperand()};
-  translator()->TranslateInputRegisters(Bytecode::kLdar, operands, 1);
-  EXPECT_EQ(3, move_count());
-  EXPECT_TRUE(PopMoveAndMatch(param_reg, window_reg));
-  translator()->TranslateOutputRegisters();
-  EXPECT_EQ(3, move_count());
-  EXPECT_FALSE(PopMoveAndMatch(window_reg, param_reg));
-
-  operands[0] = {param_reg.ToRawOperand()};
-  translator()->TranslateInputRegisters(Bytecode::kStar, operands, 1);
-  EXPECT_EQ(3, move_count());
-  EXPECT_FALSE(PopMoveAndMatch(local_reg_translated, window_reg));
-  translator()->TranslateOutputRegisters();
-  EXPECT_EQ(4, move_count());
-  EXPECT_TRUE(PopMoveAndMatch(window_reg, param_reg));
-}
-
-TEST_F(RegisterTranslatorTest, RangeTranslation) {
-  Register window0(window_start());
-  Register window1(window_start() + 1);
-  Register window2(window_start() + 2);
-  uint32_t operands[3];
-
-  // Bytecode::kNew with valid range operand.
-  Register constructor0(0);
-  Register args0(1);
-  operands[0] = constructor0.ToRawOperand();
-  operands[1] = args0.ToRawOperand();
-  operands[2] = 1;
-  translator()->TranslateInputRegisters(Bytecode::kNew, operands, 3);
-  translator()->TranslateOutputRegisters();
-  EXPECT_EQ(0, move_count());
-
-  // Bytecode::kNewWide with valid range operand.
-  Register constructor1(128);
-  Register constructor1_translated(constructor1.index() + window_width());
-  Register args1(129);
-  Register args1_translated(args1.index() + window_width());
-  operands[0] = constructor1.ToRawOperand();
-  operands[1] = args1.ToRawOperand();
-  operands[2] = 3;
-  translator()->TranslateInputRegisters(Bytecode::kNewWide, operands, 3);
-  translator()->TranslateOutputRegisters();
-  EXPECT_EQ(0, move_count());
-}
-
-TEST_F(RegisterTranslatorTest, BadRange0) {
-  // Bytecode::kNew with invalid range operand (kMaybeReg8).
-  Register constructor1(128);
-  Register args1(129);
-  uint32_t operands[] = {constructor1.ToRawOperand(), args1.ToRawOperand(), 3};
-  ASSERT_DEATH_IF_SUPPORTED(
-      translator()->TranslateInputRegisters(Bytecode::kNew, operands, 3),
-      kBadOperandRegex);
-}
-
-TEST_F(RegisterTranslatorTest, BadRange1) {
-  // Bytecode::kForInPrepare with invalid range operand (kRegTriple8)
-  Register for_in_state(160);
-  Register for_in_state_translated(for_in_state.index() + window_width());
-  uint32_t operands[] = {for_in_state.ToRawOperand()};
-  ASSERT_DEATH_IF_SUPPORTED(translator()->TranslateInputRegisters(
-                                Bytecode::kForInPrepare, operands, 1),
-                            kBadOperandRegex);
-}
-
-TEST_F(RegisterTranslatorTest, BadRange2) {
-  // Bytecode::kForInNext with invalid range operand (kRegPair8)
-  Register receiver(192);
-  Register receiver_translated(receiver.index() + window_width());
-  Register index(193);
-  Register index_translated(index.index() + window_width());
-  Register cache_info_pair(194);
-  Register cache_info_pair_translated(cache_info_pair.index() + window_width());
-  uint32_t operands[] = {receiver.ToRawOperand(), index.ToRawOperand(),
-                         cache_info_pair.ToRawOperand()};
-  ASSERT_DEATH_IF_SUPPORTED(
-      translator()->TranslateInputRegisters(Bytecode::kForInNext, operands, 3),
-      kBadOperandRegex);
-}
-
-}  // namespace interpreter
-}  // namespace internal
-}  // namespace v8
diff --git a/test/unittests/interpreter/source-position-table-unittest.cc b/test/unittests/interpreter/source-position-table-unittest.cc
new file mode 100644
index 0000000..d62302a
--- /dev/null
+++ b/test/unittests/interpreter/source-position-table-unittest.cc
@@ -0,0 +1,84 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/v8.h"
+
+#include "src/interpreter/source-position-table.h"
+#include "test/unittests/test-utils.h"
+
+namespace v8 {
+namespace internal {
+namespace interpreter {
+
+class SourcePositionTableTest : public TestWithIsolateAndZone {
+ public:
+  SourcePositionTableTest() {}
+  ~SourcePositionTableTest() override {}
+};
+
+// Some random offsets, mostly at 'suspicious' bit boundaries.
+static int offsets[] = {0,   1,   2,    3,    4,     30,      31,  32,
+                        33,  62,  63,   64,   65,    126,     127, 128,
+                        129, 250, 1000, 9999, 12000, 31415926};
+
+TEST_F(SourcePositionTableTest, EncodeStatement) {
+  SourcePositionTableBuilder builder(isolate(), zone());
+  for (int i = 0; i < arraysize(offsets); i++) {
+    builder.AddStatementPosition(offsets[i], offsets[i]);
+  }
+
+  // To test correctness, we rely on the assertions in ToSourcePositionTable().
+  // (Also below.)
+  CHECK(!builder.ToSourcePositionTable().is_null());
+}
+
+TEST_F(SourcePositionTableTest, EncodeStatementDuplicates) {
+  SourcePositionTableBuilder builder(isolate(), zone());
+  for (int i = 0; i < arraysize(offsets); i++) {
+    builder.AddStatementPosition(offsets[i], offsets[i]);
+    builder.AddStatementPosition(offsets[i], offsets[i] + 1);
+  }
+
+  // To test correctness, we rely on the assertions in ToSourcePositionTable().
+  // (Also below.)
+  CHECK(!builder.ToSourcePositionTable().is_null());
+}
+
+TEST_F(SourcePositionTableTest, EncodeExpression) {
+  SourcePositionTableBuilder builder(isolate(), zone());
+  for (int i = 0; i < arraysize(offsets); i++) {
+    builder.AddExpressionPosition(offsets[i], offsets[i]);
+  }
+  CHECK(!builder.ToSourcePositionTable().is_null());
+}
+
+TEST_F(SourcePositionTableTest, EncodeAscending) {
+  SourcePositionTableBuilder builder(isolate(), zone());
+
+  int accumulator = 0;
+  for (int i = 0; i < arraysize(offsets); i++) {
+    accumulator += offsets[i];
+    if (i % 2) {
+      builder.AddStatementPosition(accumulator, accumulator);
+    } else {
+      builder.AddExpressionPosition(accumulator, accumulator);
+    }
+  }
+
+  // Also test negative offsets:
+  for (int i = 0; i < arraysize(offsets); i++) {
+    accumulator -= offsets[i];
+    if (i % 2) {
+      builder.AddStatementPosition(accumulator, accumulator);
+    } else {
+      builder.AddExpressionPosition(accumulator, accumulator);
+    }
+  }
+
+  CHECK(!builder.ToSourcePositionTable().is_null());
+}
+
+}  // namespace interpreter
+}  // namespace internal
+}  // namespace v8