// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/interpreter/interpreter.h"

#include <fstream>

#include "src/ast/prettyprinter.h"
#include "src/code-factory.h"
#include "src/compiler.h"
#include "src/factory.h"
#include "src/interpreter/bytecode-generator.h"
#include "src/interpreter/bytecodes.h"
#include "src/interpreter/interpreter-assembler.h"
#include "src/interpreter/interpreter-intrinsics.h"
#include "src/log.h"
#include "src/zone.h"

namespace v8 {
namespace internal {
namespace interpreter {

using compiler::Node;
typedef CodeStubAssembler::Label Label;
typedef CodeStubAssembler::Variable Variable;

#define __ assembler->

Interpreter::Interpreter(Isolate* isolate) : isolate_(isolate) {
  memset(dispatch_table_, 0, sizeof(dispatch_table_));
}

void Interpreter::Initialize() {
  if (IsDispatchTableInitialized()) return;
  Zone zone(isolate_->allocator());
  HandleScope scope(isolate_);

  if (FLAG_trace_ignition_dispatches) {
    static const int kBytecodeCount = static_cast<int>(Bytecode::kLast) + 1;
    bytecode_dispatch_counters_table_.Reset(
        new uintptr_t[kBytecodeCount * kBytecodeCount]);
    memset(bytecode_dispatch_counters_table_.get(), 0,
           sizeof(uintptr_t) * kBytecodeCount * kBytecodeCount);
  }

  // Generate bytecode handlers for all bytecodes and scales.
  const OperandScale kOperandScales[] = {
#define VALUE(Name, _) OperandScale::k##Name,
      OPERAND_SCALE_LIST(VALUE)
#undef VALUE
  };

  for (OperandScale operand_scale : kOperandScales) {
#define GENERATE_CODE(Name, ...)                                              \
  {                                                                           \
    if (Bytecodes::BytecodeHasHandler(Bytecode::k##Name, operand_scale)) {    \
      InterpreterAssembler assembler(isolate_, &zone, Bytecode::k##Name,      \
                                     operand_scale);                          \
      Do##Name(&assembler);                                                   \
      Handle<Code> code = assembler.GenerateCode();                           \
      size_t index = GetDispatchTableIndex(Bytecode::k##Name, operand_scale); \
      dispatch_table_[index] = code->entry();                                 \
      TraceCodegen(code);                                                     \
      LOG_CODE_EVENT(                                                         \
          isolate_,                                                           \
          CodeCreateEvent(                                                    \
              Logger::BYTECODE_HANDLER_TAG, AbstractCode::cast(*code),        \
              Bytecodes::ToString(Bytecode::k##Name, operand_scale).c_str())); \
    }                                                                         \
  }
    BYTECODE_LIST(GENERATE_CODE)
#undef GENERATE_CODE
  }

  // Fill unused entries with the illegal bytecode handler.
  size_t illegal_index =
      GetDispatchTableIndex(Bytecode::kIllegal, OperandScale::kSingle);
  for (size_t index = 0; index < arraysize(dispatch_table_); ++index) {
    if (dispatch_table_[index] == nullptr) {
      dispatch_table_[index] = dispatch_table_[illegal_index];
    }
  }
}

Code* Interpreter::GetBytecodeHandler(Bytecode bytecode,
                                      OperandScale operand_scale) {
  DCHECK(IsDispatchTableInitialized());
  DCHECK(Bytecodes::BytecodeHasHandler(bytecode, operand_scale));
  size_t index = GetDispatchTableIndex(bytecode, operand_scale);
  Address code_entry = dispatch_table_[index];
  return Code::GetCodeFromTargetAddress(code_entry);
}

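// Maps a (bytecode, operand scale) pair to its slot in the dispatch table.
// The table holds one block of 256 entries per operand scale, laid out
// consecutively: single-scale handlers first, then double, then quadruple.
// For example (illustrative), (Bytecode::kStar, OperandScale::kDouble) maps
// to static_cast<size_t>(Bytecode::kStar) + 256.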
// static
size_t Interpreter::GetDispatchTableIndex(Bytecode bytecode,
                                          OperandScale operand_scale) {
  static const size_t kEntriesPerOperandScale = 1u << kBitsPerByte;
  size_t index = static_cast<size_t>(bytecode);
  switch (operand_scale) {
    case OperandScale::kSingle:
      return index;
    case OperandScale::kDouble:
      return index + kEntriesPerOperandScale;
    case OperandScale::kQuadruple:
      return index + 2 * kEntriesPerOperandScale;
  }
  UNREACHABLE();
  return 0;
}

void Interpreter::IterateDispatchTable(ObjectVisitor* v) {
  for (int i = 0; i < kDispatchTableSize; i++) {
    Address code_entry = dispatch_table_[i];
    Object* code = code_entry == nullptr
                       ? nullptr
                       : Code::GetCodeFromTargetAddress(code_entry);
    Object* old_code = code;
    v->VisitPointer(&code);
    if (code != old_code) {
      dispatch_table_[i] = reinterpret_cast<Code*>(code)->entry();
    }
  }
}

// static
int Interpreter::InterruptBudget() {
  // TODO(ignition): Tune code size multiplier.
  const int kCodeSizeMultiplier = 32;
  return FLAG_interrupt_budget * kCodeSizeMultiplier;
}

bool Interpreter::MakeBytecode(CompilationInfo* info) {
  RuntimeCallTimerScope runtimeTimer(info->isolate(),
                                     &RuntimeCallStats::CompileIgnition);
  TimerEventScope<TimerEventCompileIgnition> timer(info->isolate());
  TRACE_EVENT0("v8", "V8.CompileIgnition");

  if (FLAG_print_bytecode || FLAG_print_source || FLAG_print_ast) {
    OFStream os(stdout);
    base::SmartArrayPointer<char> name = info->GetDebugName();
    os << "[generating bytecode for function: " << name.get() << "]"
       << std::endl
       << std::flush;
  }

#ifdef DEBUG
  if (info->parse_info() && FLAG_print_source) {
    OFStream os(stdout);
    os << "--- Source from AST ---" << std::endl
       << PrettyPrinter(info->isolate()).PrintProgram(info->literal())
       << std::endl
       << std::flush;
  }

  if (info->parse_info() && FLAG_print_ast) {
    OFStream os(stdout);
    os << "--- AST ---" << std::endl
       << AstPrinter(info->isolate()).PrintProgram(info->literal()) << std::endl
       << std::flush;
  }
#endif  // DEBUG

  BytecodeGenerator generator(info);
  Handle<BytecodeArray> bytecodes = generator.MakeBytecode();

  if (generator.HasStackOverflow()) return false;

  if (FLAG_print_bytecode) {
    OFStream os(stdout);
    bytecodes->Print(os);
    os << std::flush;
  }

  info->SetBytecodeArray(bytecodes);
  info->SetCode(info->isolate()->builtins()->InterpreterEntryTrampoline());
  return true;
}

bool Interpreter::IsDispatchTableInitialized() {
  if (FLAG_trace_ignition || FLAG_trace_ignition_codegen ||
      FLAG_trace_ignition_dispatches) {
    // Regenerate table to add bytecode tracing operations,
    // print the assembly code generated by TurboFan,
    // or instrument handlers with dispatch counters.
    return false;
  }
  return dispatch_table_[0] != nullptr;
}

void Interpreter::TraceCodegen(Handle<Code> code) {
#ifdef ENABLE_DISASSEMBLER
  if (FLAG_trace_ignition_codegen) {
    OFStream os(stdout);
    code->Disassemble(nullptr, os);
    os << std::flush;
  }
#endif  // ENABLE_DISASSEMBLER
}

const char* Interpreter::LookupNameOfBytecodeHandler(Code* code) {
#ifdef ENABLE_DISASSEMBLER
#define RETURN_NAME(Name, ...)                                 \
  if (dispatch_table_[Bytecodes::ToByte(Bytecode::k##Name)] == \
      code->entry()) {                                         \
    return #Name;                                              \
  }
  BYTECODE_LIST(RETURN_NAME)
#undef RETURN_NAME
#endif  // ENABLE_DISASSEMBLER
  return nullptr;
}

uintptr_t Interpreter::GetDispatchCounter(Bytecode from, Bytecode to) const {
  int from_index = Bytecodes::ToByte(from);
  int to_index = Bytecodes::ToByte(to);
  return bytecode_dispatch_counters_table_[from_index * kNumberOfBytecodes +
                                           to_index];
}

Local<v8::Object> Interpreter::GetDispatchCountersObject() {
  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(isolate_);
  Local<v8::Context> context = isolate->GetCurrentContext();

  Local<v8::Object> counters_map = v8::Object::New(isolate);

  // Output is a JSON-encoded object of objects.
  //
  // The keys of the top-level object are the source bytecodes, and the
  // corresponding values are objects. The keys of these inner objects are the
  // destinations of the dispatch, and each associated value is a counter for
  // the corresponding source-destination dispatch pair.
  //
  // Only non-zero counters are written to file, but an entry in the top-level
  // object is always present, even if the value is empty because all counters
  // for that source are zero.
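  //
  // For instance (illustrative values only), a run in which LdaZero
  // dispatched to Star three times and Star never dispatched at all would
  // serialize as:
  //   { "LdaZero": { "Star": 3 }, "Star": {} }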

  for (int from_index = 0; from_index < kNumberOfBytecodes; ++from_index) {
    Bytecode from_bytecode = Bytecodes::FromByte(from_index);
    Local<v8::Object> counters_row = v8::Object::New(isolate);

    for (int to_index = 0; to_index < kNumberOfBytecodes; ++to_index) {
      Bytecode to_bytecode = Bytecodes::FromByte(to_index);
      uintptr_t counter = GetDispatchCounter(from_bytecode, to_bytecode);

      if (counter > 0) {
        std::string to_name = Bytecodes::ToString(to_bytecode);
        Local<v8::String> to_name_object =
            v8::String::NewFromUtf8(isolate, to_name.c_str(),
                                    NewStringType::kNormal)
                .ToLocalChecked();
        Local<v8::Number> counter_object = v8::Number::New(isolate, counter);
        CHECK(counters_row->Set(context, to_name_object, counter_object)
                  .IsJust());
      }
    }

    std::string from_name = Bytecodes::ToString(from_bytecode);
    Local<v8::String> from_name_object =
        v8::String::NewFromUtf8(isolate, from_name.c_str(),
                                NewStringType::kNormal)
            .ToLocalChecked();

    CHECK(counters_map->Set(context, from_name_object, counters_row).IsJust());
  }

  return counters_map;
}

// LdaZero
//
// Load literal '0' into the accumulator.
void Interpreter::DoLdaZero(InterpreterAssembler* assembler) {
  Node* zero_value = __ NumberConstant(0.0);
  __ SetAccumulator(zero_value);
  __ Dispatch();
}

// LdaSmi <imm>
//
// Load an integer literal into the accumulator as a Smi.
void Interpreter::DoLdaSmi(InterpreterAssembler* assembler) {
  Node* raw_int = __ BytecodeOperandImm(0);
  Node* smi_int = __ SmiTag(raw_int);
  __ SetAccumulator(smi_int);
  __ Dispatch();
}

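// Helper for bytecodes that load a constant pool entry into the accumulator.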
void Interpreter::DoLoadConstant(InterpreterAssembler* assembler) {
  Node* index = __ BytecodeOperandIdx(0);
  Node* constant = __ LoadConstantPoolEntry(index);
  __ SetAccumulator(constant);
  __ Dispatch();
}


// LdaConstant <idx>
//
// Load constant literal at |idx| in the constant pool into the accumulator.
void Interpreter::DoLdaConstant(InterpreterAssembler* assembler) {
  DoLoadConstant(assembler);
}

// LdaUndefined
//
// Load Undefined into the accumulator.
void Interpreter::DoLdaUndefined(InterpreterAssembler* assembler) {
  Node* undefined_value =
      __ HeapConstant(isolate_->factory()->undefined_value());
  __ SetAccumulator(undefined_value);
  __ Dispatch();
}


// LdaNull
//
// Load Null into the accumulator.
void Interpreter::DoLdaNull(InterpreterAssembler* assembler) {
  Node* null_value = __ HeapConstant(isolate_->factory()->null_value());
  __ SetAccumulator(null_value);
  __ Dispatch();
}


// LdaTheHole
//
// Load TheHole into the accumulator.
void Interpreter::DoLdaTheHole(InterpreterAssembler* assembler) {
  Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value());
  __ SetAccumulator(the_hole_value);
  __ Dispatch();
}


// LdaTrue
//
// Load True into the accumulator.
void Interpreter::DoLdaTrue(InterpreterAssembler* assembler) {
  Node* true_value = __ HeapConstant(isolate_->factory()->true_value());
  __ SetAccumulator(true_value);
  __ Dispatch();
}


// LdaFalse
//
// Load False into the accumulator.
void Interpreter::DoLdaFalse(InterpreterAssembler* assembler) {
  Node* false_value = __ HeapConstant(isolate_->factory()->false_value());
  __ SetAccumulator(false_value);
  __ Dispatch();
}


// Ldar <src>
//
// Load accumulator with value from register <src>.
void Interpreter::DoLdar(InterpreterAssembler* assembler) {
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* value = __ LoadRegister(reg_index);
  __ SetAccumulator(value);
  __ Dispatch();
}


// Star <dst>
//
// Store accumulator to register <dst>.
void Interpreter::DoStar(InterpreterAssembler* assembler) {
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* accumulator = __ GetAccumulator();
  __ StoreRegister(accumulator, reg_index);
  __ Dispatch();
}


// Mov <src> <dst>
//
// Stores the value of register <src> to register <dst>.
void Interpreter::DoMov(InterpreterAssembler* assembler) {
  Node* src_index = __ BytecodeOperandReg(0);
  Node* src_value = __ LoadRegister(src_index);
  Node* dst_index = __ BytecodeOperandReg(1);
  __ StoreRegister(src_value, dst_index);
  __ Dispatch();
}


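// Helper for the LdaGlobal-style bytecodes: loads a named global through the
// given LoadIC callable and leaves the result in the accumulator.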
void Interpreter::DoLoadGlobal(Callable ic, InterpreterAssembler* assembler) {
  // Get the global object.
  Node* context = __ GetContext();
  Node* native_context =
      __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX);
  Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX);

  // Load the global via the LoadIC.
  Node* code_target = __ HeapConstant(ic.code());
  Node* constant_index = __ BytecodeOperandIdx(0);
  Node* name = __ LoadConstantPoolEntry(constant_index);
  Node* raw_slot = __ BytecodeOperandIdx(1);
  Node* smi_slot = __ SmiTag(raw_slot);
  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
  Node* result = __ CallStub(ic.descriptor(), code_target, context, global,
                             name, smi_slot, type_feedback_vector);
  __ SetAccumulator(result);
  __ Dispatch();
}

// LdaGlobal <name_index> <slot>
//
// Load the global with name in constant pool entry <name_index> into the
// accumulator using FeedbackVector slot <slot> outside of a typeof.
void Interpreter::DoLdaGlobal(InterpreterAssembler* assembler) {
  Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
                                                   UNINITIALIZED);
  DoLoadGlobal(ic, assembler);
}

// LdaGlobalInsideTypeof <name_index> <slot>
//
// Load the global with name in constant pool entry <name_index> into the
// accumulator using FeedbackVector slot <slot> inside of a typeof.
void Interpreter::DoLdaGlobalInsideTypeof(InterpreterAssembler* assembler) {
  Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF,
                                                   UNINITIALIZED);
  DoLoadGlobal(ic, assembler);
}

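// Helper for the StaGlobal-style bytecodes: stores the accumulator into a
// named global through the given StoreIC callable.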
void Interpreter::DoStoreGlobal(Callable ic, InterpreterAssembler* assembler) {
  // Get the global object.
  Node* context = __ GetContext();
  Node* native_context =
      __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX);
  Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX);

  // Store the global via the StoreIC.
  Node* code_target = __ HeapConstant(ic.code());
  Node* constant_index = __ BytecodeOperandIdx(0);
  Node* name = __ LoadConstantPoolEntry(constant_index);
  Node* value = __ GetAccumulator();
  Node* raw_slot = __ BytecodeOperandIdx(1);
  Node* smi_slot = __ SmiTag(raw_slot);
  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
  __ CallStub(ic.descriptor(), code_target, context, global, name, value,
              smi_slot, type_feedback_vector);
  __ Dispatch();
}


// StaGlobalSloppy <name_index> <slot>
//
// Store the value in the accumulator into the global with name in constant
// pool entry <name_index> using FeedbackVector slot <slot> in sloppy mode.
void Interpreter::DoStaGlobalSloppy(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
  DoStoreGlobal(ic, assembler);
}


// StaGlobalStrict <name_index> <slot>
//
// Store the value in the accumulator into the global with name in constant
// pool entry <name_index> using FeedbackVector slot <slot> in strict mode.
void Interpreter::DoStaGlobalStrict(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
  DoStoreGlobal(ic, assembler);
}

// LdaContextSlot <context> <slot_index>
//
// Load the object in |slot_index| of |context| into the accumulator.
void Interpreter::DoLdaContextSlot(InterpreterAssembler* assembler) {
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* context = __ LoadRegister(reg_index);
  Node* slot_index = __ BytecodeOperandIdx(1);
  Node* result = __ LoadContextSlot(context, slot_index);
  __ SetAccumulator(result);
  __ Dispatch();
}

// StaContextSlot <context> <slot_index>
//
// Stores the object in the accumulator into |slot_index| of |context|.
void Interpreter::DoStaContextSlot(InterpreterAssembler* assembler) {
  Node* value = __ GetAccumulator();
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* context = __ LoadRegister(reg_index);
  Node* slot_index = __ BytecodeOperandIdx(1);
  __ StoreContextSlot(context, slot_index, value);
  __ Dispatch();
}

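// Helper for the LdaLookupSlot bytecodes: resolves the name dynamically via
// the given runtime function and leaves the result in the accumulator.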
void Interpreter::DoLoadLookupSlot(Runtime::FunctionId function_id,
                                   InterpreterAssembler* assembler) {
  Node* index = __ BytecodeOperandIdx(0);
  Node* name = __ LoadConstantPoolEntry(index);
  Node* context = __ GetContext();
  Node* result = __ CallRuntime(function_id, context, name);
  __ SetAccumulator(result);
  __ Dispatch();
}

// LdaLookupSlot <name_index>
//
// Lookup the object with the name in constant pool entry |name_index|
// dynamically.
void Interpreter::DoLdaLookupSlot(InterpreterAssembler* assembler) {
  DoLoadLookupSlot(Runtime::kLoadLookupSlot, assembler);
}

// LdaLookupSlotInsideTypeof <name_index>
//
// Lookup the object with the name in constant pool entry |name_index|
// dynamically without causing a ReferenceError.
void Interpreter::DoLdaLookupSlotInsideTypeof(InterpreterAssembler* assembler) {
  DoLoadLookupSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler);
}

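// Helper for the StaLookupSlot bytecodes: stores the accumulator via the
// runtime, picking the strict or sloppy variant based on |language_mode|.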
void Interpreter::DoStoreLookupSlot(LanguageMode language_mode,
                                    InterpreterAssembler* assembler) {
  Node* value = __ GetAccumulator();
  Node* index = __ BytecodeOperandIdx(0);
  Node* name = __ LoadConstantPoolEntry(index);
  Node* context = __ GetContext();
  Node* result = __ CallRuntime(is_strict(language_mode)
                                    ? Runtime::kStoreLookupSlot_Strict
                                    : Runtime::kStoreLookupSlot_Sloppy,
                                context, name, value);
  __ SetAccumulator(result);
  __ Dispatch();
}

// StaLookupSlotSloppy <name_index>
//
// Store the object in accumulator to the object with the name in constant
// pool entry |name_index| in sloppy mode.
void Interpreter::DoStaLookupSlotSloppy(InterpreterAssembler* assembler) {
  DoStoreLookupSlot(LanguageMode::SLOPPY, assembler);
}


// StaLookupSlotStrict <name_index>
//
// Store the object in accumulator to the object with the name in constant
// pool entry |name_index| in strict mode.
void Interpreter::DoStaLookupSlotStrict(InterpreterAssembler* assembler) {
  DoStoreLookupSlot(LanguageMode::STRICT, assembler);
}

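// Helper for named property loads: invokes the given LoadIC callable and
// leaves the result in the accumulator.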
void Interpreter::DoLoadIC(Callable ic, InterpreterAssembler* assembler) {
  Node* code_target = __ HeapConstant(ic.code());
  Node* register_index = __ BytecodeOperandReg(0);
  Node* object = __ LoadRegister(register_index);
  Node* constant_index = __ BytecodeOperandIdx(1);
  Node* name = __ LoadConstantPoolEntry(constant_index);
  Node* raw_slot = __ BytecodeOperandIdx(2);
  Node* smi_slot = __ SmiTag(raw_slot);
  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
  Node* context = __ GetContext();
  Node* result = __ CallStub(ic.descriptor(), code_target, context, object,
                             name, smi_slot, type_feedback_vector);
  __ SetAccumulator(result);
  __ Dispatch();
}

// LoadIC <object> <name_index> <slot>
//
// Calls the LoadIC at FeedbackVector slot <slot> for <object> and the name at
// constant pool entry <name_index>.
void Interpreter::DoLoadIC(InterpreterAssembler* assembler) {
  Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
                                                   UNINITIALIZED);
  DoLoadIC(ic, assembler);
}

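// Helper for keyed property loads: the key is taken from the accumulator and
// the loaded value is left in the accumulator.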
void Interpreter::DoKeyedLoadIC(Callable ic, InterpreterAssembler* assembler) {
  Node* code_target = __ HeapConstant(ic.code());
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* object = __ LoadRegister(reg_index);
  Node* name = __ GetAccumulator();
  Node* raw_slot = __ BytecodeOperandIdx(1);
  Node* smi_slot = __ SmiTag(raw_slot);
  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
  Node* context = __ GetContext();
  Node* result = __ CallStub(ic.descriptor(), code_target, context, object,
                             name, smi_slot, type_feedback_vector);
  __ SetAccumulator(result);
  __ Dispatch();
}

// KeyedLoadIC <object> <slot>
//
// Calls the KeyedLoadIC at FeedbackVector slot <slot> for <object> and the key
// in the accumulator.
void Interpreter::DoKeyedLoadIC(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::KeyedLoadICInOptimizedCode(isolate_, UNINITIALIZED);
  DoKeyedLoadIC(ic, assembler);
}

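// Helper for named property stores: invokes the given StoreIC callable with
// the value taken from the accumulator.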
void Interpreter::DoStoreIC(Callable ic, InterpreterAssembler* assembler) {
  Node* code_target = __ HeapConstant(ic.code());
  Node* object_reg_index = __ BytecodeOperandReg(0);
  Node* object = __ LoadRegister(object_reg_index);
  Node* constant_index = __ BytecodeOperandIdx(1);
  Node* name = __ LoadConstantPoolEntry(constant_index);
  Node* value = __ GetAccumulator();
  Node* raw_slot = __ BytecodeOperandIdx(2);
  Node* smi_slot = __ SmiTag(raw_slot);
  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
  Node* context = __ GetContext();
  __ CallStub(ic.descriptor(), code_target, context, object, name, value,
              smi_slot, type_feedback_vector);
  __ Dispatch();
}


// StoreICSloppy <object> <name_index> <slot>
//
// Calls the sloppy mode StoreIC at FeedbackVector slot <slot> for <object> and
// the name in constant pool entry <name_index> with the value in the
// accumulator.
void Interpreter::DoStoreICSloppy(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
  DoStoreIC(ic, assembler);
}


// StoreICStrict <object> <name_index> <slot>
//
// Calls the strict mode StoreIC at FeedbackVector slot <slot> for <object> and
// the name in constant pool entry <name_index> with the value in the
// accumulator.
void Interpreter::DoStoreICStrict(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
  DoStoreIC(ic, assembler);
}

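// Helper for keyed property stores: invokes the given KeyedStoreIC callable
// with the value taken from the accumulator.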
void Interpreter::DoKeyedStoreIC(Callable ic, InterpreterAssembler* assembler) {
  Node* code_target = __ HeapConstant(ic.code());
  Node* object_reg_index = __ BytecodeOperandReg(0);
  Node* object = __ LoadRegister(object_reg_index);
  Node* name_reg_index = __ BytecodeOperandReg(1);
  Node* name = __ LoadRegister(name_reg_index);
  Node* value = __ GetAccumulator();
  Node* raw_slot = __ BytecodeOperandIdx(2);
  Node* smi_slot = __ SmiTag(raw_slot);
  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
  Node* context = __ GetContext();
  __ CallStub(ic.descriptor(), code_target, context, object, name, value,
              smi_slot, type_feedback_vector);
  __ Dispatch();
}


// KeyedStoreICSloppy <object> <key> <slot>
//
// Calls the sloppy mode KeyedStoreIC at FeedbackVector slot <slot> for <object>
// and the key <key> with the value in the accumulator.
void Interpreter::DoKeyedStoreICSloppy(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::KeyedStoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
  DoKeyedStoreIC(ic, assembler);
}


// KeyedStoreICStrict <object> <key> <slot>
//
// Calls the strict mode KeyedStoreIC at FeedbackVector slot <slot> for <object>
// and the key <key> with the value in the accumulator.
void Interpreter::DoKeyedStoreICStrict(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
  DoKeyedStoreIC(ic, assembler);
}

// PushContext <context>
//
// Saves the current context in <context>, and pushes the accumulator as the
// new current context.
void Interpreter::DoPushContext(InterpreterAssembler* assembler) {
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* new_context = __ GetAccumulator();
  Node* old_context = __ GetContext();
  __ StoreRegister(old_context, reg_index);
  __ SetContext(new_context);
  __ Dispatch();
}


// PopContext <context>
//
// Pops the current context and sets <context> as the new context.
void Interpreter::DoPopContext(InterpreterAssembler* assembler) {
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* context = __ LoadRegister(reg_index);
  __ SetContext(context);
  __ Dispatch();
}

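// Helper for binary operation bytecodes implemented by a code stub: the left
// operand comes from the register operand, the right operand from the
// accumulator, and the result is placed back in the accumulator.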
void Interpreter::DoBinaryOp(Callable callable,
                             InterpreterAssembler* assembler) {
  // TODO(bmeurer): Collect definition side type feedback for various
  // binary operations.
  Node* target = __ HeapConstant(callable.code());
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* lhs = __ LoadRegister(reg_index);
  Node* rhs = __ GetAccumulator();
  Node* context = __ GetContext();
  Node* result = __ CallStub(callable.descriptor(), target, context, lhs, rhs);
  __ SetAccumulator(result);
  __ Dispatch();
}

void Interpreter::DoBinaryOp(Runtime::FunctionId function_id,
                             InterpreterAssembler* assembler) {
  // TODO(rmcilroy): Call ICs which back-patch bytecode with type specialized
  // operations, instead of calling builtins directly.
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* lhs = __ LoadRegister(reg_index);
  Node* rhs = __ GetAccumulator();
  Node* context = __ GetContext();
  Node* result = __ CallRuntime(function_id, context, lhs, rhs);
  __ SetAccumulator(result);
  __ Dispatch();
}

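// Helper for binary operation bytecodes whose code is produced by a stub's
// static Generate method (e.g. DoBinaryOp<AddStub> below); operands come from
// the register operand and the accumulator, and the result goes back into the
// accumulator.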
template <class Generator>
void Interpreter::DoBinaryOp(InterpreterAssembler* assembler) {
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* lhs = __ LoadRegister(reg_index);
  Node* rhs = __ GetAccumulator();
  Node* context = __ GetContext();
  Node* result = Generator::Generate(assembler, lhs, rhs, context);
  __ SetAccumulator(result);
  __ Dispatch();
}

// Add <src>
//
// Add register <src> to accumulator.
void Interpreter::DoAdd(InterpreterAssembler* assembler) {
  DoBinaryOp<AddStub>(assembler);
}


// Sub <src>
//
// Subtract register <src> from accumulator.
void Interpreter::DoSub(InterpreterAssembler* assembler) {
  DoBinaryOp<SubtractStub>(assembler);
}


// Mul <src>
//
// Multiply accumulator by register <src>.
void Interpreter::DoMul(InterpreterAssembler* assembler) {
  DoBinaryOp<MultiplyStub>(assembler);
}


// Div <src>
//
// Divide register <src> by accumulator.
void Interpreter::DoDiv(InterpreterAssembler* assembler) {
  DoBinaryOp<DivideStub>(assembler);
}


// Mod <src>
//
// Modulo register <src> by accumulator.
void Interpreter::DoMod(InterpreterAssembler* assembler) {
  DoBinaryOp<ModulusStub>(assembler);
}


// BitwiseOr <src>
//
// BitwiseOr register <src> to accumulator.
void Interpreter::DoBitwiseOr(InterpreterAssembler* assembler) {
  DoBinaryOp<BitwiseOrStub>(assembler);
}


// BitwiseXor <src>
//
// BitwiseXor register <src> to accumulator.
void Interpreter::DoBitwiseXor(InterpreterAssembler* assembler) {
  DoBinaryOp<BitwiseXorStub>(assembler);
}


// BitwiseAnd <src>
//
// BitwiseAnd register <src> to accumulator.
void Interpreter::DoBitwiseAnd(InterpreterAssembler* assembler) {
  DoBinaryOp<BitwiseAndStub>(assembler);
}


// ShiftLeft <src>
//
// Left shifts register <src> by the count specified in the accumulator.
// Register <src> is converted to an int32 and the accumulator to uint32
// before the operation. 5 lsb bits from the accumulator are used as count,
// i.e. <src> << (accumulator & 0x1F).
void Interpreter::DoShiftLeft(InterpreterAssembler* assembler) {
  DoBinaryOp<ShiftLeftStub>(assembler);
}


// ShiftRight <src>
//
// Right shifts register <src> by the count specified in the accumulator.
// Result is sign extended. Register <src> is converted to an int32 and the
// accumulator to uint32 before the operation. 5 lsb bits from the accumulator
// are used as count, i.e. <src> >> (accumulator & 0x1F).
void Interpreter::DoShiftRight(InterpreterAssembler* assembler) {
  DoBinaryOp<ShiftRightStub>(assembler);
}


// ShiftRightLogical <src>
//
// Right shifts register <src> by the count specified in the accumulator.
// Result is zero-filled. The accumulator and register <src> are converted to
// uint32 before the operation. 5 lsb bits from the accumulator are used as
// count, i.e. <src> >>> (accumulator & 0x1F).
void Interpreter::DoShiftRightLogical(InterpreterAssembler* assembler) {
  DoBinaryOp<ShiftRightLogicalStub>(assembler);
}

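// Helper for unary operation bytecodes produced by a stub's static Generate
// method (e.g. DoUnaryOp<IncStub> below); both the operand and the result
// live in the accumulator.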
template <class Generator>
void Interpreter::DoUnaryOp(InterpreterAssembler* assembler) {
  Node* value = __ GetAccumulator();
  Node* context = __ GetContext();
  Node* result = Generator::Generate(assembler, value, context);
  __ SetAccumulator(result);
  __ Dispatch();
}

// Inc
//
// Increments value in the accumulator by one.
void Interpreter::DoInc(InterpreterAssembler* assembler) {
  DoUnaryOp<IncStub>(assembler);
}

// Dec
//
// Decrements value in the accumulator by one.
void Interpreter::DoDec(InterpreterAssembler* assembler) {
  DoUnaryOp<DecStub>(assembler);
}

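// Helper that stores the boolean negation of |value| in the accumulator.
// |value| must already be Boolean true or false; when FLAG_debug_code is set,
// anything else aborts with kExpectedBooleanValue.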
void Interpreter::DoLogicalNotOp(Node* value, InterpreterAssembler* assembler) {
  Label if_true(assembler), if_false(assembler), end(assembler);
  Node* true_value = __ BooleanConstant(true);
  Node* false_value = __ BooleanConstant(false);
  __ BranchIfWordEqual(value, true_value, &if_true, &if_false);
  __ Bind(&if_true);
  {
    __ SetAccumulator(false_value);
    __ Goto(&end);
  }
  __ Bind(&if_false);
  {
    if (FLAG_debug_code) {
      __ AbortIfWordNotEqual(value, false_value,
                             BailoutReason::kExpectedBooleanValue);
    }
    __ SetAccumulator(true_value);
    __ Goto(&end);
  }
  __ Bind(&end);
}

// ToBooleanLogicalNot
//
// Perform logical-not on the accumulator, first casting the
// accumulator to a boolean value if required.
void Interpreter::DoToBooleanLogicalNot(InterpreterAssembler* assembler) {
  Callable callable = CodeFactory::ToBoolean(isolate_);
  Node* target = __ HeapConstant(callable.code());
  Node* accumulator = __ GetAccumulator();
  Node* context = __ GetContext();
  Node* to_boolean_value =
      __ CallStub(callable.descriptor(), target, context, accumulator);
  DoLogicalNotOp(to_boolean_value, assembler);
  __ Dispatch();
}

// LogicalNot
//
// Perform logical-not on the accumulator, which must already be a boolean
// value.
void Interpreter::DoLogicalNot(InterpreterAssembler* assembler) {
  Node* value = __ GetAccumulator();
  DoLogicalNotOp(value, assembler);
  __ Dispatch();
}

// TypeOf
//
// Load the accumulator with the string representing the type of the
// object in the accumulator.
void Interpreter::DoTypeOf(InterpreterAssembler* assembler) {
  Callable callable = CodeFactory::Typeof(isolate_);
  Node* target = __ HeapConstant(callable.code());
  Node* accumulator = __ GetAccumulator();
  Node* context = __ GetContext();
  Node* result =
      __ CallStub(callable.descriptor(), target, context, accumulator);
  __ SetAccumulator(result);
  __ Dispatch();
}

void Interpreter::DoDelete(Runtime::FunctionId function_id,
                           InterpreterAssembler* assembler) {
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* object = __ LoadRegister(reg_index);
  Node* key = __ GetAccumulator();
  Node* context = __ GetContext();
  Node* result = __ CallRuntime(function_id, context, object, key);
  __ SetAccumulator(result);
  __ Dispatch();
}


// DeletePropertyStrict
//
// Delete the property specified in the accumulator from the object
// referenced by the register operand following strict mode semantics.
void Interpreter::DoDeletePropertyStrict(InterpreterAssembler* assembler) {
  DoDelete(Runtime::kDeleteProperty_Strict, assembler);
}


// DeletePropertySloppy
//
// Delete the property specified in the accumulator from the object
// referenced by the register operand following sloppy mode semantics.
void Interpreter::DoDeletePropertySloppy(InterpreterAssembler* assembler) {
  DoDelete(Runtime::kDeleteProperty_Sloppy, assembler);
}

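// Helper for the Call and TailCall bytecodes: invokes the callable in the
// first register operand with the receiver and arguments in the registers
// that follow, and leaves the return value in the accumulator.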
void Interpreter::DoJSCall(InterpreterAssembler* assembler,
                           TailCallMode tail_call_mode) {
  Node* function_reg = __ BytecodeOperandReg(0);
  Node* function = __ LoadRegister(function_reg);
  Node* receiver_reg = __ BytecodeOperandReg(1);
  Node* receiver_arg = __ RegisterLocation(receiver_reg);
  Node* receiver_args_count = __ BytecodeOperandCount(2);
  Node* receiver_count = __ Int32Constant(1);
  Node* args_count = __ Int32Sub(receiver_args_count, receiver_count);
  Node* context = __ GetContext();
  // TODO(rmcilroy): Use the call type feedback slot to call via CallStub.
  Node* result =
      __ CallJS(function, context, receiver_arg, args_count, tail_call_mode);
  __ SetAccumulator(result);
  __ Dispatch();
}


// Call <callable> <receiver> <arg_count>
//
// Call a JS function or Callable in |callable| with the |receiver| and
// |arg_count| arguments in subsequent registers.
void Interpreter::DoCall(InterpreterAssembler* assembler) {
  DoJSCall(assembler, TailCallMode::kDisallow);
}

// TailCall <callable> <receiver> <arg_count>
//
// Tail call a JS function or Callable in |callable| with the |receiver| and
// |arg_count| arguments in subsequent registers.
void Interpreter::DoTailCall(InterpreterAssembler* assembler) {
  DoJSCall(assembler, TailCallMode::kAllow);
}

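// Helper for the CallRuntime bytecode: calls the runtime function identified
// by the first operand and leaves its result in the accumulator.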
void Interpreter::DoCallRuntimeCommon(InterpreterAssembler* assembler) {
  Node* function_id = __ BytecodeOperandRuntimeId(0);
  Node* first_arg_reg = __ BytecodeOperandReg(1);
  Node* first_arg = __ RegisterLocation(first_arg_reg);
  Node* args_count = __ BytecodeOperandCount(2);
  Node* context = __ GetContext();
  Node* result = __ CallRuntimeN(function_id, context, first_arg, args_count);
  __ SetAccumulator(result);
  __ Dispatch();
}


// CallRuntime <function_id> <first_arg> <arg_count>
//
// Call the runtime function |function_id| with the first argument in
// register |first_arg| and |arg_count| arguments in subsequent
// registers.
void Interpreter::DoCallRuntime(InterpreterAssembler* assembler) {
  DoCallRuntimeCommon(assembler);
}

// InvokeIntrinsic <function_id> <first_arg> <arg_count>
//
// Implements the semantic equivalent of calling the runtime function
// |function_id| with the first argument in |first_arg| and |arg_count|
// arguments in subsequent registers.
void Interpreter::DoInvokeIntrinsic(InterpreterAssembler* assembler) {
  Node* function_id = __ BytecodeOperandRuntimeId(0);
  Node* first_arg_reg = __ BytecodeOperandReg(1);
  Node* arg_count = __ BytecodeOperandCount(2);
  Node* context = __ GetContext();
  IntrinsicsHelper helper(assembler);
  Node* result =
      helper.InvokeIntrinsic(function_id, context, first_arg_reg, arg_count);
  __ SetAccumulator(result);
  __ Dispatch();
}

void Interpreter::DoCallRuntimeForPairCommon(InterpreterAssembler* assembler) {
  // Call the runtime function.
  Node* function_id = __ BytecodeOperandRuntimeId(0);
  Node* first_arg_reg = __ BytecodeOperandReg(1);
  Node* first_arg = __ RegisterLocation(first_arg_reg);
  Node* args_count = __ BytecodeOperandCount(2);
  Node* context = __ GetContext();
  Node* result_pair =
      __ CallRuntimeN(function_id, context, first_arg, args_count, 2);

  // Store the results in <first_return> and <first_return + 1>.
  Node* first_return_reg = __ BytecodeOperandReg(3);
  Node* second_return_reg = __ NextRegister(first_return_reg);
  Node* result0 = __ Projection(0, result_pair);
  Node* result1 = __ Projection(1, result_pair);
  __ StoreRegister(result0, first_return_reg);
  __ StoreRegister(result1, second_return_reg);
  __ Dispatch();
}


// CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return>
//
// Call the runtime function |function_id| which returns a pair, with the
// first argument in register |first_arg| and |arg_count| arguments in
// subsequent registers. Returns the result in <first_return> and
// <first_return + 1>.
void Interpreter::DoCallRuntimeForPair(InterpreterAssembler* assembler) {
  DoCallRuntimeForPairCommon(assembler);
}

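// Helper for the CallJSRuntime bytecode: loads the callee from the native
// context slot given by the first operand and performs a regular JS call.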
void Interpreter::DoCallJSRuntimeCommon(InterpreterAssembler* assembler) {
  Node* context_index = __ BytecodeOperandIdx(0);
  Node* receiver_reg = __ BytecodeOperandReg(1);
  Node* first_arg = __ RegisterLocation(receiver_reg);
  Node* receiver_args_count = __ BytecodeOperandCount(2);
  Node* receiver_count = __ Int32Constant(1);
  Node* args_count = __ Int32Sub(receiver_args_count, receiver_count);

  // Get the function to call from the native context.
  Node* context = __ GetContext();
  Node* native_context =
      __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX);
  Node* function = __ LoadContextSlot(native_context, context_index);

  // Call the function.
  Node* result = __ CallJS(function, context, first_arg, args_count,
                           TailCallMode::kDisallow);
  __ SetAccumulator(result);
  __ Dispatch();
}


// CallJSRuntime <context_index> <receiver> <arg_count>
//
// Call the JS runtime function that has the |context_index| with the receiver
// in register |receiver| and |arg_count| arguments in subsequent registers.
void Interpreter::DoCallJSRuntime(InterpreterAssembler* assembler) {
  DoCallJSRuntimeCommon(assembler);
}

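// Helper for the New bytecode: constructs an object by calling |constructor|
// with new.target taken from the accumulator; the result is left in the
// accumulator.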
void Interpreter::DoCallConstruct(InterpreterAssembler* assembler) {
  Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_);
  Node* new_target = __ GetAccumulator();
  Node* constructor_reg = __ BytecodeOperandReg(0);
  Node* constructor = __ LoadRegister(constructor_reg);
  Node* first_arg_reg = __ BytecodeOperandReg(1);
  Node* first_arg = __ RegisterLocation(first_arg_reg);
  Node* args_count = __ BytecodeOperandCount(2);
  Node* context = __ GetContext();
  Node* result =
      __ CallConstruct(constructor, context, new_target, first_arg, args_count);
  __ SetAccumulator(result);
  __ Dispatch();
}


// New <constructor> <first_arg> <arg_count>
//
// Call operator new with |constructor| and the first argument in
// register |first_arg| and |arg_count| arguments in subsequent
// registers. The new.target is in the accumulator.
//
void Interpreter::DoNew(InterpreterAssembler* assembler) {
  DoCallConstruct(assembler);
}

// TestEqual <src>
//
// Test if the value in the <src> register equals the accumulator.
void Interpreter::DoTestEqual(InterpreterAssembler* assembler) {
  DoBinaryOp(CodeFactory::Equal(isolate_), assembler);
}


// TestNotEqual <src>
//
// Test if the value in the <src> register is not equal to the accumulator.
void Interpreter::DoTestNotEqual(InterpreterAssembler* assembler) {
  DoBinaryOp(CodeFactory::NotEqual(isolate_), assembler);
}


// TestEqualStrict <src>
//
// Test if the value in the <src> register is strictly equal to the accumulator.
void Interpreter::DoTestEqualStrict(InterpreterAssembler* assembler) {
  DoBinaryOp(CodeFactory::StrictEqual(isolate_), assembler);
}


// TestLessThan <src>
//
// Test if the value in the <src> register is less than the accumulator.
void Interpreter::DoTestLessThan(InterpreterAssembler* assembler) {
  DoBinaryOp(CodeFactory::LessThan(isolate_), assembler);
}


// TestGreaterThan <src>
//
// Test if the value in the <src> register is greater than the accumulator.
void Interpreter::DoTestGreaterThan(InterpreterAssembler* assembler) {
  DoBinaryOp(CodeFactory::GreaterThan(isolate_), assembler);
}


// TestLessThanOrEqual <src>
//
// Test if the value in the <src> register is less than or equal to the
// accumulator.
void Interpreter::DoTestLessThanOrEqual(InterpreterAssembler* assembler) {
  DoBinaryOp(CodeFactory::LessThanOrEqual(isolate_), assembler);
}


// TestGreaterThanOrEqual <src>
//
// Test if the value in the <src> register is greater than or equal to the
// accumulator.
void Interpreter::DoTestGreaterThanOrEqual(InterpreterAssembler* assembler) {
  DoBinaryOp(CodeFactory::GreaterThanOrEqual(isolate_), assembler);
}


// TestIn <src>
//
// Test if the object referenced by the register operand is a property of the
// object referenced by the accumulator.
void Interpreter::DoTestIn(InterpreterAssembler* assembler) {
  DoBinaryOp(CodeFactory::HasProperty(isolate_), assembler);
}


// TestInstanceOf <src>
//
// Test if the object referenced by the <src> register is an instance of the
// type referenced by the accumulator.
void Interpreter::DoTestInstanceOf(InterpreterAssembler* assembler) {
  DoBinaryOp(CodeFactory::InstanceOf(isolate_), assembler);
}

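// Helper for the To* conversion bytecodes: converts the accumulator with the
// given conversion callable and stores the result back in the accumulator.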
void Interpreter::DoTypeConversionOp(Callable callable,
                                     InterpreterAssembler* assembler) {
  Node* target = __ HeapConstant(callable.code());
  Node* accumulator = __ GetAccumulator();
  Node* context = __ GetContext();
  Node* result =
      __ CallStub(callable.descriptor(), target, context, accumulator);
  __ SetAccumulator(result);
  __ Dispatch();
}

// ToName
//
// Cast the object referenced by the accumulator to a name.
void Interpreter::DoToName(InterpreterAssembler* assembler) {
  DoTypeConversionOp(CodeFactory::ToName(isolate_), assembler);
}


// ToNumber
//
// Cast the object referenced by the accumulator to a number.
void Interpreter::DoToNumber(InterpreterAssembler* assembler) {
  DoTypeConversionOp(CodeFactory::ToNumber(isolate_), assembler);
}


// ToObject
//
// Cast the object referenced by the accumulator to a JSObject.
void Interpreter::DoToObject(InterpreterAssembler* assembler) {
  DoTypeConversionOp(CodeFactory::ToObject(isolate_), assembler);
}

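// For illustration: these conversions mirror the ToName, ToNumber and
// ToObject abstract operations from the spec. Typical JavaScript sources of
// such conversions include computed property keys (ToName), unary `+x`
// (ToNumber) and `with (x)` statements (ToObject), though which bytecode the
// generator actually emits for a given construct is its own decision.
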
// Jump <imm>
//
// Jump by number of bytes represented by the immediate operand |imm|.
void Interpreter::DoJump(InterpreterAssembler* assembler) {
  Node* relative_jump = __ BytecodeOperandImm(0);
  __ Jump(relative_jump);
}

// JumpConstant <idx>
//
// Jump by number of bytes in the Smi in the |idx| entry in the constant pool.
void Interpreter::DoJumpConstant(InterpreterAssembler* assembler) {
  Node* index = __ BytecodeOperandIdx(0);
  Node* constant = __ LoadConstantPoolEntry(index);
  Node* relative_jump = __ SmiUntag(constant);
  __ Jump(relative_jump);
}

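// The *Constant jump variants (here and below) read their relative offset
// from a Smi in the constant pool instead of an immediate operand. The usual
// reason to use them is a jump distance that does not fit into the immediate
// operand reserved for the plain variant.
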
// JumpIfTrue <imm>
//
// Jump by number of bytes represented by an immediate operand if the
// accumulator contains true.
void Interpreter::DoJumpIfTrue(InterpreterAssembler* assembler) {
  Node* accumulator = __ GetAccumulator();
  Node* relative_jump = __ BytecodeOperandImm(0);
  Node* true_value = __ BooleanConstant(true);
  __ JumpIfWordEqual(accumulator, true_value, relative_jump);
}

// JumpIfTrueConstant <idx>
//
// Jump by number of bytes in the Smi in the |idx| entry in the constant pool
// if the accumulator contains true.
void Interpreter::DoJumpIfTrueConstant(InterpreterAssembler* assembler) {
  Node* accumulator = __ GetAccumulator();
  Node* index = __ BytecodeOperandIdx(0);
  Node* constant = __ LoadConstantPoolEntry(index);
  Node* relative_jump = __ SmiUntag(constant);
  Node* true_value = __ BooleanConstant(true);
  __ JumpIfWordEqual(accumulator, true_value, relative_jump);
}

// JumpIfFalse <imm>
//
// Jump by number of bytes represented by an immediate operand if the
// accumulator contains false.
void Interpreter::DoJumpIfFalse(InterpreterAssembler* assembler) {
  Node* accumulator = __ GetAccumulator();
  Node* relative_jump = __ BytecodeOperandImm(0);
  Node* false_value = __ BooleanConstant(false);
  __ JumpIfWordEqual(accumulator, false_value, relative_jump);
}

// JumpIfFalseConstant <idx>
//
// Jump by number of bytes in the Smi in the |idx| entry in the constant pool
// if the accumulator contains false.
void Interpreter::DoJumpIfFalseConstant(InterpreterAssembler* assembler) {
  Node* accumulator = __ GetAccumulator();
  Node* index = __ BytecodeOperandIdx(0);
  Node* constant = __ LoadConstantPoolEntry(index);
  Node* relative_jump = __ SmiUntag(constant);
  Node* false_value = __ BooleanConstant(false);
  __ JumpIfWordEqual(accumulator, false_value, relative_jump);
}

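// Illustrative only (hand-written, not exact bytecode generator output): a
// comparison feeding a branch pairs one of the Test* bytecodes with one of
// the conditional jumps above. For `if (a < b) f();` the shape is roughly:
//
//   Ldar b            ; accumulator <- b
//   TestLessThan r_a  ; accumulator <- (a < b), with a staged in a register
//   JumpIfFalse +N    ; skip over the then-block when the test was false
//   <then-block>      ; call f()
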
// JumpIfToBooleanTrue <imm>
//
// Jump by number of bytes represented by an immediate operand if the object
// referenced by the accumulator is true when the object is cast to boolean.
void Interpreter::DoJumpIfToBooleanTrue(InterpreterAssembler* assembler) {
  Callable callable = CodeFactory::ToBoolean(isolate_);
  Node* target = __ HeapConstant(callable.code());
  Node* accumulator = __ GetAccumulator();
  Node* context = __ GetContext();
  Node* to_boolean_value =
      __ CallStub(callable.descriptor(), target, context, accumulator);
  Node* relative_jump = __ BytecodeOperandImm(0);
  Node* true_value = __ BooleanConstant(true);
  __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump);
}

// JumpIfToBooleanTrueConstant <idx>
//
// Jump by number of bytes in the Smi in the |idx| entry in the constant pool
// if the object referenced by the accumulator is true when the object is cast
// to boolean.
void Interpreter::DoJumpIfToBooleanTrueConstant(
    InterpreterAssembler* assembler) {
  Callable callable = CodeFactory::ToBoolean(isolate_);
  Node* target = __ HeapConstant(callable.code());
  Node* accumulator = __ GetAccumulator();
  Node* context = __ GetContext();
  Node* to_boolean_value =
      __ CallStub(callable.descriptor(), target, context, accumulator);
  Node* index = __ BytecodeOperandIdx(0);
  Node* constant = __ LoadConstantPoolEntry(index);
  Node* relative_jump = __ SmiUntag(constant);
  Node* true_value = __ BooleanConstant(true);
  __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump);
}

// JumpIfToBooleanFalse <imm>
//
// Jump by number of bytes represented by an immediate operand if the object
// referenced by the accumulator is false when the object is cast to boolean.
void Interpreter::DoJumpIfToBooleanFalse(InterpreterAssembler* assembler) {
  Callable callable = CodeFactory::ToBoolean(isolate_);
  Node* target = __ HeapConstant(callable.code());
  Node* accumulator = __ GetAccumulator();
  Node* context = __ GetContext();
  Node* to_boolean_value =
      __ CallStub(callable.descriptor(), target, context, accumulator);
  Node* relative_jump = __ BytecodeOperandImm(0);
  Node* false_value = __ BooleanConstant(false);
  __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump);
}

// JumpIfToBooleanFalseConstant <idx>
//
// Jump by number of bytes in the Smi in the |idx| entry in the constant pool
// if the object referenced by the accumulator is false when the object is cast
// to boolean.
void Interpreter::DoJumpIfToBooleanFalseConstant(
    InterpreterAssembler* assembler) {
  Callable callable = CodeFactory::ToBoolean(isolate_);
  Node* target = __ HeapConstant(callable.code());
  Node* accumulator = __ GetAccumulator();
  Node* context = __ GetContext();
  Node* to_boolean_value =
      __ CallStub(callable.descriptor(), target, context, accumulator);
  Node* index = __ BytecodeOperandIdx(0);
  Node* constant = __ LoadConstantPoolEntry(index);
  Node* relative_jump = __ SmiUntag(constant);
  Node* false_value = __ BooleanConstant(false);
  __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump);
}

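// The two flavours of conditional jump exist because the Test* bytecodes
// already leave a real boolean in the accumulator, so JumpIfTrue/JumpIfFalse
// can compare against the true/false constants directly. For arbitrary
// values, e.g. `if (x)`, the JumpIfToBoolean* variants first run the
// ToBoolean stub and branch on its result.
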
// JumpIfNull <imm>
//
// Jump by number of bytes represented by an immediate operand if the object
// referenced by the accumulator is the null constant.
void Interpreter::DoJumpIfNull(InterpreterAssembler* assembler) {
  Node* accumulator = __ GetAccumulator();
  Node* null_value = __ HeapConstant(isolate_->factory()->null_value());
  Node* relative_jump = __ BytecodeOperandImm(0);
  __ JumpIfWordEqual(accumulator, null_value, relative_jump);
}

// JumpIfNullConstant <idx>
//
// Jump by number of bytes in the Smi in the |idx| entry in the constant pool
// if the object referenced by the accumulator is the null constant.
void Interpreter::DoJumpIfNullConstant(InterpreterAssembler* assembler) {
  Node* accumulator = __ GetAccumulator();
  Node* null_value = __ HeapConstant(isolate_->factory()->null_value());
  Node* index = __ BytecodeOperandIdx(0);
  Node* constant = __ LoadConstantPoolEntry(index);
  Node* relative_jump = __ SmiUntag(constant);
  __ JumpIfWordEqual(accumulator, null_value, relative_jump);
}

// JumpIfUndefined <imm>
//
// Jump by number of bytes represented by an immediate operand if the object
// referenced by the accumulator is the undefined constant.
void Interpreter::DoJumpIfUndefined(InterpreterAssembler* assembler) {
  Node* accumulator = __ GetAccumulator();
  Node* undefined_value =
      __ HeapConstant(isolate_->factory()->undefined_value());
  Node* relative_jump = __ BytecodeOperandImm(0);
  __ JumpIfWordEqual(accumulator, undefined_value, relative_jump);
}

// JumpIfUndefinedConstant <idx>
//
// Jump by number of bytes in the Smi in the |idx| entry in the constant pool
// if the object referenced by the accumulator is the undefined constant.
void Interpreter::DoJumpIfUndefinedConstant(InterpreterAssembler* assembler) {
  Node* accumulator = __ GetAccumulator();
  Node* undefined_value =
      __ HeapConstant(isolate_->factory()->undefined_value());
  Node* index = __ BytecodeOperandIdx(0);
  Node* constant = __ LoadConstantPoolEntry(index);
  Node* relative_jump = __ SmiUntag(constant);
  __ JumpIfWordEqual(accumulator, undefined_value, relative_jump);
}

// JumpIfNotHole <imm>
//
// Jump by number of bytes represented by an immediate operand if the object
// referenced by the accumulator is not the hole.
void Interpreter::DoJumpIfNotHole(InterpreterAssembler* assembler) {
  Node* accumulator = __ GetAccumulator();
  Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value());
  Node* relative_jump = __ BytecodeOperandImm(0);
  __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump);
}

// JumpIfNotHoleConstant <idx>
//
// Jump by number of bytes in the Smi in the |idx| entry in the constant pool
// if the object referenced by the accumulator is not the hole constant.
void Interpreter::DoJumpIfNotHoleConstant(InterpreterAssembler* assembler) {
  Node* accumulator = __ GetAccumulator();
  Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value());
  Node* index = __ BytecodeOperandIdx(0);
  Node* constant = __ LoadConstantPoolEntry(index);
  Node* relative_jump = __ SmiUntag(constant);
  __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump);
}

// CreateRegExpLiteral <pattern_idx> <literal_idx> <flags>
//
// Creates a regular expression literal for literal index <literal_idx> with
// <flags> and the pattern in <pattern_idx>.
void Interpreter::DoCreateRegExpLiteral(InterpreterAssembler* assembler) {
  Callable callable = CodeFactory::FastCloneRegExp(isolate_);
  Node* target = __ HeapConstant(callable.code());
  Node* index = __ BytecodeOperandIdx(0);
  Node* pattern = __ LoadConstantPoolEntry(index);
  Node* literal_index_raw = __ BytecodeOperandIdx(1);
  Node* literal_index = __ SmiTag(literal_index_raw);
  Node* flags_raw = __ BytecodeOperandFlag(2);
  Node* flags = __ SmiTag(flags_raw);
  Node* closure = __ LoadRegister(Register::function_closure());
  Node* context = __ GetContext();
  Node* result = __ CallStub(callable.descriptor(), target, context, closure,
                             literal_index, pattern, flags);
  __ SetAccumulator(result);
  __ Dispatch();
}

// CreateArrayLiteral <element_idx> <literal_idx> <flags>
//
// Creates an array literal for literal index <literal_idx> with flags <flags>
// and constant elements in <element_idx>.
void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) {
  Node* index = __ BytecodeOperandIdx(0);
  Node* constant_elements = __ LoadConstantPoolEntry(index);
  Node* literal_index_raw = __ BytecodeOperandIdx(1);
  Node* literal_index = __ SmiTag(literal_index_raw);
  Node* flags_raw = __ BytecodeOperandFlag(2);
  Node* flags = __ SmiTag(flags_raw);
  Node* closure = __ LoadRegister(Register::function_closure());
  Node* context = __ GetContext();
  Node* result = __ CallRuntime(Runtime::kCreateArrayLiteral, context, closure,
                                literal_index, constant_elements, flags);
  __ SetAccumulator(result);
  __ Dispatch();
}

// CreateObjectLiteral <element_idx> <literal_idx> <flags>
//
// Creates an object literal for literal index <literal_idx> with
// CreateObjectLiteralFlags <flags> and constant elements in <element_idx>.
void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) {
  Node* literal_index_raw = __ BytecodeOperandIdx(1);
  Node* literal_index = __ SmiTag(literal_index_raw);
  Node* bytecode_flags = __ BytecodeOperandFlag(2);
  Node* closure = __ LoadRegister(Register::function_closure());

  // Check if we can do a fast clone or have to call the runtime.
  Label if_fast_clone(assembler),
      if_not_fast_clone(assembler, Label::kDeferred);
  Node* fast_clone_properties_count =
      __ BitFieldDecode<CreateObjectLiteralFlags::FastClonePropertiesCountBits>(
          bytecode_flags);
  __ BranchIf(fast_clone_properties_count, &if_fast_clone, &if_not_fast_clone);

  __ Bind(&if_fast_clone);
  {
    // If we can do a fast clone, take the fast path in
    // FastCloneShallowObjectStub.
    Node* result = FastCloneShallowObjectStub::GenerateFastPath(
        assembler, &if_not_fast_clone, closure, literal_index,
        fast_clone_properties_count);
    __ SetAccumulator(result);
    __ Dispatch();
  }

  __ Bind(&if_not_fast_clone);
  {
    // If we can't do a fast clone, call into the runtime.
    Node* index = __ BytecodeOperandIdx(0);
    Node* constant_elements = __ LoadConstantPoolEntry(index);
    Node* context = __ GetContext();

    STATIC_ASSERT(CreateObjectLiteralFlags::FlagsBits::kShift == 0);
    Node* flags_raw = __ Word32And(
        bytecode_flags,
        __ Int32Constant(CreateObjectLiteralFlags::FlagsBits::kMask));
    Node* flags = __ SmiTag(flags_raw);

    Node* result =
        __ CallRuntime(Runtime::kCreateObjectLiteral, context, closure,
                       literal_index, constant_elements, flags);
    __ SetAccumulator(result);
    __ Dispatch();
  }
}

// CreateClosure <index> <tenured>
//
// Creates a new closure for SharedFunctionInfo at position |index| in the
// constant pool and with the PretenureFlag <tenured>.
void Interpreter::DoCreateClosure(InterpreterAssembler* assembler) {
  // TODO(rmcilroy): Possibly call FastNewClosureStub when possible instead of
  // calling into the runtime.
  Node* index = __ BytecodeOperandIdx(0);
  Node* shared = __ LoadConstantPoolEntry(index);
  Node* tenured_raw = __ BytecodeOperandFlag(1);
  Node* tenured = __ SmiTag(tenured_raw);
  Node* context = __ GetContext();
  Node* result =
      __ CallRuntime(Runtime::kInterpreterNewClosure, context, shared, tenured);
  __ SetAccumulator(result);
  __ Dispatch();
}

// CreateMappedArguments
//
// Creates a new mapped arguments object.
void Interpreter::DoCreateMappedArguments(InterpreterAssembler* assembler) {
  Node* closure = __ LoadRegister(Register::function_closure());
  Node* context = __ GetContext();

  Label if_duplicate_parameters(assembler, Label::kDeferred);
  Label if_not_duplicate_parameters(assembler);

  // Check if function has duplicate parameters.
  // TODO(rmcilroy): Remove this check when FastNewSloppyArgumentsStub supports
  // duplicate parameters.
  Node* shared_info =
      __ LoadObjectField(closure, JSFunction::kSharedFunctionInfoOffset);
  Node* compiler_hints = __ LoadObjectField(
      shared_info, SharedFunctionInfo::kHasDuplicateParametersByteOffset,
      MachineType::Uint8());
  Node* duplicate_parameters_bit = __ Int32Constant(
      1 << SharedFunctionInfo::kHasDuplicateParametersBitWithinByte);
  Node* compare = __ Word32And(compiler_hints, duplicate_parameters_bit);
  __ BranchIf(compare, &if_duplicate_parameters, &if_not_duplicate_parameters);

  __ Bind(&if_not_duplicate_parameters);
  {
    // TODO(rmcilroy): Inline FastNewSloppyArguments when it is a TurboFan stub.
    Callable callable = CodeFactory::FastNewSloppyArguments(isolate_, true);
    Node* target = __ HeapConstant(callable.code());
    Node* result = __ CallStub(callable.descriptor(), target, context, closure);
    __ SetAccumulator(result);
    __ Dispatch();
  }

  __ Bind(&if_duplicate_parameters);
  {
    Node* result =
        __ CallRuntime(Runtime::kNewSloppyArguments_Generic, context, closure);
    __ SetAccumulator(result);
    __ Dispatch();
  }
}


// CreateUnmappedArguments
//
// Creates a new unmapped arguments object.
void Interpreter::DoCreateUnmappedArguments(InterpreterAssembler* assembler) {
  // TODO(rmcilroy): Inline FastNewStrictArguments when it is a TurboFan stub.
  Callable callable = CodeFactory::FastNewStrictArguments(isolate_, true);
  Node* target = __ HeapConstant(callable.code());
  Node* context = __ GetContext();
  Node* closure = __ LoadRegister(Register::function_closure());
  Node* result = __ CallStub(callable.descriptor(), target, context, closure);
  __ SetAccumulator(result);
  __ Dispatch();
}

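// In JavaScript terms: a sloppy-mode function with a simple parameter list
// gets a "mapped" arguments object whose elements alias the formal
// parameters, while strict-mode functions (and other cases decided by the
// bytecode generator) get an unmapped copy. For example:
//
//   function sloppy(x) { arguments[0] = 1; return x; }  // sloppy(5) === 1
//   function strict(x) { "use strict"; arguments[0] = 1; return x; }
//                                                        // strict(5) === 5
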
// CreateRestParameter
//
// Creates a new rest parameter array.
void Interpreter::DoCreateRestParameter(InterpreterAssembler* assembler) {
  // TODO(rmcilroy): Inline FastNewRestArguments when it is a TurboFan stub.
  Callable callable = CodeFactory::FastNewRestParameter(isolate_, true);
  Node* target = __ HeapConstant(callable.code());
  Node* closure = __ LoadRegister(Register::function_closure());
  Node* context = __ GetContext();
  Node* result = __ CallStub(callable.descriptor(), target, context, closure);
  __ SetAccumulator(result);
  __ Dispatch();
}

// StackCheck
//
// Performs a stack guard check.
void Interpreter::DoStackCheck(InterpreterAssembler* assembler) {
  Label ok(assembler), stack_check_interrupt(assembler, Label::kDeferred);

  Node* interrupt = __ StackCheckTriggeredInterrupt();
  __ BranchIf(interrupt, &stack_check_interrupt, &ok);

  __ Bind(&ok);
  __ Dispatch();

  __ Bind(&stack_check_interrupt);
  {
    Node* context = __ GetContext();
    __ CallRuntime(Runtime::kStackGuard, context);
    __ Dispatch();
  }
}

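// StackCheck is the interpreter's interrupt point: the fast path simply
// dispatches, while the deferred path calls Runtime::kStackGuard, which
// handles stack overflow and any pending interrupts. The bytecode generator
// typically emits this bytecode at function entry and at loop back edges so
// that long-running bytecode stays interruptible.
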
// Throw
//
// Throws the exception in the accumulator.
void Interpreter::DoThrow(InterpreterAssembler* assembler) {
  Node* exception = __ GetAccumulator();
  Node* context = __ GetContext();
  __ CallRuntime(Runtime::kThrow, context, exception);
  // We shouldn't ever return from a throw.
  __ Abort(kUnexpectedReturnFromThrow);
}


// ReThrow
//
// Re-throws the exception in the accumulator.
void Interpreter::DoReThrow(InterpreterAssembler* assembler) {
  Node* exception = __ GetAccumulator();
  Node* context = __ GetContext();
  __ CallRuntime(Runtime::kReThrow, context, exception);
  // We shouldn't ever return from a throw.
  __ Abort(kUnexpectedReturnFromThrow);
}


// Return
//
// Return the value in the accumulator.
void Interpreter::DoReturn(InterpreterAssembler* assembler) {
  __ UpdateInterruptBudgetOnReturn();
  Node* accumulator = __ GetAccumulator();
  __ Return(accumulator);
}

// Debugger
//
// Call runtime to handle debugger statement.
void Interpreter::DoDebugger(InterpreterAssembler* assembler) {
  Node* context = __ GetContext();
  __ CallRuntime(Runtime::kHandleDebuggerStatement, context);
  __ Dispatch();
}

// DebugBreak
//
// Call runtime to handle a debug break.
#define DEBUG_BREAK(Name, ...)                                                \
  void Interpreter::Do##Name(InterpreterAssembler* assembler) {               \
    Node* context = __ GetContext();                                          \
    Node* accumulator = __ GetAccumulator();                                  \
    Node* original_handler =                                                  \
        __ CallRuntime(Runtime::kDebugBreakOnBytecode, context, accumulator); \
    __ DispatchToBytecodeHandler(original_handler);                           \
  }
DEBUG_BREAK_BYTECODE_LIST(DEBUG_BREAK);
#undef DEBUG_BREAK

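// Each DebugBreak* handler generated above notifies the debugger via
// Runtime::kDebugBreakOnBytecode and then dispatches to the handler of the
// bytecode that the debug-break patch replaced, so execution continues as if
// the breakpoint had not been hit.
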
// ForInPrepare <cache_info_triple>
//
// Returns state for for..in loop execution based on the object in the
// accumulator. The result is output in registers |cache_info_triple| to
// |cache_info_triple + 2|, with the registers holding cache_type, cache_array,
// and cache_length respectively.
void Interpreter::DoForInPrepare(InterpreterAssembler* assembler) {
  Node* object = __ GetAccumulator();
  Node* context = __ GetContext();
  Node* result_triple = __ CallRuntime(Runtime::kForInPrepare, context, object);

  // Set output registers:
  //   0 == cache_type, 1 == cache_array, 2 == cache_length
  Node* output_register = __ BytecodeOperandReg(0);
  for (int i = 0; i < 3; i++) {
    Node* cache_info = __ Projection(i, result_triple);
    __ StoreRegister(cache_info, output_register);
    output_register = __ NextRegister(output_register);
  }
  __ Dispatch();
}

// ForInNext <receiver> <index> <cache_info_pair>
//
// Returns the next enumerable property in the accumulator.
void Interpreter::DoForInNext(InterpreterAssembler* assembler) {
  Node* receiver_reg = __ BytecodeOperandReg(0);
  Node* receiver = __ LoadRegister(receiver_reg);
  Node* index_reg = __ BytecodeOperandReg(1);
  Node* index = __ LoadRegister(index_reg);
  Node* cache_type_reg = __ BytecodeOperandReg(2);
  Node* cache_type = __ LoadRegister(cache_type_reg);
  Node* cache_array_reg = __ NextRegister(cache_type_reg);
  Node* cache_array = __ LoadRegister(cache_array_reg);

  // Load the next key from the enumeration array.
  Node* key = __ LoadFixedArrayElement(cache_array, index, 0,
                                       CodeStubAssembler::SMI_PARAMETERS);

  // Check if we can use the for-in fast path potentially using the enum cache.
  Label if_fast(assembler), if_slow(assembler, Label::kDeferred);
  Node* receiver_map = __ LoadObjectField(receiver, HeapObject::kMapOffset);
  Node* condition = __ WordEqual(receiver_map, cache_type);
  __ BranchIf(condition, &if_fast, &if_slow);
  __ Bind(&if_fast);
  {
    // Enum cache in use for {receiver}, the {key} is definitely valid.
    __ SetAccumulator(key);
    __ Dispatch();
  }
  __ Bind(&if_slow);
  {
    // Record the fact that we hit the for-in slow path.
    Node* vector_index = __ BytecodeOperandIdx(3);
    Node* type_feedback_vector = __ LoadTypeFeedbackVector();
    Node* megamorphic_sentinel =
        __ HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate_));
    __ StoreFixedArrayElement(type_feedback_vector, vector_index,
                              megamorphic_sentinel, SKIP_WRITE_BARRIER);

    // Need to filter the {key} for the {receiver}.
    Node* context = __ GetContext();
    Node* result =
        __ CallRuntime(Runtime::kForInFilter, context, receiver, key);
    __ SetAccumulator(result);
    __ Dispatch();
  }
}

// ForInDone <index> <cache_length>
//
// Returns true if the end of the enumerable properties has been reached.
void Interpreter::DoForInDone(InterpreterAssembler* assembler) {
  Node* index_reg = __ BytecodeOperandReg(0);
  Node* index = __ LoadRegister(index_reg);
  Node* cache_length_reg = __ BytecodeOperandReg(1);
  Node* cache_length = __ LoadRegister(cache_length_reg);

  // Check if {index} is at {cache_length} already.
  Label if_true(assembler), if_false(assembler), end(assembler);
  __ BranchIfWordEqual(index, cache_length, &if_true, &if_false);
  __ Bind(&if_true);
  {
    __ SetAccumulator(__ BooleanConstant(true));
    __ Goto(&end);
  }
  __ Bind(&if_false);
  {
    __ SetAccumulator(__ BooleanConstant(false));
    __ Goto(&end);
  }
  __ Bind(&end);
  __ Dispatch();
}

// ForInStep <index>
//
// Increments the loop counter in register |index| and stores the result
// in the accumulator.
void Interpreter::DoForInStep(InterpreterAssembler* assembler) {
  Node* index_reg = __ BytecodeOperandReg(0);
  Node* index = __ LoadRegister(index_reg);
  Node* one = __ SmiConstant(Smi::FromInt(1));
  Node* result = __ SmiAdd(index, one);
  __ SetAccumulator(result);
  __ Dispatch();
}

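// Illustrative only (hand-written, not exact bytecode generator output): the
// ForIn* bytecodes cooperate to implement `for (var key in obj)`, roughly:
//
//   <obj in accumulator>
//   ForInPrepare r_cache_type     ; fills cache_type/cache_array/cache_length
//   LdaZero
//   Star r_index
//  loop:
//   ForInDone r_index, r_cache_length
//   JumpIfTrue done               ; all enumerable keys consumed
//   ForInNext r_obj, r_index, r_cache_type, [slot]
//   JumpIfUndefined next          ; key was filtered out, skip the body
//   Star r_key
//   <loop body>
//  next:
//   ForInStep r_index
//   Star r_index
//   Jump loop
//  done:
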
// Wide
//
// Prefix bytecode indicating next bytecode has wide (16-bit) operands.
void Interpreter::DoWide(InterpreterAssembler* assembler) {
  __ DispatchWide(OperandScale::kDouble);
}

// ExtraWide
//
// Prefix bytecode indicating next bytecode has extra-wide (32-bit) operands.
void Interpreter::DoExtraWide(InterpreterAssembler* assembler) {
  __ DispatchWide(OperandScale::kQuadruple);
}

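// Operands are a single byte at the default scale. When a bytecode needs
// larger operands (for example a register or constant pool index that does
// not fit into one byte), the generator emits the same bytecode behind a
// Wide or ExtraWide prefix; the prefix only changes how the following
// bytecode's operands are decoded.
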
// Illegal
//
// An invalid bytecode aborting execution if dispatched.
void Interpreter::DoIllegal(InterpreterAssembler* assembler) {
  __ Abort(kInvalidBytecode);
}

// Nop
//
// No operation.
void Interpreter::DoNop(InterpreterAssembler* assembler) { __ Dispatch(); }

// SuspendGenerator <generator>
//
// Exports the register file and stores it into the generator. Also stores the
// current context and the state given in the accumulator into the generator.
void Interpreter::DoSuspendGenerator(InterpreterAssembler* assembler) {
  Node* generator_reg = __ BytecodeOperandReg(0);
  Node* generator = __ LoadRegister(generator_reg);

  Node* array =
      __ LoadObjectField(generator, JSGeneratorObject::kOperandStackOffset);
  Node* context = __ GetContext();
  Node* state = __ GetAccumulator();

  __ ExportRegisterFile(array);
  __ StoreObjectField(generator, JSGeneratorObject::kContextOffset, context);
  __ StoreObjectField(generator, JSGeneratorObject::kContinuationOffset, state);

  __ Dispatch();
}

// ResumeGenerator <generator>
//
// Imports the register file stored in the generator. Also loads the
// generator's state and stores it in the accumulator, before overwriting it
// with kGeneratorExecuting.
void Interpreter::DoResumeGenerator(InterpreterAssembler* assembler) {
  Node* generator_reg = __ BytecodeOperandReg(0);
  Node* generator = __ LoadRegister(generator_reg);

  __ ImportRegisterFile(
      __ LoadObjectField(generator, JSGeneratorObject::kOperandStackOffset));

  Node* old_state =
      __ LoadObjectField(generator, JSGeneratorObject::kContinuationOffset);
  Node* new_state = __ Int32Constant(JSGeneratorObject::kGeneratorExecuting);
  __ StoreObjectField(generator, JSGeneratorObject::kContinuationOffset,
                      __ SmiTag(new_state));
  __ SetAccumulator(old_state);

  __ Dispatch();
}

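// Roughly: a yield suspends the generator by storing the live registers, the
// current context and a resume state into the JSGeneratorObject
// (SuspendGenerator) before the frame returns to the caller; re-entering the
// generator restores the registers (ResumeGenerator) and leaves the stored
// state in the accumulator so the bytecode that follows can branch to the
// matching resume point.
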
}  // namespace interpreter
}  // namespace internal
}  // namespace v8