blob: 574602b0ed833add1261b02595de049b1bb9c805 [file] [log] [blame]
// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/interpreter/interpreter.h"
6
7#include "src/code-factory.h"
8#include "src/compiler.h"
9#include "src/compiler/interpreter-assembler.h"
10#include "src/factory.h"
11#include "src/interpreter/bytecode-generator.h"
12#include "src/interpreter/bytecodes.h"
13#include "src/zone.h"
14
15namespace v8 {
16namespace internal {
17namespace interpreter {
18
19using compiler::Node;
20
21#define __ assembler->
22
23
24Interpreter::Interpreter(Isolate* isolate)
25 : isolate_(isolate) {}
26
27
// static
// Allocates the fixed array that will hold one code handler per bytecode
// (Bytecode::kLast + 1 slots). Slots start out undefined and are filled in
// later by Initialize().
Handle<FixedArray> Interpreter::CreateUninitializedInterpreterTable(
    Isolate* isolate) {
  // TENURED: the table must live in old space for the immovability check
  // below to hold.
  Handle<FixedArray> handler_table = isolate->factory()->NewFixedArray(
      static_cast<int>(Bytecode::kLast) + 1, TENURED);
  // We rely on the interpreter handler table being immovable, so check that
  // it was allocated on the first page (which is always immovable).
  DCHECK(isolate->heap()->old_space()->FirstPage()->Contains(
      handler_table->address()));
  return handler_table;
}
39
40
// Generates and installs an interpreter handler (a code object built with
// the InterpreterAssembler) for every bytecode in BYTECODE_LIST, indexed by
// the bytecode's numeric value. Idempotent: does nothing once the table has
// been populated.
void Interpreter::Initialize() {
  DCHECK(FLAG_ignition);
  Handle<FixedArray> handler_table = isolate_->factory()->interpreter_table();
  if (!IsInterpreterTableInitialized(handler_table)) {
    Zone zone;
    HandleScope scope(isolate_);

// Expands to one scope per bytecode: run the matching Do<Name> generator
// against a fresh assembler and store the generated code at index
// Bytecode::k<Name> in the handler table.
#define GENERATE_CODE(Name, ...)                                \
  {                                                             \
    compiler::InterpreterAssembler assembler(isolate_, &zone,   \
                                             Bytecode::k##Name); \
    Do##Name(&assembler);                                       \
    Handle<Code> code = assembler.GenerateCode();               \
    handler_table->set(static_cast<int>(Bytecode::k##Name), *code); \
  }
    BYTECODE_LIST(GENERATE_CODE)
#undef GENERATE_CODE
  }
}
60
61
62bool Interpreter::MakeBytecode(CompilationInfo* info) {
63 BytecodeGenerator generator(info->isolate(), info->zone());
64 info->EnsureFeedbackVector();
65 Handle<BytecodeArray> bytecodes = generator.MakeBytecode(info);
66 if (FLAG_print_bytecode) {
67 OFStream os(stdout);
68 os << "Function: " << info->GetDebugName().get() << std::endl;
69 bytecodes->Print(os);
70 os << std::flush;
71 }
72
73 info->SetBytecodeArray(bytecodes);
74 info->SetCode(info->isolate()->builtins()->InterpreterEntryTrampoline());
75 return true;
76}
77
78
79bool Interpreter::IsInterpreterTableInitialized(
80 Handle<FixedArray> handler_table) {
81 DCHECK(handler_table->length() == static_cast<int>(Bytecode::kLast) + 1);
82 return handler_table->get(0) != isolate_->heap()->undefined_value();
83}
84
85
86// LdaZero
87//
88// Load literal '0' into the accumulator.
89void Interpreter::DoLdaZero(compiler::InterpreterAssembler* assembler) {
90 Node* zero_value = __ NumberConstant(0.0);
91 __ SetAccumulator(zero_value);
92 __ Dispatch();
93}
94
95
96// LdaSmi8 <imm8>
97//
98// Load an 8-bit integer literal into the accumulator as a Smi.
99void Interpreter::DoLdaSmi8(compiler::InterpreterAssembler* assembler) {
100 Node* raw_int = __ BytecodeOperandImm(0);
101 Node* smi_int = __ SmiTag(raw_int);
102 __ SetAccumulator(smi_int);
103 __ Dispatch();
104}
105
106
// Shared helper for LdaConstant / LdaConstantWide: loads the constant pool
// entry indexed by operand 0 into the accumulator and dispatches to the
// next bytecode.
void Interpreter::DoLoadConstant(compiler::InterpreterAssembler* assembler) {
  Node* index = __ BytecodeOperandIdx(0);
  Node* constant = __ LoadConstantPoolEntry(index);
  __ SetAccumulator(constant);
  __ Dispatch();
}
113
114
// LdaConstant <idx>
//
// Load constant literal at |idx| in the constant pool into the accumulator.
// Delegates to the shared (narrow/wide) constant-load helper.
void Interpreter::DoLdaConstant(compiler::InterpreterAssembler* assembler) {
  DoLoadConstant(assembler);
}
121
122
// LdaConstantWide <idx>
//
// Load constant literal at |idx| (16-bit operand form) in the constant pool
// into the accumulator. Delegates to the shared constant-load helper.
void Interpreter::DoLdaConstantWide(compiler::InterpreterAssembler* assembler) {
  DoLoadConstant(assembler);
}
129
130
131// LdaUndefined
132//
133// Load Undefined into the accumulator.
134void Interpreter::DoLdaUndefined(compiler::InterpreterAssembler* assembler) {
135 Node* undefined_value =
136 __ HeapConstant(isolate_->factory()->undefined_value());
137 __ SetAccumulator(undefined_value);
138 __ Dispatch();
139}
140
141
142// LdaNull
143//
144// Load Null into the accumulator.
145void Interpreter::DoLdaNull(compiler::InterpreterAssembler* assembler) {
146 Node* null_value = __ HeapConstant(isolate_->factory()->null_value());
147 __ SetAccumulator(null_value);
148 __ Dispatch();
149}
150
151
152// LdaTheHole
153//
154// Load TheHole into the accumulator.
155void Interpreter::DoLdaTheHole(compiler::InterpreterAssembler* assembler) {
156 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value());
157 __ SetAccumulator(the_hole_value);
158 __ Dispatch();
159}
160
161
162// LdaTrue
163//
164// Load True into the accumulator.
165void Interpreter::DoLdaTrue(compiler::InterpreterAssembler* assembler) {
166 Node* true_value = __ HeapConstant(isolate_->factory()->true_value());
167 __ SetAccumulator(true_value);
168 __ Dispatch();
169}
170
171
172// LdaFalse
173//
174// Load False into the accumulator.
175void Interpreter::DoLdaFalse(compiler::InterpreterAssembler* assembler) {
176 Node* false_value = __ HeapConstant(isolate_->factory()->false_value());
177 __ SetAccumulator(false_value);
178 __ Dispatch();
179}
180
181
182// Ldar <src>
183//
184// Load accumulator with value from register <src>.
185void Interpreter::DoLdar(compiler::InterpreterAssembler* assembler) {
186 Node* reg_index = __ BytecodeOperandReg(0);
187 Node* value = __ LoadRegister(reg_index);
188 __ SetAccumulator(value);
189 __ Dispatch();
190}
191
192
193// Star <dst>
194//
195// Store accumulator to register <dst>.
196void Interpreter::DoStar(compiler::InterpreterAssembler* assembler) {
197 Node* reg_index = __ BytecodeOperandReg(0);
198 Node* accumulator = __ GetAccumulator();
199 __ StoreRegister(accumulator, reg_index);
200 __ Dispatch();
201}
202
203
204// Exchange <reg8> <reg16>
205//
206// Exchange two registers.
207void Interpreter::DoExchange(compiler::InterpreterAssembler* assembler) {
208 Node* reg0_index = __ BytecodeOperandReg(0);
209 Node* reg1_index = __ BytecodeOperandReg(1);
210 Node* reg0_value = __ LoadRegister(reg0_index);
211 Node* reg1_value = __ LoadRegister(reg1_index);
212 __ StoreRegister(reg1_value, reg0_index);
213 __ StoreRegister(reg0_value, reg1_index);
214 __ Dispatch();
215}
216
217
218// ExchangeWide <reg16> <reg16>
219//
220// Exchange two registers.
221void Interpreter::DoExchangeWide(compiler::InterpreterAssembler* assembler) {
222 return DoExchange(assembler);
223}
224
225
226// Mov <src> <dst>
227//
228// Stores the value of register <src> to register <dst>.
229void Interpreter::DoMov(compiler::InterpreterAssembler* assembler) {
230 Node* src_index = __ BytecodeOperandReg(0);
231 Node* src_value = __ LoadRegister(src_index);
232 Node* dst_index = __ BytecodeOperandReg(1);
233 __ StoreRegister(src_value, dst_index);
234 __ Dispatch();
235}
236
237
// Shared helper for the LdaGlobal* bytecodes. Operands: 0 = constant pool
// index of the global's name, 1 = feedback vector slot. Resolves the global
// object from the native context, invokes |ic| against it, and leaves the
// result in the accumulator.
void Interpreter::DoLoadGlobal(Callable ic,
                               compiler::InterpreterAssembler* assembler) {
  // Get the global object from the current native context.
  Node* context = __ GetContext();
  Node* native_context =
      __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX);
  Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX);

  // Load the global via the LoadIC.
  Node* code_target = __ HeapConstant(ic.code());
  Node* constant_index = __ BytecodeOperandIdx(0);
  Node* name = __ LoadConstantPoolEntry(constant_index);
  Node* raw_slot = __ BytecodeOperandIdx(1);
  // The IC expects the feedback slot as a Smi.
  Node* smi_slot = __ SmiTag(raw_slot);
  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
  Node* result = __ CallIC(ic.descriptor(), code_target, global, name, smi_slot,
                           type_feedback_vector);
  __ SetAccumulator(result);
  __ Dispatch();
}
258
259
260// LdaGlobalSloppy <name_index> <slot>
261//
262// Load the global with name in constant pool entry <name_index> into the
263// accumulator using FeedBackVector slot <slot> in sloppy mode.
264void Interpreter::DoLdaGlobalSloppy(compiler::InterpreterAssembler* assembler) {
265 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
266 SLOPPY, UNINITIALIZED);
267 DoLoadGlobal(ic, assembler);
268}
269
270
// LdaGlobalStrict <name_index> <slot>
//
// Load the global with name in constant pool entry <name_index> into the
// accumulator using FeedBackVector slot <slot> in strict mode.
void Interpreter::DoLdaGlobalStrict(compiler::InterpreterAssembler* assembler) {
  Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
                                                   STRICT, UNINITIALIZED);
  DoLoadGlobal(ic, assembler);
}
280
281
282// LdaGlobalInsideTypeofSloppy <name_index> <slot>
283//
284// Load the global with name in constant pool entry <name_index> into the
285// accumulator using FeedBackVector slot <slot> in sloppy mode.
286void Interpreter::DoLdaGlobalInsideTypeofSloppy(
287 compiler::InterpreterAssembler* assembler) {
288 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF,
289 SLOPPY, UNINITIALIZED);
290 DoLoadGlobal(ic, assembler);
291}
292
293
294// LdaGlobalInsideTypeofStrict <name_index> <slot>
295//
296// Load the global with name in constant pool entry <name_index> into the
297// accumulator using FeedBackVector slot <slot> in strict mode.
298void Interpreter::DoLdaGlobalInsideTypeofStrict(
299 compiler::InterpreterAssembler* assembler) {
300 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF,
301 STRICT, UNINITIALIZED);
302 DoLoadGlobal(ic, assembler);
303}
304
305
306// LdaGlobalSloppyWide <name_index> <slot>
307//
308// Load the global with name in constant pool entry <name_index> into the
309// accumulator using FeedBackVector slot <slot> in sloppy mode.
310void Interpreter::DoLdaGlobalSloppyWide(
311 compiler::InterpreterAssembler* assembler) {
312 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
313 SLOPPY, UNINITIALIZED);
314 DoLoadGlobal(ic, assembler);
315}
316
317
// LdaGlobalStrictWide <name_index> <slot>
//
// Load the global with name in constant pool entry <name_index> into the
// accumulator using FeedBackVector slot <slot> in strict mode.
void Interpreter::DoLdaGlobalStrictWide(
    compiler::InterpreterAssembler* assembler) {
  Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
                                                   STRICT, UNINITIALIZED);
  DoLoadGlobal(ic, assembler);
}
328
329
330// LdaGlobalInsideTypeofSloppyWide <name_index> <slot>
331//
332// Load the global with name in constant pool entry <name_index> into the
333// accumulator using FeedBackVector slot <slot> in sloppy mode.
334void Interpreter::DoLdaGlobalInsideTypeofSloppyWide(
335 compiler::InterpreterAssembler* assembler) {
336 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF,
337 SLOPPY, UNINITIALIZED);
338 DoLoadGlobal(ic, assembler);
339}
340
341
// LdaGlobalInsideTypeofStrictWide <name_index> <slot>
//
// Load the global with name in constant pool entry <name_index> into the
// accumulator using FeedBackVector slot <slot> in strict mode.
void Interpreter::DoLdaGlobalInsideTypeofStrictWide(
    compiler::InterpreterAssembler* assembler) {
  Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF,
                                                   STRICT, UNINITIALIZED);
  DoLoadGlobal(ic, assembler);
}
352
353
// Shared helper for the StaGlobal* bytecodes. Operands: 0 = constant pool
// index of the global's name, 1 = feedback vector slot. Resolves the global
// object from the native context and stores the accumulator into the named
// property via |ic|. The accumulator is left unchanged.
void Interpreter::DoStoreGlobal(Callable ic,
                                compiler::InterpreterAssembler* assembler) {
  // Get the global object from the current native context.
  Node* context = __ GetContext();
  Node* native_context =
      __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX);
  Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX);

  // Store the global via the StoreIC.
  Node* code_target = __ HeapConstant(ic.code());
  Node* constant_index = __ BytecodeOperandIdx(0);
  Node* name = __ LoadConstantPoolEntry(constant_index);
  Node* value = __ GetAccumulator();
  Node* raw_slot = __ BytecodeOperandIdx(1);
  // The IC expects the feedback slot as a Smi.
  Node* smi_slot = __ SmiTag(raw_slot);
  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
  __ CallIC(ic.descriptor(), code_target, global, name, value, smi_slot,
            type_feedback_vector);

  __ Dispatch();
}
375
376
377// StaGlobalSloppy <name_index> <slot>
378//
379// Store the value in the accumulator into the global with name in constant pool
380// entry <name_index> using FeedBackVector slot <slot> in sloppy mode.
381void Interpreter::DoStaGlobalSloppy(compiler::InterpreterAssembler* assembler) {
382 Callable ic =
383 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
384 DoStoreGlobal(ic, assembler);
385}
386
387
388// StaGlobalStrict <name_index> <slot>
389//
390// Store the value in the accumulator into the global with name in constant pool
391// entry <name_index> using FeedBackVector slot <slot> in strict mode.
392void Interpreter::DoStaGlobalStrict(compiler::InterpreterAssembler* assembler) {
393 Callable ic =
394 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
395 DoStoreGlobal(ic, assembler);
396}
397
398
399// StaGlobalSloppyWide <name_index> <slot>
400//
401// Store the value in the accumulator into the global with name in constant pool
402// entry <name_index> using FeedBackVector slot <slot> in sloppy mode.
403void Interpreter::DoStaGlobalSloppyWide(
404 compiler::InterpreterAssembler* assembler) {
405 Callable ic =
406 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
407 DoStoreGlobal(ic, assembler);
408}
409
410
411// StaGlobalStrictWide <name_index> <slot>
412//
413// Store the value in the accumulator into the global with name in constant pool
414// entry <name_index> using FeedBackVector slot <slot> in strict mode.
415void Interpreter::DoStaGlobalStrictWide(
416 compiler::InterpreterAssembler* assembler) {
417 Callable ic =
418 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
419 DoStoreGlobal(ic, assembler);
420}
421
422
423// LdaContextSlot <context> <slot_index>
424//
425// Load the object in |slot_index| of |context| into the accumulator.
426void Interpreter::DoLdaContextSlot(compiler::InterpreterAssembler* assembler) {
427 Node* reg_index = __ BytecodeOperandReg(0);
428 Node* context = __ LoadRegister(reg_index);
429 Node* slot_index = __ BytecodeOperandIdx(1);
430 Node* result = __ LoadContextSlot(context, slot_index);
431 __ SetAccumulator(result);
432 __ Dispatch();
433}
434
435
// LdaContextSlotWide <context> <slot_index>
//
// Load the object in |slot_index| of |context| into the accumulator
// (wide operand form). Same handler logic as LdaContextSlot.
void Interpreter::DoLdaContextSlotWide(
    compiler::InterpreterAssembler* assembler) {
  DoLdaContextSlot(assembler);
}
443
444
445// StaContextSlot <context> <slot_index>
446//
447// Stores the object in the accumulator into |slot_index| of |context|.
448void Interpreter::DoStaContextSlot(compiler::InterpreterAssembler* assembler) {
449 Node* value = __ GetAccumulator();
450 Node* reg_index = __ BytecodeOperandReg(0);
451 Node* context = __ LoadRegister(reg_index);
452 Node* slot_index = __ BytecodeOperandIdx(1);
453 __ StoreContextSlot(context, slot_index, value);
454 __ Dispatch();
455}
456
457
// StaContextSlotWide <context> <slot_index>
//
// Stores the object in the accumulator into |slot_index| of |context|
// (wide operand form). Same handler logic as StaContextSlot.
void Interpreter::DoStaContextSlotWide(
    compiler::InterpreterAssembler* assembler) {
  DoStaContextSlot(assembler);
}
465
466
// Shared helper for the LdaLookupSlot* bytecodes. Operand 0 is the constant
// pool index of the name to look up dynamically. |function_id| selects the
// runtime lookup function; it returns a pair, of which only the first
// projection (the value) is kept in the accumulator.
void Interpreter::DoLoadLookupSlot(Runtime::FunctionId function_id,
                                   compiler::InterpreterAssembler* assembler) {
  Node* index = __ BytecodeOperandIdx(0);
  Node* name = __ LoadConstantPoolEntry(index);
  Node* context = __ GetContext();
  // The runtime call returns a (value, context-ish) pair; take the value.
  Node* result_pair = __ CallRuntime(function_id, context, name);
  Node* result = __ Projection(0, result_pair);
  __ SetAccumulator(result);
  __ Dispatch();
}
477
478
// LdaLookupSlot <name_index>
//
// Lookup the object with the name in constant pool entry |name_index|
// dynamically. Throws a reference error if the name cannot be resolved.
void Interpreter::DoLdaLookupSlot(compiler::InterpreterAssembler* assembler) {
  DoLoadLookupSlot(Runtime::kLoadLookupSlot, assembler);
}
486
487
// LdaLookupSlotInsideTypeof <name_index>
//
// Lookup the object with the name in constant pool entry |name_index|
// dynamically without causing a NoReferenceError (typeof semantics).
void Interpreter::DoLdaLookupSlotInsideTypeof(
    compiler::InterpreterAssembler* assembler) {
  DoLoadLookupSlot(Runtime::kLoadLookupSlotNoReferenceError, assembler);
}
496
497
// LdaLookupSlotWide <name_index>
//
// Lookup the object with the name in constant pool entry |name_index|
// dynamically (wide operand form). Same handler logic as LdaLookupSlot.
void Interpreter::DoLdaLookupSlotWide(
    compiler::InterpreterAssembler* assembler) {
  DoLdaLookupSlot(assembler);
}
506
507
// LdaLookupSlotInsideTypeofWide <name_index>
//
// Lookup the object with the name in constant pool entry |name_index|
// dynamically without causing a NoReferenceError (wide operand form).
void Interpreter::DoLdaLookupSlotInsideTypeofWide(
    compiler::InterpreterAssembler* assembler) {
  DoLdaLookupSlotInsideTypeof(assembler);
}
516
517
// Shared helper for the StaLookupSlot* bytecodes. Operand 0 is the constant
// pool index of the name; the value to store is taken from the accumulator,
// which also receives the runtime call's result.
void Interpreter::DoStoreLookupSlot(LanguageMode language_mode,
                                    compiler::InterpreterAssembler* assembler) {
  Node* value = __ GetAccumulator();
  Node* index = __ BytecodeOperandIdx(0);
  Node* name = __ LoadConstantPoolEntry(index);
  Node* context = __ GetContext();
  // The language mode is passed to the runtime as a number constant.
  Node* language_mode_node = __ NumberConstant(language_mode);
  // NOTE(review): argument order (value, context, name, language_mode) is
  // assumed to match Runtime::kStoreLookupSlot — confirm against the runtime
  // function's declaration.
  Node* result = __ CallRuntime(Runtime::kStoreLookupSlot, value, context, name,
                                language_mode_node);
  __ SetAccumulator(result);
  __ Dispatch();
}
530
531
532// StaLookupSlotSloppy <name_index>
533//
534// Store the object in accumulator to the object with the name in constant
535// pool entry |name_index| in sloppy mode.
536void Interpreter::DoStaLookupSlotSloppy(
537 compiler::InterpreterAssembler* assembler) {
538 DoStoreLookupSlot(LanguageMode::SLOPPY, assembler);
539}
540
541
542// StaLookupSlotStrict <name_index>
543//
544// Store the object in accumulator to the object with the name in constant
545// pool entry |name_index| in strict mode.
546void Interpreter::DoStaLookupSlotStrict(
547 compiler::InterpreterAssembler* assembler) {
548 DoStoreLookupSlot(LanguageMode::STRICT, assembler);
549}
550
551
// StaLookupSlotSloppyWide <name_index>
//
// Store the object in accumulator to the object with the name in constant
// pool entry |name_index| in sloppy mode (wide operand form).
void Interpreter::DoStaLookupSlotSloppyWide(
    compiler::InterpreterAssembler* assembler) {
  DoStaLookupSlotSloppy(assembler);
}
560
561
// StaLookupSlotStrictWide <name_index>
//
// Store the object in accumulator to the object with the name in constant
// pool entry |name_index| in strict mode (wide operand form).
void Interpreter::DoStaLookupSlotStrictWide(
    compiler::InterpreterAssembler* assembler) {
  DoStaLookupSlotStrict(assembler);
}
570
571
// Shared helper for the LoadIC* bytecodes. Operands: 0 = register holding
// the receiver object, 1 = constant pool index of the property name,
// 2 = feedback vector slot. Calls |ic| and leaves the result in the
// accumulator.
void Interpreter::DoLoadIC(Callable ic,
                           compiler::InterpreterAssembler* assembler) {
  Node* code_target = __ HeapConstant(ic.code());
  Node* register_index = __ BytecodeOperandReg(0);
  Node* object = __ LoadRegister(register_index);
  Node* constant_index = __ BytecodeOperandIdx(1);
  Node* name = __ LoadConstantPoolEntry(constant_index);
  Node* raw_slot = __ BytecodeOperandIdx(2);
  // The IC expects the feedback slot as a Smi.
  Node* smi_slot = __ SmiTag(raw_slot);
  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
  Node* result = __ CallIC(ic.descriptor(), code_target, object, name, smi_slot,
                           type_feedback_vector);
  __ SetAccumulator(result);
  __ Dispatch();
}
587
588
589// LoadICSloppy <object> <name_index> <slot>
590//
591// Calls the sloppy mode LoadIC at FeedBackVector slot <slot> for <object> and
592// the name at constant pool entry <name_index>.
593void Interpreter::DoLoadICSloppy(compiler::InterpreterAssembler* assembler) {
594 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
595 SLOPPY, UNINITIALIZED);
596 DoLoadIC(ic, assembler);
597}
598
599
// LoadICStrict <object> <name_index> <slot>
//
// Calls the strict mode LoadIC at FeedBackVector slot <slot> for <object> and
// the name at constant pool entry <name_index>.
void Interpreter::DoLoadICStrict(compiler::InterpreterAssembler* assembler) {
  Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
                                                   STRICT, UNINITIALIZED);
  DoLoadIC(ic, assembler);
}
609
610
611// LoadICSloppyWide <object> <name_index> <slot>
612//
613// Calls the sloppy mode LoadIC at FeedBackVector slot <slot> for <object> and
614// the name at constant pool entry <name_index>.
615void Interpreter::DoLoadICSloppyWide(
616 compiler::InterpreterAssembler* assembler) {
617 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
618 SLOPPY, UNINITIALIZED);
619 DoLoadIC(ic, assembler);
620}
621
622
// LoadICStrictWide <object> <name_index> <slot>
//
// Calls the strict mode LoadIC at FeedBackVector slot <slot> for <object> and
// the name at constant pool entry <name_index> (wide operand form).
void Interpreter::DoLoadICStrictWide(
    compiler::InterpreterAssembler* assembler) {
  Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
                                                   STRICT, UNINITIALIZED);
  DoLoadIC(ic, assembler);
}
633
634
// Shared helper for the KeyedLoadIC* bytecodes. Operands: 0 = register
// holding the receiver object, 1 = feedback vector slot. The key is taken
// from the accumulator, which also receives the loaded value.
void Interpreter::DoKeyedLoadIC(Callable ic,
                                compiler::InterpreterAssembler* assembler) {
  Node* code_target = __ HeapConstant(ic.code());
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* object = __ LoadRegister(reg_index);
  Node* name = __ GetAccumulator();
  Node* raw_slot = __ BytecodeOperandIdx(1);
  // The IC expects the feedback slot as a Smi.
  Node* smi_slot = __ SmiTag(raw_slot);
  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
  Node* result = __ CallIC(ic.descriptor(), code_target, object, name, smi_slot,
                           type_feedback_vector);
  __ SetAccumulator(result);
  __ Dispatch();
}
649
650
651// KeyedLoadICSloppy <object> <slot>
652//
653// Calls the sloppy mode KeyedLoadIC at FeedBackVector slot <slot> for <object>
654// and the key in the accumulator.
655void Interpreter::DoKeyedLoadICSloppy(
656 compiler::InterpreterAssembler* assembler) {
657 Callable ic =
658 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
659 DoKeyedLoadIC(ic, assembler);
660}
661
662
663// KeyedLoadICStrict <object> <slot>
664//
665// Calls the strict mode KeyedLoadIC at FeedBackVector slot <slot> for <object>
666// and the key in the accumulator.
667void Interpreter::DoKeyedLoadICStrict(
668 compiler::InterpreterAssembler* assembler) {
669 Callable ic =
670 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
671 DoKeyedLoadIC(ic, assembler);
672}
673
674
675// KeyedLoadICSloppyWide <object> <slot>
676//
677// Calls the sloppy mode KeyedLoadIC at FeedBackVector slot <slot> for <object>
678// and the key in the accumulator.
679void Interpreter::DoKeyedLoadICSloppyWide(
680 compiler::InterpreterAssembler* assembler) {
681 Callable ic =
682 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
683 DoKeyedLoadIC(ic, assembler);
684}
685
686
687// KeyedLoadICStrictWide <object> <slot>
688//
689// Calls the strict mode KeyedLoadIC at FeedBackVector slot <slot> for <object>
690// and the key in the accumulator.
691void Interpreter::DoKeyedLoadICStrictWide(
692 compiler::InterpreterAssembler* assembler) {
693 Callable ic =
694 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
695 DoKeyedLoadIC(ic, assembler);
696}
697
698
// Shared helper for the StoreIC* bytecodes. Operands: 0 = register holding
// the receiver object, 1 = constant pool index of the property name,
// 2 = feedback vector slot. Stores the accumulator into the named property
// via |ic|; the accumulator is left unchanged.
void Interpreter::DoStoreIC(Callable ic,
                            compiler::InterpreterAssembler* assembler) {
  Node* code_target = __ HeapConstant(ic.code());
  Node* object_reg_index = __ BytecodeOperandReg(0);
  Node* object = __ LoadRegister(object_reg_index);
  Node* constant_index = __ BytecodeOperandIdx(1);
  Node* name = __ LoadConstantPoolEntry(constant_index);
  Node* value = __ GetAccumulator();
  Node* raw_slot = __ BytecodeOperandIdx(2);
  // The IC expects the feedback slot as a Smi.
  Node* smi_slot = __ SmiTag(raw_slot);
  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
  __ CallIC(ic.descriptor(), code_target, object, name, value, smi_slot,
            type_feedback_vector);
  __ Dispatch();
}
714
715
716// StoreICSloppy <object> <name_index> <slot>
717//
718// Calls the sloppy mode StoreIC at FeedBackVector slot <slot> for <object> and
719// the name in constant pool entry <name_index> with the value in the
720// accumulator.
721void Interpreter::DoStoreICSloppy(compiler::InterpreterAssembler* assembler) {
722 Callable ic =
723 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
724 DoStoreIC(ic, assembler);
725}
726
727
728// StoreICStrict <object> <name_index> <slot>
729//
730// Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and
731// the name in constant pool entry <name_index> with the value in the
732// accumulator.
733void Interpreter::DoStoreICStrict(compiler::InterpreterAssembler* assembler) {
734 Callable ic =
735 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
736 DoStoreIC(ic, assembler);
737}
738
739
740// StoreICSloppyWide <object> <name_index> <slot>
741//
742// Calls the sloppy mode StoreIC at FeedBackVector slot <slot> for <object> and
743// the name in constant pool entry <name_index> with the value in the
744// accumulator.
745void Interpreter::DoStoreICSloppyWide(
746 compiler::InterpreterAssembler* assembler) {
747 Callable ic =
748 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
749 DoStoreIC(ic, assembler);
750}
751
752
753// StoreICStrictWide <object> <name_index> <slot>
754//
755// Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and
756// the name in constant pool entry <name_index> with the value in the
757// accumulator.
758void Interpreter::DoStoreICStrictWide(
759 compiler::InterpreterAssembler* assembler) {
760 Callable ic =
761 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
762 DoStoreIC(ic, assembler);
763}
764
765
// Shared helper for the KeyedStoreIC* bytecodes. Operands: 0 = register
// holding the receiver object, 1 = register holding the key,
// 2 = feedback vector slot. Stores the accumulator into object[key] via
// |ic|; the accumulator is left unchanged.
void Interpreter::DoKeyedStoreIC(Callable ic,
                                 compiler::InterpreterAssembler* assembler) {
  Node* code_target = __ HeapConstant(ic.code());
  Node* object_reg_index = __ BytecodeOperandReg(0);
  Node* object = __ LoadRegister(object_reg_index);
  Node* name_reg_index = __ BytecodeOperandReg(1);
  Node* name = __ LoadRegister(name_reg_index);
  Node* value = __ GetAccumulator();
  Node* raw_slot = __ BytecodeOperandIdx(2);
  // The IC expects the feedback slot as a Smi.
  Node* smi_slot = __ SmiTag(raw_slot);
  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
  __ CallIC(ic.descriptor(), code_target, object, name, value, smi_slot,
            type_feedback_vector);
  __ Dispatch();
}
781
782
783// KeyedStoreICSloppy <object> <key> <slot>
784//
785// Calls the sloppy mode KeyStoreIC at FeedBackVector slot <slot> for <object>
786// and the key <key> with the value in the accumulator.
787void Interpreter::DoKeyedStoreICSloppy(
788 compiler::InterpreterAssembler* assembler) {
789 Callable ic =
790 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
791 DoKeyedStoreIC(ic, assembler);
792}
793
794
// KeyedStoreICStrict <object> <key> <slot>
//
// Calls the strict mode KeyedStoreIC at FeedBackVector slot <slot> for
// <object> and the key <key> with the value in the accumulator.
void Interpreter::DoKeyedStoreICStrict(
    compiler::InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
  DoKeyedStoreIC(ic, assembler);
}
805
806
807// KeyedStoreICSloppyWide <object> <key> <slot>
808//
809// Calls the sloppy mode KeyStoreIC at FeedBackVector slot <slot> for <object>
810// and the key <key> with the value in the accumulator.
811void Interpreter::DoKeyedStoreICSloppyWide(
812 compiler::InterpreterAssembler* assembler) {
813 Callable ic =
814 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
815 DoKeyedStoreIC(ic, assembler);
816}
817
818
// KeyedStoreICStrictWide <object> <key> <slot>
//
// Calls the strict mode KeyedStoreIC at FeedBackVector slot <slot> for
// <object> and the key <key> with the value in the accumulator (wide
// operand form).
void Interpreter::DoKeyedStoreICStrictWide(
    compiler::InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
  DoKeyedStoreIC(ic, assembler);
}
829
830
831// PushContext <context>
832//
833// Pushes the accumulator as the current context, and saves it in <context>
834void Interpreter::DoPushContext(compiler::InterpreterAssembler* assembler) {
835 Node* reg_index = __ BytecodeOperandReg(0);
836 Node* context = __ GetAccumulator();
837 __ SetContext(context);
838 __ StoreRegister(context, reg_index);
839 __ Dispatch();
840}
841
842
843// PopContext <context>
844//
845// Pops the current context and sets <context> as the new context.
846void Interpreter::DoPopContext(compiler::InterpreterAssembler* assembler) {
847 Node* reg_index = __ BytecodeOperandReg(0);
848 Node* context = __ LoadRegister(reg_index);
849 __ SetContext(context);
850 __ Dispatch();
851}
852
853
// Shared helper for binary-operator bytecodes: applies the runtime function
// |function_id| to (register operand 0, accumulator) and leaves the result
// in the accumulator.
void Interpreter::DoBinaryOp(Runtime::FunctionId function_id,
                             compiler::InterpreterAssembler* assembler) {
  // TODO(rmcilroy): Call ICs which back-patch bytecode with type specialized
  // operations, instead of calling builtins directly.
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* lhs = __ LoadRegister(reg_index);   // left operand from register
  Node* rhs = __ GetAccumulator();          // right operand from accumulator
  Node* result = __ CallRuntime(function_id, lhs, rhs);
  __ SetAccumulator(result);
  __ Dispatch();
}
865
866
// Add <src>
//
// Add register <src> to accumulator, leaving the result in the accumulator.
void Interpreter::DoAdd(compiler::InterpreterAssembler* assembler) {
  DoBinaryOp(Runtime::kAdd, assembler);
}
873
874
// Sub <src>
//
// Subtract the accumulator from register <src> (i.e. <src> - accumulator),
// leaving the result in the accumulator.
void Interpreter::DoSub(compiler::InterpreterAssembler* assembler) {
  DoBinaryOp(Runtime::kSubtract, assembler);
}
881
882
// Mul <src>
//
// Multiply register <src> by the accumulator, leaving the result in the
// accumulator.
void Interpreter::DoMul(compiler::InterpreterAssembler* assembler) {
  DoBinaryOp(Runtime::kMultiply, assembler);
}
889
890
// Div <src>
//
// Divide register <src> by the accumulator (i.e. <src> / accumulator),
// leaving the result in the accumulator.
void Interpreter::DoDiv(compiler::InterpreterAssembler* assembler) {
  DoBinaryOp(Runtime::kDivide, assembler);
}
897
898
// Mod <src>
//
// Compute register <src> modulo the accumulator, leaving the result in the
// accumulator.
void Interpreter::DoMod(compiler::InterpreterAssembler* assembler) {
  DoBinaryOp(Runtime::kModulus, assembler);
}
905
906
// BitwiseOr <src>
//
// Bitwise-or register <src> with the accumulator, leaving the result in the
// accumulator.
void Interpreter::DoBitwiseOr(compiler::InterpreterAssembler* assembler) {
  DoBinaryOp(Runtime::kBitwiseOr, assembler);
}
913
914
// BitwiseXor <src>
//
// Bitwise-xor register <src> with the accumulator, leaving the result in
// the accumulator.
void Interpreter::DoBitwiseXor(compiler::InterpreterAssembler* assembler) {
  DoBinaryOp(Runtime::kBitwiseXor, assembler);
}
921
922
// BitwiseAnd <src>
//
// Bitwise-and register <src> with the accumulator, leaving the result in
// the accumulator.
void Interpreter::DoBitwiseAnd(compiler::InterpreterAssembler* assembler) {
  DoBinaryOp(Runtime::kBitwiseAnd, assembler);
}
929
930
// ShiftLeft <src>
//
// Left shifts register <src> by the count specified in the accumulator.
// Register <src> is converted to an int32 and the accumulator to uint32
// before the operation. Only the 5 least significant bits of the accumulator
// are used as the count, i.e. <src> << (accumulator & 0x1F).
void Interpreter::DoShiftLeft(compiler::InterpreterAssembler* assembler) {
  DoBinaryOp(Runtime::kShiftLeft, assembler);
}
940
941
942// ShiftRight <src>
943//
944// Right shifts register <src> by the count specified in the accumulator.
945// Result is sign extended. Register <src> is converted to an int32 and the
946// accumulator to uint32 before the operation. 5 lsb bits from the accumulator
947// are used as count i.e. <src> >> (accumulator & 0x1F).
void Interpreter::DoShiftRight(compiler::InterpreterAssembler* assembler) {
  // Delegates to the shared binary-op handler with the ShiftRight function id.
  DoBinaryOp(Runtime::kShiftRight, assembler);
}
951
952
953// ShiftRightLogical <src>
954//
// Right Shifts register <src> by the count specified in the accumulator.
// Result is zero-filled. The accumulator and register <src> are converted to
// uint32 before the operation. 5 lsb bits from the accumulator are used as
// count i.e. <src> >>> (accumulator & 0x1F).
void Interpreter::DoShiftRightLogical(
    compiler::InterpreterAssembler* assembler) {
  // Delegates to the shared binary-op handler (unsigned right shift).
  DoBinaryOp(Runtime::kShiftRightLogical, assembler);
}
963
964
965void Interpreter::DoCountOp(Runtime::FunctionId function_id,
966 compiler::InterpreterAssembler* assembler) {
967 Node* value = __ GetAccumulator();
968 Node* one = __ NumberConstant(1);
969 Node* result = __ CallRuntime(function_id, value, one);
970 __ SetAccumulator(result);
971 __ Dispatch();
972}
973
974
975// Inc
976//
977// Increments value in the accumulator by one.
void Interpreter::DoInc(compiler::InterpreterAssembler* assembler) {
  // Reuses the count-op helper with the Add runtime function.
  DoCountOp(Runtime::kAdd, assembler);
}
981
982
983// Dec
984//
985// Decrements value in the accumulator by one.
void Interpreter::DoDec(compiler::InterpreterAssembler* assembler) {
  // Reuses the count-op helper with the Subtract runtime function.
  DoCountOp(Runtime::kSubtract, assembler);
}
989
990
991// LogicalNot
992//
993// Perform logical-not on the accumulator, first casting the
994// accumulator to a boolean value if required.
995void Interpreter::DoLogicalNot(compiler::InterpreterAssembler* assembler) {
996 Node* accumulator = __ GetAccumulator();
997 Node* result = __ CallRuntime(Runtime::kInterpreterLogicalNot, accumulator);
998 __ SetAccumulator(result);
999 __ Dispatch();
1000}
1001
1002
1003// TypeOf
1004//
// Load the accumulator with the string representing the type of the
1006// object in the accumulator.
1007void Interpreter::DoTypeOf(compiler::InterpreterAssembler* assembler) {
1008 Node* accumulator = __ GetAccumulator();
1009 Node* result = __ CallRuntime(Runtime::kInterpreterTypeOf, accumulator);
1010 __ SetAccumulator(result);
1011 __ Dispatch();
1012}
1013
1014
1015void Interpreter::DoDelete(Runtime::FunctionId function_id,
1016 compiler::InterpreterAssembler* assembler) {
1017 Node* reg_index = __ BytecodeOperandReg(0);
1018 Node* object = __ LoadRegister(reg_index);
1019 Node* key = __ GetAccumulator();
1020 Node* result = __ CallRuntime(function_id, object, key);
1021 __ SetAccumulator(result);
1022 __ Dispatch();
1023}
1024
1025
1026// DeletePropertyStrict
1027//
1028// Delete the property specified in the accumulator from the object
1029// referenced by the register operand following strict mode semantics.
void Interpreter::DoDeletePropertyStrict(
    compiler::InterpreterAssembler* assembler) {
  // Strict-mode variant of the shared delete helper.
  DoDelete(Runtime::kDeleteProperty_Strict, assembler);
}
1034
1035
1036// DeletePropertySloppy
1037//
1038// Delete the property specified in the accumulator from the object
1039// referenced by the register operand following sloppy mode semantics.
void Interpreter::DoDeletePropertySloppy(
    compiler::InterpreterAssembler* assembler) {
  // Sloppy-mode variant of the shared delete helper.
  DoDelete(Runtime::kDeleteProperty_Sloppy, assembler);
}
1044
1045
1046// DeleteLookupSlot
1047//
1048// Delete the variable with the name specified in the accumulator by dynamically
1049// looking it up.
1050void Interpreter::DoDeleteLookupSlot(
1051 compiler::InterpreterAssembler* assembler) {
1052 Node* name = __ GetAccumulator();
1053 Node* context = __ GetContext();
1054 Node* result = __ CallRuntime(Runtime::kDeleteLookupSlot, context, name);
1055 __ SetAccumulator(result);
1056 __ Dispatch();
1057}
1058
1059
1060void Interpreter::DoJSCall(compiler::InterpreterAssembler* assembler) {
1061 Node* function_reg = __ BytecodeOperandReg(0);
1062 Node* function = __ LoadRegister(function_reg);
1063 Node* receiver_reg = __ BytecodeOperandReg(1);
1064 Node* first_arg = __ RegisterLocation(receiver_reg);
1065 Node* args_count = __ BytecodeOperandCount(2);
1066 // TODO(rmcilroy): Use the call type feedback slot to call via CallIC.
1067 Node* result = __ CallJS(function, first_arg, args_count);
1068 __ SetAccumulator(result);
1069 __ Dispatch();
1070}
1071
1072
1073// Call <callable> <receiver> <arg_count>
1074//
1075// Call a JSfunction or Callable in |callable| with the |receiver| and
1076// |arg_count| arguments in subsequent registers.
void Interpreter::DoCall(compiler::InterpreterAssembler* assembler) {
  // Thin wrapper over the common JS call handler.
  DoJSCall(assembler);
}
1080
1081
1082// CallWide <callable> <receiver> <arg_count>
1083//
1084// Call a JSfunction or Callable in |callable| with the |receiver| and
1085// |arg_count| arguments in subsequent registers.
void Interpreter::DoCallWide(compiler::InterpreterAssembler* assembler) {
  // Shares the narrow Call bytecode's handler.
  DoJSCall(assembler);
}
1089
1090
1091// CallRuntime <function_id> <first_arg> <arg_count>
1092//
1093// Call the runtime function |function_id| with the first argument in
1094// register |first_arg| and |arg_count| arguments in subsequent
1095// registers.
1096void Interpreter::DoCallRuntime(compiler::InterpreterAssembler* assembler) {
1097 Node* function_id = __ BytecodeOperandIdx(0);
1098 Node* first_arg_reg = __ BytecodeOperandReg(1);
1099 Node* first_arg = __ RegisterLocation(first_arg_reg);
1100 Node* args_count = __ BytecodeOperandCount(2);
1101 Node* result = __ CallRuntime(function_id, first_arg, args_count);
1102 __ SetAccumulator(result);
1103 __ Dispatch();
1104}
1105
1106
1107// CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return>
1108//
1109// Call the runtime function |function_id| which returns a pair, with the
1110// first argument in register |first_arg| and |arg_count| arguments in
1111// subsequent registers. Returns the result in <first_return> and
1112// <first_return + 1>
1113void Interpreter::DoCallRuntimeForPair(
1114 compiler::InterpreterAssembler* assembler) {
1115 // Call the runtime function.
1116 Node* function_id = __ BytecodeOperandIdx(0);
1117 Node* first_arg_reg = __ BytecodeOperandReg(1);
1118 Node* first_arg = __ RegisterLocation(first_arg_reg);
1119 Node* args_count = __ BytecodeOperandCount(2);
1120 Node* result_pair = __ CallRuntime(function_id, first_arg, args_count, 2);
1121
1122 // Store the results in <first_return> and <first_return + 1>
1123 Node* first_return_reg = __ BytecodeOperandReg(3);
1124 Node* second_return_reg = __ NextRegister(first_return_reg);
1125 Node* result0 = __ Projection(0, result_pair);
1126 Node* result1 = __ Projection(1, result_pair);
1127 __ StoreRegister(result0, first_return_reg);
1128 __ StoreRegister(result1, second_return_reg);
1129
1130 __ Dispatch();
1131}
1132
1133
1134// CallJSRuntime <context_index> <receiver> <arg_count>
1135//
1136// Call the JS runtime function that has the |context_index| with the receiver
1137// in register |receiver| and |arg_count| arguments in subsequent registers.
1138void Interpreter::DoCallJSRuntime(compiler::InterpreterAssembler* assembler) {
1139 Node* context_index = __ BytecodeOperandIdx(0);
1140 Node* receiver_reg = __ BytecodeOperandReg(1);
1141 Node* first_arg = __ RegisterLocation(receiver_reg);
1142 Node* args_count = __ BytecodeOperandCount(2);
1143
1144 // Get the function to call from the native context.
1145 Node* context = __ GetContext();
1146 Node* native_context =
1147 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX);
1148 Node* function = __ LoadContextSlot(native_context, context_index);
1149
1150 // Call the function.
1151 Node* result = __ CallJS(function, first_arg, args_count);
1152 __ SetAccumulator(result);
1153 __ Dispatch();
1154}
1155
1156
1157// New <constructor> <first_arg> <arg_count>
1158//
1159// Call operator new with |constructor| and the first argument in
// register |first_arg| and |arg_count| arguments in subsequent
// registers.
1162void Interpreter::DoNew(compiler::InterpreterAssembler* assembler) {
1163 Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_);
1164 Node* constructor_reg = __ BytecodeOperandReg(0);
1165 Node* constructor = __ LoadRegister(constructor_reg);
1166 Node* first_arg_reg = __ BytecodeOperandReg(1);
1167 Node* first_arg = __ RegisterLocation(first_arg_reg);
1168 Node* args_count = __ BytecodeOperandCount(2);
1169 Node* result =
1170 __ CallConstruct(constructor, constructor, first_arg, args_count);
1171 __ SetAccumulator(result);
1172 __ Dispatch();
1173}
1174
1175
1176// TestEqual <src>
1177//
1178// Test if the value in the <src> register equals the accumulator.
void Interpreter::DoTestEqual(compiler::InterpreterAssembler* assembler) {
  // Delegates to the shared binary-op handler with the Equals function id.
  DoBinaryOp(Runtime::kInterpreterEquals, assembler);
}
1182
1183
1184// TestNotEqual <src>
1185//
1186// Test if the value in the <src> register is not equal to the accumulator.
void Interpreter::DoTestNotEqual(compiler::InterpreterAssembler* assembler) {
  // Delegates to the shared binary-op handler with the NotEquals function id.
  DoBinaryOp(Runtime::kInterpreterNotEquals, assembler);
}
1190
1191
1192// TestEqualStrict <src>
1193//
1194// Test if the value in the <src> register is strictly equal to the accumulator.
void Interpreter::DoTestEqualStrict(compiler::InterpreterAssembler* assembler) {
  // Delegates to the shared binary-op handler (strict equality).
  DoBinaryOp(Runtime::kInterpreterStrictEquals, assembler);
}
1198
1199
1200// TestNotEqualStrict <src>
1201//
1202// Test if the value in the <src> register is not strictly equal to the
1203// accumulator.
void Interpreter::DoTestNotEqualStrict(
    compiler::InterpreterAssembler* assembler) {
  // Delegates to the shared binary-op handler (strict inequality).
  DoBinaryOp(Runtime::kInterpreterStrictNotEquals, assembler);
}
1208
1209
1210// TestLessThan <src>
1211//
1212// Test if the value in the <src> register is less than the accumulator.
void Interpreter::DoTestLessThan(compiler::InterpreterAssembler* assembler) {
  // Delegates to the shared binary-op handler with the LessThan function id.
  DoBinaryOp(Runtime::kInterpreterLessThan, assembler);
}
1216
1217
1218// TestGreaterThan <src>
1219//
1220// Test if the value in the <src> register is greater than the accumulator.
void Interpreter::DoTestGreaterThan(compiler::InterpreterAssembler* assembler) {
  // Delegates to the shared binary-op handler with the GreaterThan function id.
  DoBinaryOp(Runtime::kInterpreterGreaterThan, assembler);
}
1224
1225
1226// TestLessThanOrEqual <src>
1227//
1228// Test if the value in the <src> register is less than or equal to the
1229// accumulator.
void Interpreter::DoTestLessThanOrEqual(
    compiler::InterpreterAssembler* assembler) {
  // Delegates to the shared binary-op handler (<= comparison).
  DoBinaryOp(Runtime::kInterpreterLessThanOrEqual, assembler);
}
1234
1235
1236// TestGreaterThanOrEqual <src>
1237//
1238// Test if the value in the <src> register is greater than or equal to the
1239// accumulator.
void Interpreter::DoTestGreaterThanOrEqual(
    compiler::InterpreterAssembler* assembler) {
  // Delegates to the shared binary-op handler (>= comparison).
  DoBinaryOp(Runtime::kInterpreterGreaterThanOrEqual, assembler);
}
1244
1245
1246// TestIn <src>
1247//
1248// Test if the object referenced by the register operand is a property of the
1249// object referenced by the accumulator.
void Interpreter::DoTestIn(compiler::InterpreterAssembler* assembler) {
  // Delegates to the shared binary-op handler with the HasProperty function.
  DoBinaryOp(Runtime::kHasProperty, assembler);
}
1253
1254
1255// TestInstanceOf <src>
1256//
// Test if the object referenced by the <src> register is an instance of type
1258// referenced by the accumulator.
void Interpreter::DoTestInstanceOf(compiler::InterpreterAssembler* assembler) {
  // Delegates to the shared binary-op handler with the InstanceOf function.
  DoBinaryOp(Runtime::kInstanceOf, assembler);
}
1262
1263
1264// ToName
1265//
1266// Cast the object referenced by the accumulator to a name.
1267void Interpreter::DoToName(compiler::InterpreterAssembler* assembler) {
1268 Node* accumulator = __ GetAccumulator();
1269 Node* result = __ CallRuntime(Runtime::kToName, accumulator);
1270 __ SetAccumulator(result);
1271 __ Dispatch();
1272}
1273
1274
1275// ToNumber
1276//
1277// Cast the object referenced by the accumulator to a number.
1278void Interpreter::DoToNumber(compiler::InterpreterAssembler* assembler) {
1279 Node* accumulator = __ GetAccumulator();
1280 Node* result = __ CallRuntime(Runtime::kToNumber, accumulator);
1281 __ SetAccumulator(result);
1282 __ Dispatch();
1283}
1284
1285
1286// ToObject
1287//
1288// Cast the object referenced by the accumulator to a JSObject.
1289void Interpreter::DoToObject(compiler::InterpreterAssembler* assembler) {
1290 Node* accumulator = __ GetAccumulator();
1291 Node* result = __ CallRuntime(Runtime::kToObject, accumulator);
1292 __ SetAccumulator(result);
1293 __ Dispatch();
1294}
1295
1296
1297// Jump <imm8>
1298//
1299// Jump by number of bytes represented by the immediate operand |imm8|.
1300void Interpreter::DoJump(compiler::InterpreterAssembler* assembler) {
1301 Node* relative_jump = __ BytecodeOperandImm(0);
1302 __ Jump(relative_jump);
1303}
1304
1305
1306// JumpConstant <idx8>
1307//
1308// Jump by number of bytes in the Smi in the |idx8| entry in the constant pool.
1309void Interpreter::DoJumpConstant(compiler::InterpreterAssembler* assembler) {
1310 Node* index = __ BytecodeOperandIdx(0);
1311 Node* constant = __ LoadConstantPoolEntry(index);
1312 Node* relative_jump = __ SmiUntag(constant);
1313 __ Jump(relative_jump);
1314}
1315
1316
1317// JumpConstantWide <idx16>
1318//
1319// Jump by number of bytes in the Smi in the |idx16| entry in the
1320// constant pool.
void Interpreter::DoJumpConstantWide(
    compiler::InterpreterAssembler* assembler) {
  // Shares the narrow JumpConstant bytecode's handler.
  DoJumpConstant(assembler);
}
1325
1326
1327// JumpIfTrue <imm8>
1328//
1329// Jump by number of bytes represented by an immediate operand if the
1330// accumulator contains true.
1331void Interpreter::DoJumpIfTrue(compiler::InterpreterAssembler* assembler) {
1332 Node* accumulator = __ GetAccumulator();
1333 Node* relative_jump = __ BytecodeOperandImm(0);
1334 Node* true_value = __ BooleanConstant(true);
1335 __ JumpIfWordEqual(accumulator, true_value, relative_jump);
1336}
1337
1338
1339// JumpIfTrueConstant <idx8>
1340//
1341// Jump by number of bytes in the Smi in the |idx8| entry in the constant pool
1342// if the accumulator contains true.
1343void Interpreter::DoJumpIfTrueConstant(
1344 compiler::InterpreterAssembler* assembler) {
1345 Node* accumulator = __ GetAccumulator();
1346 Node* index = __ BytecodeOperandIdx(0);
1347 Node* constant = __ LoadConstantPoolEntry(index);
1348 Node* relative_jump = __ SmiUntag(constant);
1349 Node* true_value = __ BooleanConstant(true);
1350 __ JumpIfWordEqual(accumulator, true_value, relative_jump);
1351}
1352
1353
1354// JumpIfTrueConstantWide <idx16>
1355//
1356// Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1357// if the accumulator contains true.
void Interpreter::DoJumpIfTrueConstantWide(
    compiler::InterpreterAssembler* assembler) {
  // Shares the narrow JumpIfTrueConstant bytecode's handler.
  DoJumpIfTrueConstant(assembler);
}
1362
1363
1364// JumpIfFalse <imm8>
1365//
1366// Jump by number of bytes represented by an immediate operand if the
1367// accumulator contains false.
1368void Interpreter::DoJumpIfFalse(compiler::InterpreterAssembler* assembler) {
1369 Node* accumulator = __ GetAccumulator();
1370 Node* relative_jump = __ BytecodeOperandImm(0);
1371 Node* false_value = __ BooleanConstant(false);
1372 __ JumpIfWordEqual(accumulator, false_value, relative_jump);
1373}
1374
1375
1376// JumpIfFalseConstant <idx8>
1377//
1378// Jump by number of bytes in the Smi in the |idx8| entry in the constant pool
1379// if the accumulator contains false.
1380void Interpreter::DoJumpIfFalseConstant(
1381 compiler::InterpreterAssembler* assembler) {
1382 Node* accumulator = __ GetAccumulator();
1383 Node* index = __ BytecodeOperandIdx(0);
1384 Node* constant = __ LoadConstantPoolEntry(index);
1385 Node* relative_jump = __ SmiUntag(constant);
1386 Node* false_value = __ BooleanConstant(false);
1387 __ JumpIfWordEqual(accumulator, false_value, relative_jump);
1388}
1389
1390
// JumpIfFalseConstantWide <idx16>
1392//
1393// Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1394// if the accumulator contains false.
void Interpreter::DoJumpIfFalseConstantWide(
    compiler::InterpreterAssembler* assembler) {
  // Shares the narrow JumpIfFalseConstant bytecode's handler.
  DoJumpIfFalseConstant(assembler);
}
1399
1400
1401// JumpIfToBooleanTrue <imm8>
1402//
1403// Jump by number of bytes represented by an immediate operand if the object
1404// referenced by the accumulator is true when the object is cast to boolean.
1405void Interpreter::DoJumpIfToBooleanTrue(
1406 compiler::InterpreterAssembler* assembler) {
1407 Node* accumulator = __ GetAccumulator();
1408 Node* to_boolean_value =
1409 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator);
1410 Node* relative_jump = __ BytecodeOperandImm(0);
1411 Node* true_value = __ BooleanConstant(true);
1412 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump);
1413}
1414
1415
1416// JumpIfToBooleanTrueConstant <idx8>
1417//
1418// Jump by number of bytes in the Smi in the |idx8| entry in the constant pool
1419// if the object referenced by the accumulator is true when the object is cast
1420// to boolean.
1421void Interpreter::DoJumpIfToBooleanTrueConstant(
1422 compiler::InterpreterAssembler* assembler) {
1423 Node* accumulator = __ GetAccumulator();
1424 Node* to_boolean_value =
1425 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator);
1426 Node* index = __ BytecodeOperandIdx(0);
1427 Node* constant = __ LoadConstantPoolEntry(index);
1428 Node* relative_jump = __ SmiUntag(constant);
1429 Node* true_value = __ BooleanConstant(true);
1430 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump);
1431}
1432
1433
1434// JumpIfToBooleanTrueConstantWide <idx16>
1435//
1436// Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1437// if the object referenced by the accumulator is true when the object is cast
1438// to boolean.
void Interpreter::DoJumpIfToBooleanTrueConstantWide(
    compiler::InterpreterAssembler* assembler) {
  // Shares the narrow JumpIfToBooleanTrueConstant bytecode's handler.
  DoJumpIfToBooleanTrueConstant(assembler);
}
1443
1444
1445// JumpIfToBooleanFalse <imm8>
1446//
1447// Jump by number of bytes represented by an immediate operand if the object
1448// referenced by the accumulator is false when the object is cast to boolean.
1449void Interpreter::DoJumpIfToBooleanFalse(
1450 compiler::InterpreterAssembler* assembler) {
1451 Node* accumulator = __ GetAccumulator();
1452 Node* to_boolean_value =
1453 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator);
1454 Node* relative_jump = __ BytecodeOperandImm(0);
1455 Node* false_value = __ BooleanConstant(false);
1456 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump);
1457}
1458
1459
1460// JumpIfToBooleanFalseConstant <idx8>
1461//
1462// Jump by number of bytes in the Smi in the |idx8| entry in the constant pool
1463// if the object referenced by the accumulator is false when the object is cast
1464// to boolean.
1465void Interpreter::DoJumpIfToBooleanFalseConstant(
1466 compiler::InterpreterAssembler* assembler) {
1467 Node* accumulator = __ GetAccumulator();
1468 Node* to_boolean_value =
1469 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator);
1470 Node* index = __ BytecodeOperandIdx(0);
1471 Node* constant = __ LoadConstantPoolEntry(index);
1472 Node* relative_jump = __ SmiUntag(constant);
1473 Node* false_value = __ BooleanConstant(false);
1474 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump);
1475}
1476
1477
1478// JumpIfToBooleanFalseConstantWide <idx16>
1479//
1480// Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1481// if the object referenced by the accumulator is false when the object is cast
1482// to boolean.
void Interpreter::DoJumpIfToBooleanFalseConstantWide(
    compiler::InterpreterAssembler* assembler) {
  // Shares the narrow JumpIfToBooleanFalseConstant bytecode's handler.
  DoJumpIfToBooleanFalseConstant(assembler);
}
1487
1488
1489// JumpIfNull <imm8>
1490//
1491// Jump by number of bytes represented by an immediate operand if the object
1492// referenced by the accumulator is the null constant.
1493void Interpreter::DoJumpIfNull(compiler::InterpreterAssembler* assembler) {
1494 Node* accumulator = __ GetAccumulator();
1495 Node* null_value = __ HeapConstant(isolate_->factory()->null_value());
1496 Node* relative_jump = __ BytecodeOperandImm(0);
1497 __ JumpIfWordEqual(accumulator, null_value, relative_jump);
1498}
1499
1500
1501// JumpIfNullConstant <idx8>
1502//
1503// Jump by number of bytes in the Smi in the |idx8| entry in the constant pool
1504// if the object referenced by the accumulator is the null constant.
1505void Interpreter::DoJumpIfNullConstant(
1506 compiler::InterpreterAssembler* assembler) {
1507 Node* accumulator = __ GetAccumulator();
1508 Node* null_value = __ HeapConstant(isolate_->factory()->null_value());
1509 Node* index = __ BytecodeOperandIdx(0);
1510 Node* constant = __ LoadConstantPoolEntry(index);
1511 Node* relative_jump = __ SmiUntag(constant);
1512 __ JumpIfWordEqual(accumulator, null_value, relative_jump);
1513}
1514
1515
1516// JumpIfNullConstantWide <idx16>
1517//
1518// Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1519// if the object referenced by the accumulator is the null constant.
void Interpreter::DoJumpIfNullConstantWide(
    compiler::InterpreterAssembler* assembler) {
  // Shares the narrow JumpIfNullConstant bytecode's handler.
  DoJumpIfNullConstant(assembler);
}
1524
1525
// JumpIfUndefined <imm8>
1527//
1528// Jump by number of bytes represented by an immediate operand if the object
1529// referenced by the accumulator is the undefined constant.
1530void Interpreter::DoJumpIfUndefined(compiler::InterpreterAssembler* assembler) {
1531 Node* accumulator = __ GetAccumulator();
1532 Node* undefined_value =
1533 __ HeapConstant(isolate_->factory()->undefined_value());
1534 Node* relative_jump = __ BytecodeOperandImm(0);
1535 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump);
1536}
1537
1538
1539// JumpIfUndefinedConstant <idx8>
1540//
1541// Jump by number of bytes in the Smi in the |idx8| entry in the constant pool
1542// if the object referenced by the accumulator is the undefined constant.
1543void Interpreter::DoJumpIfUndefinedConstant(
1544 compiler::InterpreterAssembler* assembler) {
1545 Node* accumulator = __ GetAccumulator();
1546 Node* undefined_value =
1547 __ HeapConstant(isolate_->factory()->undefined_value());
1548 Node* index = __ BytecodeOperandIdx(0);
1549 Node* constant = __ LoadConstantPoolEntry(index);
1550 Node* relative_jump = __ SmiUntag(constant);
1551 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump);
1552}
1553
1554
1555// JumpIfUndefinedConstantWide <idx16>
1556//
1557// Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1558// if the object referenced by the accumulator is the undefined constant.
void Interpreter::DoJumpIfUndefinedConstantWide(
    compiler::InterpreterAssembler* assembler) {
  // Shares the narrow JumpIfUndefinedConstant bytecode's handler.
  DoJumpIfUndefinedConstant(assembler);
}
1563
1564
1565void Interpreter::DoCreateLiteral(Runtime::FunctionId function_id,
1566 compiler::InterpreterAssembler* assembler) {
1567 Node* index = __ BytecodeOperandIdx(0);
1568 Node* constant_elements = __ LoadConstantPoolEntry(index);
1569 Node* literal_index_raw = __ BytecodeOperandIdx(1);
1570 Node* literal_index = __ SmiTag(literal_index_raw);
1571 Node* flags_raw = __ BytecodeOperandImm(2);
1572 Node* flags = __ SmiTag(flags_raw);
1573 Node* closure = __ LoadRegister(Register::function_closure());
1574 Node* result = __ CallRuntime(function_id, closure, literal_index,
1575 constant_elements, flags);
1576 __ SetAccumulator(result);
1577 __ Dispatch();
1578}
1579
1580
1581// CreateRegExpLiteral <pattern_idx> <literal_idx> <flags>
1582//
1583// Creates a regular expression literal for literal index <literal_idx> with
1584// <flags> and the pattern in <pattern_idx>.
void Interpreter::DoCreateRegExpLiteral(
    compiler::InterpreterAssembler* assembler) {
  // Delegates to the shared literal-creation helper.
  DoCreateLiteral(Runtime::kCreateRegExpLiteral, assembler);
}
1589
1590
1591// CreateRegExpLiteralWide <pattern_idx> <literal_idx> <flags>
1592//
1593// Creates a regular expression literal for literal index <literal_idx> with
1594// <flags> and the pattern in <pattern_idx>.
void Interpreter::DoCreateRegExpLiteralWide(
    compiler::InterpreterAssembler* assembler) {
  // Wide variant; delegates to the same literal-creation helper.
  DoCreateLiteral(Runtime::kCreateRegExpLiteral, assembler);
}
1599
1600
1601// CreateArrayLiteral <element_idx> <literal_idx> <flags>
1602//
1603// Creates an array literal for literal index <literal_idx> with flags <flags>
1604// and constant elements in <element_idx>.
void Interpreter::DoCreateArrayLiteral(
    compiler::InterpreterAssembler* assembler) {
  // Delegates to the shared literal-creation helper.
  DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler);
}
1609
1610
1611// CreateArrayLiteralWide <element_idx> <literal_idx> <flags>
1612//
1613// Creates an array literal for literal index <literal_idx> with flags <flags>
1614// and constant elements in <element_idx>.
void Interpreter::DoCreateArrayLiteralWide(
    compiler::InterpreterAssembler* assembler) {
  // Wide variant; delegates to the same literal-creation helper.
  DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler);
}
1619
1620
1621// CreateObjectLiteral <element_idx> <literal_idx> <flags>
1622//
1623// Creates an object literal for literal index <literal_idx> with flags <flags>
1624// and constant elements in <element_idx>.
void Interpreter::DoCreateObjectLiteral(
    compiler::InterpreterAssembler* assembler) {
  // Delegates to the shared literal-creation helper.
  DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler);
}
1629
1630
1631// CreateObjectLiteralWide <element_idx> <literal_idx> <flags>
1632//
1633// Creates an object literal for literal index <literal_idx> with flags <flags>
1634// and constant elements in <element_idx>.
void Interpreter::DoCreateObjectLiteralWide(
    compiler::InterpreterAssembler* assembler) {
  // Wide variant; delegates to the same literal-creation helper.
  DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler);
}
1639
1640
1641// CreateClosure <index> <tenured>
1642//
1643// Creates a new closure for SharedFunctionInfo at position |index| in the
1644// constant pool and with the PretenureFlag <tenured>.
1645void Interpreter::DoCreateClosure(compiler::InterpreterAssembler* assembler) {
1646 // TODO(rmcilroy): Possibly call FastNewClosureStub when possible instead of
1647 // calling into the runtime.
1648 Node* index = __ BytecodeOperandIdx(0);
1649 Node* shared = __ LoadConstantPoolEntry(index);
1650 Node* tenured_raw = __ BytecodeOperandImm(1);
1651 Node* tenured = __ SmiTag(tenured_raw);
1652 Node* result =
1653 __ CallRuntime(Runtime::kInterpreterNewClosure, shared, tenured);
1654 __ SetAccumulator(result);
1655 __ Dispatch();
1656}
1657
1658
1659// CreateClosureWide <index> <tenured>
1660//
1661// Creates a new closure for SharedFunctionInfo at position |index| in the
1662// constant pool and with the PretenureFlag <tenured>.
1663void Interpreter::DoCreateClosureWide(
1664 compiler::InterpreterAssembler* assembler) {
1665 return DoCreateClosure(assembler);
1666}
1667
1668
1669// CreateMappedArguments
1670//
1671// Creates a new mapped arguments object.
1672void Interpreter::DoCreateMappedArguments(
1673 compiler::InterpreterAssembler* assembler) {
1674 Node* closure = __ LoadRegister(Register::function_closure());
1675 Node* result = __ CallRuntime(Runtime::kNewSloppyArguments_Generic, closure);
1676 __ SetAccumulator(result);
1677 __ Dispatch();
1678}
1679
1680
1681// CreateUnmappedArguments
1682//
1683// Creates a new unmapped arguments object.
1684void Interpreter::DoCreateUnmappedArguments(
1685 compiler::InterpreterAssembler* assembler) {
1686 Node* closure = __ LoadRegister(Register::function_closure());
1687 Node* result = __ CallRuntime(Runtime::kNewStrictArguments_Generic, closure);
1688 __ SetAccumulator(result);
1689 __ Dispatch();
1690}
1691
1692
1693// Throw
1694//
1695// Throws the exception in the accumulator.
1696void Interpreter::DoThrow(compiler::InterpreterAssembler* assembler) {
1697 Node* exception = __ GetAccumulator();
1698 __ CallRuntime(Runtime::kThrow, exception);
1699 // We shouldn't ever return from a throw.
1700 __ Abort(kUnexpectedReturnFromThrow);
1701}
1702
1703
1704// Return
1705//
1706// Return the value in the accumulator.
void Interpreter::DoReturn(compiler::InterpreterAssembler* assembler) {
  // Leaves the interpreted frame; the accumulator holds the return value.
  __ Return();
}
1710
1711
1712// ForInPrepare <cache_type> <cache_array> <cache_length>
1713//
1714// Returns state for for..in loop execution based on the object in the
1715// accumulator. The registers |cache_type|, |cache_array|, and
1716// |cache_length| represent output parameters.
1717void Interpreter::DoForInPrepare(compiler::InterpreterAssembler* assembler) {
1718 Node* object = __ GetAccumulator();
1719 Node* result = __ CallRuntime(Runtime::kInterpreterForInPrepare, object);
1720 for (int i = 0; i < 3; i++) {
1721 // 0 == cache_type, 1 == cache_array, 2 == cache_length
1722 Node* cache_info = __ LoadFixedArrayElement(result, i);
1723 Node* cache_info_reg = __ BytecodeOperandReg(i);
1724 __ StoreRegister(cache_info, cache_info_reg);
1725 }
1726 __ SetAccumulator(result);
1727 __ Dispatch();
1728}
1729
1730
1731// ForInNext <receiver> <cache_type> <cache_array> <index>
1732//
// Returns the next enumerable property in the accumulator.
1734void Interpreter::DoForInNext(compiler::InterpreterAssembler* assembler) {
1735 Node* receiver_reg = __ BytecodeOperandReg(0);
1736 Node* receiver = __ LoadRegister(receiver_reg);
1737 Node* cache_type_reg = __ BytecodeOperandReg(1);
1738 Node* cache_type = __ LoadRegister(cache_type_reg);
1739 Node* cache_array_reg = __ BytecodeOperandReg(2);
1740 Node* cache_array = __ LoadRegister(cache_array_reg);
1741 Node* index_reg = __ BytecodeOperandReg(3);
1742 Node* index = __ LoadRegister(index_reg);
1743 Node* result = __ CallRuntime(Runtime::kForInNext, receiver, cache_array,
1744 cache_type, index);
1745 __ SetAccumulator(result);
1746 __ Dispatch();
1747}
1748
1749
1750// ForInDone <index> <cache_length>
1751//
1752// Returns true if the end of the enumerable properties has been reached.
1753void Interpreter::DoForInDone(compiler::InterpreterAssembler* assembler) {
1754 // TODO(oth): Implement directly rather than making a runtime call.
1755 Node* index_reg = __ BytecodeOperandReg(0);
1756 Node* index = __ LoadRegister(index_reg);
1757 Node* cache_length_reg = __ BytecodeOperandReg(1);
1758 Node* cache_length = __ LoadRegister(cache_length_reg);
1759 Node* result = __ CallRuntime(Runtime::kForInDone, index, cache_length);
1760 __ SetAccumulator(result);
1761 __ Dispatch();
1762}
1763
1764
1765// ForInStep <index>
1766//
1767// Increments the loop counter in register |index| and stores the result
1768// in the accumulator.
1769void Interpreter::DoForInStep(compiler::InterpreterAssembler* assembler) {
1770 // TODO(oth): Implement directly rather than making a runtime call.
1771 Node* index_reg = __ BytecodeOperandReg(0);
1772 Node* index = __ LoadRegister(index_reg);
1773 Node* result = __ CallRuntime(Runtime::kForInStep, index);
1774 __ SetAccumulator(result);
1775 __ Dispatch();
1776}
1777
1778} // namespace interpreter
1779} // namespace internal
1780} // namespace v8