1// Copyright 2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "codegen-inl.h"
31#include "compiler.h"
32#include "debug.h"
33#include "full-codegen.h"
34#include "parser.h"
35
36namespace v8 {
37namespace internal {
38
39#define __ ACCESS_MASM(masm_)
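// Every '__ <instruction>(...)' statement below goes through the macro above,
// i.e. it is a call on this code generator's MacroAssembler and emits the
// corresponding ARM instruction(s) into the generated code.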
40
41// Generate code for a JS function. On entry to the function the receiver
42// and arguments have been pushed on the stack left to right. The actual
43// argument count matches the formal parameter count expected by the
44// function.
45//
46// The live registers are:
47// o r1: the JS function object being called (i.e., ourselves)
48// o cp: our context
49// o fp: our caller's frame pointer
50// o sp: stack pointer
51// o lr: return address
52//
53// The function builds a JS frame. Please see JavaScriptFrameConstants in
54// frames-arm.h for its layout.
55void FullCodeGenerator::Generate(FunctionLiteral* fun, Mode mode) {
56 function_ = fun;
57 SetFunctionPosition(fun);
58
59 if (mode == PRIMARY) {
60 int locals_count = fun->scope()->num_stack_slots();
61
62 __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
63 if (locals_count > 0) {
64 // Load undefined value here, so the value is ready for the loop
65 // below.
66 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
67 }
68 // Adjust fp to point to caller's fp.
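    // The stm above stored, from lowest to highest address: r1 (the JS
    // function), cp (the context), the caller's fp, and lr. Skipping the two
    // lowest slots therefore leaves fp pointing at the saved caller fp:
    //   [lr] [caller fp] <-- fp   [cp] [r1 (function)] <-- sp
    // which is the JS frame layout described in frames-arm.h.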
69 __ add(fp, sp, Operand(2 * kPointerSize));
70
71 { Comment cmnt(masm_, "[ Allocate locals");
72 for (int i = 0; i < locals_count; i++) {
73 __ push(ip);
74 }
75 }
76
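    // Tracks whether r1 still holds the JS function; the NewContext runtime
    // call below clobbers it.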
77 bool function_in_register = true;
78
79 // Possibly allocate a local context.
80 if (fun->scope()->num_heap_slots() > 0) {
81 Comment cmnt(masm_, "[ Allocate local context");
82 // Argument to NewContext is the function, which is in r1.
83 __ push(r1);
84 __ CallRuntime(Runtime::kNewContext, 1);
85 function_in_register = false;
86 // Context is returned in both r0 and cp. It replaces the context
87 // passed to us. It's saved in the stack and kept live in cp.
88 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
89 // Copy any necessary parameters into the context.
90 int num_parameters = fun->scope()->num_parameters();
91 for (int i = 0; i < num_parameters; i++) {
92 Slot* slot = fun->scope()->parameter(i)->slot();
93 if (slot != NULL && slot->type() == Slot::CONTEXT) {
94 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
95 (num_parameters - 1 - i) * kPointerSize;
96 // Load parameter from stack.
97 __ ldr(r0, MemOperand(fp, parameter_offset));
98 // Store it in the context.
99 __ mov(r1, Operand(Context::SlotOffset(slot->index())));
100 __ str(r0, MemOperand(cp, r1));
101 // Update the write barrier. This clobbers all involved
102 // registers, so we have to use a third register to avoid
103 // clobbering cp.
104 __ mov(r2, Operand(cp));
105 __ RecordWrite(r2, r1, r0);
106 }
107 }
108 }
109
110 Variable* arguments = fun->scope()->arguments()->AsVariable();
111 if (arguments != NULL) {
112 // Function uses arguments object.
113 Comment cmnt(masm_, "[ Allocate arguments object");
114 if (!function_in_register) {
115 // The function (r1) was clobbered by the context allocation above; reload it.
116 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
117 } else {
118 __ mov(r3, r1);
119 }
120 // Receiver is just before the parameters on the caller's stack.
121 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset +
122 fun->num_parameters() * kPointerSize));
123 __ mov(r1, Operand(Smi::FromInt(fun->num_parameters())));
124 __ stm(db_w, sp, r3.bit() | r2.bit() | r1.bit());
125
126 // Arguments to ArgumentsAccessStub:
127 // function, receiver address, parameter count.
128 // The stub will rewrite receiver and parameter count if the previous
129 // stack frame was an arguments adapter frame.
130 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
131 __ CallStub(&stub);
132 // Duplicate the value; move-to-slot operation might clobber registers.
133 __ mov(r3, r0);
134 Move(arguments->slot(), r0, r1, r2);
135 Slot* dot_arguments_slot =
136 fun->scope()->arguments_shadow()->AsVariable()->slot();
137 Move(dot_arguments_slot, r3, r1, r2);
138 }
139 }
140
141 // Check the stack for overflow or break request.
142 // Put the lr setup instruction in the delay slot. The kInstrSize is
143 // added to the implicit 8 byte offset that always applies to operations
144 // with pc and gives a return address 12 bytes down.
145 { Comment cmnt(masm_, "[ Stack check");
146 __ LoadRoot(r2, Heap::kStackLimitRootIndex);
147 __ add(lr, pc, Operand(Assembler::kInstrSize));
148 __ cmp(sp, Operand(r2));
149 StackCheckStub stub;
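      // The conditional mov below is executed only when sp is below the stack
      // limit (unsigned 'lo'); it jumps into the StackCheckStub with lr
      // already set up above, so the stub returns to the instruction after
      // the mov.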
150 __ mov(pc,
151 Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
152 RelocInfo::CODE_TARGET),
153 LeaveCC,
154 lo);
155 }
156
157 { Comment cmnt(masm_, "[ Declarations");
158 VisitDeclarations(fun->scope()->declarations());
159 }
160
161 if (FLAG_trace) {
162 __ CallRuntime(Runtime::kTraceEnter, 0);
163 }
164
165 { Comment cmnt(masm_, "[ Body");
166 ASSERT(loop_depth() == 0);
167 VisitStatements(fun->body());
168 ASSERT(loop_depth() == 0);
169 }
170
171 { Comment cmnt(masm_, "[ return <undefined>;");
172 // Emit a 'return undefined' in case control fell off the end of the
173 // body.
174 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
175 }
176 EmitReturnSequence(function_->end_position());
177}
178
179
180void FullCodeGenerator::EmitReturnSequence(int position) {
181 Comment cmnt(masm_, "[ Return sequence");
182 if (return_label_.is_bound()) {
183 __ b(&return_label_);
184 } else {
185 __ bind(&return_label_);
186 if (FLAG_trace) {
187 // Push the return value on the stack as the parameter.
188 // Runtime::TraceExit returns its parameter in r0.
189 __ push(r0);
190 __ CallRuntime(Runtime::kTraceExit, 1);
191 }
192
193 // Add a label for checking the size of the code used for returning.
194 Label check_exit_codesize;
195 masm_->bind(&check_exit_codesize);
196
197 // Calculate the exact length of the return sequence and make sure that
198 // the constant pool is not emitted inside of the return sequence.
199 int num_parameters = function_->scope()->num_parameters();
200 int32_t sp_delta = (num_parameters + 1) * kPointerSize;
201 int return_sequence_length = Assembler::kJSReturnSequenceLength;
202 if (!masm_->ImmediateFitsAddrMode1Instruction(sp_delta)) {
203 // Additional mov instruction generated.
204 return_sequence_length++;
205 }
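      // Block the constant pool across the return sequence below so that it
      // keeps the fixed layout the debugger expects; the ASSERT_EQ at the end
      // of this block checks the emitted length.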
206 masm_->BlockConstPoolFor(return_sequence_length);
207
208 CodeGenerator::RecordPositions(masm_, position);
209 __ RecordJSReturn();
210 __ mov(sp, fp);
211 __ ldm(ia_w, sp, fp.bit() | lr.bit());
212 __ add(sp, sp, Operand(sp_delta));
213 __ Jump(lr);
214
215 // Check that the size of the code used for returning matches what is
216 // expected by the debugger. The add instruction above is an addressing
217 // mode 1 instruction where there are restrictions on which immediate values
218 // can be encoded in the instruction and which immediate values require
219 // use of an additional instruction for moving the immediate to a temporary
220 // register.
221 ASSERT_EQ(return_sequence_length,
222 masm_->InstructionsGeneratedSince(&check_exit_codesize));
223 }
224}
225
226
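// The Apply/DropAndApply helpers below plug a freshly computed value into its
// expression context: kEffect discards it, kValue leaves it in the result
// register or on the stack (depending on location_), kTest turns it into
// control flow only, and kValueTest/kTestValue additionally preserve the
// value on the path where the test succeeds or fails, respectively.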
227void FullCodeGenerator::Apply(Expression::Context context, Register reg) {
228 switch (context) {
229 case Expression::kUninitialized:
230 UNREACHABLE();
231
232 case Expression::kEffect:
233 // Nothing to do.
234 break;
235
236 case Expression::kValue:
237 // Move value into place.
238 switch (location_) {
239 case kAccumulator:
240 if (!reg.is(result_register())) __ mov(result_register(), reg);
241 break;
242 case kStack:
243 __ push(reg);
244 break;
245 }
246 break;
247
248 case Expression::kValueTest:
249 case Expression::kTestValue:
250 // Push an extra copy of the value in case it's needed.
251 __ push(reg);
252 // Fall through.
253
254 case Expression::kTest:
255 // We always call the runtime on ARM, so push the value as argument.
256 __ push(reg);
257 DoTest(context);
258 break;
259 }
260}
261
262
263void FullCodeGenerator::Apply(Expression::Context context, Slot* slot) {
264 switch (context) {
265 case Expression::kUninitialized:
266 UNREACHABLE();
267 case Expression::kEffect:
268 // Nothing to do.
269 break;
270 case Expression::kValue:
271 case Expression::kTest:
272 case Expression::kValueTest:
273 case Expression::kTestValue:
274 // On ARM we have to move the value into a register to do anything
275 // with it.
276 Move(result_register(), slot);
277 Apply(context, result_register());
278 break;
279 }
280}
281
282
283void FullCodeGenerator::Apply(Expression::Context context, Literal* lit) {
284 switch (context) {
285 case Expression::kUninitialized:
286 UNREACHABLE();
287 case Expression::kEffect:
288 // Nothing to do.
289 break;
290 case Expression::kValue:
291 case Expression::kTest:
292 case Expression::kValueTest:
293 case Expression::kTestValue:
294 // On ARM we have to move the value into a register to do anything
295 // with it.
296 __ mov(result_register(), Operand(lit->handle()));
297 Apply(context, result_register());
298 break;
299 }
300}
301
302
303void FullCodeGenerator::ApplyTOS(Expression::Context context) {
304 switch (context) {
305 case Expression::kUninitialized:
306 UNREACHABLE();
307
308 case Expression::kEffect:
309 __ Drop(1);
310 break;
311
312 case Expression::kValue:
313 switch (location_) {
314 case kAccumulator:
315 __ pop(result_register());
316 break;
317 case kStack:
318 break;
319 }
320 break;
321
322 case Expression::kValueTest:
323 case Expression::kTestValue:
324 // Duplicate the value on the stack in case it's needed.
325 __ ldr(ip, MemOperand(sp));
326 __ push(ip);
327 // Fall through.
328
329 case Expression::kTest:
330 DoTest(context);
331 break;
332 }
333}
334
335
336void FullCodeGenerator::DropAndApply(int count,
337 Expression::Context context,
338 Register reg) {
339 ASSERT(count > 0);
340 ASSERT(!reg.is(sp));
341 switch (context) {
342 case Expression::kUninitialized:
343 UNREACHABLE();
344
345 case Expression::kEffect:
346 __ Drop(count);
347 break;
348
349 case Expression::kValue:
350 switch (location_) {
351 case kAccumulator:
352 __ Drop(count);
353 if (!reg.is(result_register())) __ mov(result_register(), reg);
354 break;
355 case kStack:
356 if (count > 1) __ Drop(count - 1);
357 __ str(reg, MemOperand(sp));
358 break;
359 }
360 break;
361
362 case Expression::kTest:
363 if (count > 1) __ Drop(count - 1);
364 __ str(reg, MemOperand(sp));
365 DoTest(context);
366 break;
367
368 case Expression::kValueTest:
369 case Expression::kTestValue:
370 if (count == 1) {
371 __ str(reg, MemOperand(sp));
372 __ push(reg);
373 } else { // count > 1
374 __ Drop(count - 2);
375 __ str(reg, MemOperand(sp, kPointerSize));
376 __ str(reg, MemOperand(sp));
377 }
378 DoTest(context);
379 break;
380 }
381}
382
383
384void FullCodeGenerator::Apply(Expression::Context context,
385 Label* materialize_true,
386 Label* materialize_false) {
387 switch (context) {
388 case Expression::kUninitialized:
389
390 case Expression::kEffect:
391 ASSERT_EQ(materialize_true, materialize_false);
392 __ bind(materialize_true);
393 break;
394
395 case Expression::kValue: {
396 Label done;
397 __ bind(materialize_true);
398 __ mov(result_register(), Operand(Factory::true_value()));
399 __ jmp(&done);
400 __ bind(materialize_false);
401 __ mov(result_register(), Operand(Factory::false_value()));
402 __ bind(&done);
403 switch (location_) {
404 case kAccumulator:
405 break;
406 case kStack:
407 __ push(result_register());
408 break;
409 }
410 break;
411 }
412
413 case Expression::kTest:
414 break;
415
416 case Expression::kValueTest:
417 __ bind(materialize_true);
418 __ mov(result_register(), Operand(Factory::true_value()));
419 switch (location_) {
420 case kAccumulator:
421 break;
422 case kStack:
423 __ push(result_register());
424 break;
425 }
426 __ jmp(true_label_);
427 break;
428
429 case Expression::kTestValue:
430 __ bind(materialize_false);
431 __ mov(result_register(), Operand(Factory::false_value()));
432 switch (location_) {
433 case kAccumulator:
434 break;
435 case kStack:
436 __ push(result_register());
437 break;
438 }
439 __ jmp(false_label_);
440 break;
441 }
442}
443
444
445void FullCodeGenerator::DoTest(Expression::Context context) {
446 // The value to test is pushed on the stack, and duplicated on the stack
447 // if necessary (for value/test and test/value contexts).
448 ASSERT_NE(NULL, true_label_);
449 ASSERT_NE(NULL, false_label_);
450
451 // Call the runtime to find the boolean value of the source and then
452 // translate it into control flow to the pair of labels.
453 __ CallRuntime(Runtime::kToBool, 1);
454 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
455 __ cmp(r0, ip);
456
457 // Complete based on the context.
458 switch (context) {
459 case Expression::kUninitialized:
460 case Expression::kEffect:
461 case Expression::kValue:
462 UNREACHABLE();
463
464 case Expression::kTest:
465 __ b(eq, true_label_);
466 __ jmp(false_label_);
467 break;
468
469 case Expression::kValueTest: {
470 Label discard;
471 switch (location_) {
472 case kAccumulator:
473 __ b(ne, &discard);
474 __ pop(result_register());
475 __ jmp(true_label_);
476 break;
477 case kStack:
478 __ b(eq, true_label_);
479 break;
480 }
481 __ bind(&discard);
482 __ Drop(1);
483 __ jmp(false_label_);
484 break;
485 }
486
487 case Expression::kTestValue: {
488 Label discard;
489 switch (location_) {
490 case kAccumulator:
491 __ b(eq, &discard);
492 __ pop(result_register());
493 __ jmp(false_label_);
494 break;
495 case kStack:
496 __ b(ne, false_label_);
497 break;
498 }
499 __ bind(&discard);
500 __ Drop(1);
501 __ jmp(true_label_);
502 break;
503 }
504 }
505}
506
507
508MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
509 switch (slot->type()) {
510 case Slot::PARAMETER:
511 case Slot::LOCAL:
512 return MemOperand(fp, SlotOffset(slot));
513 case Slot::CONTEXT: {
514 int context_chain_length =
515 function_->scope()->ContextChainLength(slot->var()->scope());
516 __ LoadContext(scratch, context_chain_length);
517 return CodeGenerator::ContextOperand(scratch, slot->index());
518 }
519 case Slot::LOOKUP:
520 UNREACHABLE();
521 }
522 UNREACHABLE();
523 return MemOperand(r0, 0);
524}
525
526
527void FullCodeGenerator::Move(Register destination, Slot* source) {
528 // Use destination as scratch.
529 MemOperand slot_operand = EmitSlotSearch(source, destination);
530 __ ldr(destination, slot_operand);
531}
532
533
534void FullCodeGenerator::Move(Slot* dst,
535 Register src,
536 Register scratch1,
537 Register scratch2) {
538 ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented.
539 ASSERT(!scratch1.is(src) && !scratch2.is(src));
540 MemOperand location = EmitSlotSearch(dst, scratch1);
541 __ str(src, location);
542 // Emit the write barrier code if the location is in the heap.
543 if (dst->type() == Slot::CONTEXT) {
544 __ mov(scratch2, Operand(Context::SlotOffset(dst->index())));
545 __ RecordWrite(scratch1, scratch2, src);
546 }
547}
548
549
550void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
551 Comment cmnt(masm_, "[ Declaration");
552 Variable* var = decl->proxy()->var();
553 ASSERT(var != NULL); // Must have been resolved.
554 Slot* slot = var->slot();
555 Property* prop = var->AsProperty();
556
557 if (slot != NULL) {
558 switch (slot->type()) {
559 case Slot::PARAMETER:
560 case Slot::LOCAL:
561 if (decl->mode() == Variable::CONST) {
562 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
563 __ str(ip, MemOperand(fp, SlotOffset(slot)));
564 } else if (decl->fun() != NULL) {
565 VisitForValue(decl->fun(), kAccumulator);
566 __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
567 }
568 break;
569
570 case Slot::CONTEXT:
571 // We bypass the general EmitSlotSearch because we know more about
572 // this specific context.
573
574 // The variable in the decl always resides in the current context.
575 ASSERT_EQ(0, function_->scope()->ContextChainLength(var->scope()));
576 if (FLAG_debug_code) {
577 // Check if we have the correct context pointer.
578 __ ldr(r1,
579 CodeGenerator::ContextOperand(cp, Context::FCONTEXT_INDEX));
580 __ cmp(r1, cp);
581 __ Check(eq, "Unexpected declaration in current context.");
582 }
583 if (decl->mode() == Variable::CONST) {
584 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
585 __ str(ip, CodeGenerator::ContextOperand(cp, slot->index()));
586 // No write barrier since the_hole_value is in old space.
587 } else if (decl->fun() != NULL) {
588 VisitForValue(decl->fun(), kAccumulator);
589 __ str(result_register(),
590 CodeGenerator::ContextOperand(cp, slot->index()));
591 int offset = Context::SlotOffset(slot->index());
592 __ mov(r2, Operand(offset));
593 // We know that we have written a function, which is not a smi.
594 __ mov(r1, Operand(cp));
595 __ RecordWrite(r1, r2, result_register());
596 }
597 break;
598
599 case Slot::LOOKUP: {
600 __ mov(r2, Operand(var->name()));
601 // Declaration nodes are always introduced in one of two modes.
602 ASSERT(decl->mode() == Variable::VAR ||
603 decl->mode() == Variable::CONST);
604 PropertyAttributes attr =
605 (decl->mode() == Variable::VAR) ? NONE : READ_ONLY;
606 __ mov(r1, Operand(Smi::FromInt(attr)));
607 // Push initial value, if any.
608 // Note: For variables we must not push an initial value (such as
609 // 'undefined') because we may have a (legal) redeclaration and we
610 // must not destroy the current value.
611 if (decl->mode() == Variable::CONST) {
612 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
613 __ stm(db_w, sp, cp.bit() | r2.bit() | r1.bit() | r0.bit());
614 } else if (decl->fun() != NULL) {
615 __ stm(db_w, sp, cp.bit() | r2.bit() | r1.bit());
616 // Push initial value for function declaration.
617 VisitForValue(decl->fun(), kStack);
618 } else {
619 __ mov(r0, Operand(Smi::FromInt(0))); // No initial value!
620 __ stm(db_w, sp, cp.bit() | r2.bit() | r1.bit() | r0.bit());
621 }
622 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
623 break;
624 }
625 }
626
627 } else if (prop != NULL) {
628 if (decl->fun() != NULL || decl->mode() == Variable::CONST) {
629 // We are declaring a function or constant that rewrites to a
630 // property. Use (keyed) IC to set the initial value.
631 VisitForValue(prop->obj(), kStack);
632 VisitForValue(prop->key(), kStack);
633
634 if (decl->fun() != NULL) {
635 VisitForValue(decl->fun(), kAccumulator);
636 } else {
637 __ LoadRoot(result_register(), Heap::kTheHoleValueRootIndex);
638 }
639
640 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
641 __ Call(ic, RelocInfo::CODE_TARGET);
642
643 // Value in r0 is ignored (declarations are statements). Receiver
644 // and key on stack are discarded.
645 __ Drop(2);
646 }
647 }
648}
649
650
651void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
652 // Call the runtime to declare the globals.
653 // The context is the first argument.
654 __ mov(r1, Operand(pairs));
655 __ mov(r0, Operand(Smi::FromInt(is_eval_ ? 1 : 0)));
656 __ stm(db_w, sp, cp.bit() | r1.bit() | r0.bit());
657 __ CallRuntime(Runtime::kDeclareGlobals, 3);
658 // Return value is ignored.
659}
660
661
662void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
663 Comment cmnt(masm_, "[ FunctionLiteral");
664
665 // Build the function boilerplate and instantiate it.
666 Handle<JSFunction> boilerplate =
667 Compiler::BuildBoilerplate(expr, script_, this);
668 if (HasStackOverflow()) return;
669
670 ASSERT(boilerplate->IsBoilerplate());
671
672 // Create a new closure.
673 __ mov(r0, Operand(boilerplate));
674 __ stm(db_w, sp, cp.bit() | r0.bit());
675 __ CallRuntime(Runtime::kNewClosure, 2);
676 Apply(context_, r0);
677}
678
679
680void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
681 Comment cmnt(masm_, "[ VariableProxy");
682 EmitVariableLoad(expr->var(), context_);
683}
684
685
686void FullCodeGenerator::EmitVariableLoad(Variable* var,
687 Expression::Context context) {
688 // Four cases: non-this global variables, lookup slots, all other
689 // types of slots, and parameters that rewrite to explicit property
690 // accesses on the arguments object.
691 Slot* slot = var->slot();
692 Property* property = var->AsProperty();
693
694 if (var->is_global() && !var->is_this()) {
695 Comment cmnt(masm_, "Global variable");
696 // Use inline caching. Variable name is passed in r2 and the global
697 // object on the stack.
698 __ ldr(ip, CodeGenerator::GlobalObject());
699 __ push(ip);
700 __ mov(r2, Operand(var->name()));
701 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
702 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
703 DropAndApply(1, context, r0);
704
705 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
706 Comment cmnt(masm_, "Lookup slot");
707 __ mov(r1, Operand(var->name()));
708 __ stm(db_w, sp, cp.bit() | r1.bit()); // Context and name.
709 __ CallRuntime(Runtime::kLoadContextSlot, 2);
710 Apply(context, r0);
711
712 } else if (slot != NULL) {
713 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
714 ? "Context slot"
715 : "Stack slot");
716 Apply(context, slot);
717
718 } else {
719 Comment cmnt(masm_, "Rewritten parameter");
720 ASSERT_NOT_NULL(property);
721 // Rewritten parameter accesses are of the form "slot[literal]".
722
723 // Assert that the object is in a slot.
724 Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
725 ASSERT_NOT_NULL(object_var);
726 Slot* object_slot = object_var->slot();
727 ASSERT_NOT_NULL(object_slot);
728
729 // Load the object.
730 Move(r2, object_slot);
731
732 // Assert that the key is a smi.
733 Literal* key_literal = property->key()->AsLiteral();
734 ASSERT_NOT_NULL(key_literal);
735 ASSERT(key_literal->handle()->IsSmi());
736
737 // Load the key.
738 __ mov(r1, Operand(key_literal->handle()));
739
740 // Push both as arguments to ic.
741 __ stm(db_w, sp, r2.bit() | r1.bit());
742
743 // Do a keyed property load.
744 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
745 __ Call(ic, RelocInfo::CODE_TARGET);
746
747 // Drop key and object left on the stack by IC, and push the result.
748 DropAndApply(2, context, r0);
749 }
750}
751
752
753void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
754 Comment cmnt(masm_, "[ RegExpLiteral");
755 Label done;
756 // Registers will be used as follows:
757 // r4 = JS function, literals array
758 // r3 = literal index
759 // r2 = RegExp pattern
760 // r1 = RegExp flags
761 // r0 = temp + return value (RegExp literal)
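  // The materialized RegExp object is cached in the function's literals
  // array; the runtime is only called when that slot still holds undefined.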
762 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
763 __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
764 int literal_offset =
765 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
766 __ ldr(r0, FieldMemOperand(r4, literal_offset));
767 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
768 __ cmp(r0, ip);
769 __ b(ne, &done);
770 __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
771 __ mov(r2, Operand(expr->pattern()));
772 __ mov(r1, Operand(expr->flags()));
773 __ stm(db_w, sp, r4.bit() | r3.bit() | r2.bit() | r1.bit());
774 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
775 __ bind(&done);
776 Apply(context_, r0);
777}
778
779
780void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
781 Comment cmnt(masm_, "[ ObjectLiteral");
782 __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
783 __ ldr(r2, FieldMemOperand(r2, JSFunction::kLiteralsOffset));
784 __ mov(r1, Operand(Smi::FromInt(expr->literal_index())));
785 __ mov(r0, Operand(expr->constant_properties()));
786 __ stm(db_w, sp, r2.bit() | r1.bit() | r0.bit());
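  // Nested literals (depth > 1) need the general runtime function; flat
  // literals can use the cheaper shallow-clone variant.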
787 if (expr->depth() > 1) {
788 __ CallRuntime(Runtime::kCreateObjectLiteral, 3);
789 } else {
790 __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 3);
791 }
792
793 // If result_saved is true the result is on top of the stack. If
794 // result_saved is false the result is in r0.
795 bool result_saved = false;
796
797 for (int i = 0; i < expr->properties()->length(); i++) {
798 ObjectLiteral::Property* property = expr->properties()->at(i);
799 if (property->IsCompileTimeValue()) continue;
800
801 Literal* key = property->key();
802 Expression* value = property->value();
803 if (!result_saved) {
804 __ push(r0); // Save result on stack
805 result_saved = true;
806 }
807 switch (property->kind()) {
808 case ObjectLiteral::Property::CONSTANT:
809 UNREACHABLE();
810 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
811 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
812 // Fall through.
813 case ObjectLiteral::Property::COMPUTED:
814 if (key->handle()->IsSymbol()) {
815 VisitForValue(value, kAccumulator);
816 __ mov(r2, Operand(key->handle()));
817 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
818 __ Call(ic, RelocInfo::CODE_TARGET);
819 // StoreIC leaves the receiver on the stack.
820 break;
821 }
822 // Fall through.
823 case ObjectLiteral::Property::PROTOTYPE:
824 // Duplicate receiver on stack.
825 __ ldr(r0, MemOperand(sp));
826 __ push(r0);
827 VisitForValue(key, kStack);
828 VisitForValue(value, kStack);
829 __ CallRuntime(Runtime::kSetProperty, 3);
830 break;
831 case ObjectLiteral::Property::GETTER:
832 case ObjectLiteral::Property::SETTER:
833 // Duplicate receiver on stack.
834 __ ldr(r0, MemOperand(sp));
835 __ push(r0);
836 VisitForValue(key, kStack);
837 __ mov(r1, Operand(property->kind() == ObjectLiteral::Property::SETTER ?
838 Smi::FromInt(1) :
839 Smi::FromInt(0)));
840 __ push(r1);
841 VisitForValue(value, kStack);
842 __ CallRuntime(Runtime::kDefineAccessor, 4);
843 break;
844 }
845 }
846
847 if (result_saved) {
848 ApplyTOS(context_);
849 } else {
850 Apply(context_, r0);
851 }
852}
853
854
855void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
856 Comment cmnt(masm_, "[ ArrayLiteral");
857 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
858 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
859 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
860 __ mov(r1, Operand(expr->constant_elements()));
861 __ stm(db_w, sp, r3.bit() | r2.bit() | r1.bit());
862 if (expr->depth() > 1) {
863 __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
864 } else {
865 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
866 }
867
868 bool result_saved = false; // Is the result saved to the stack?
869
870 // Emit code to evaluate all the non-constant subexpressions and to store
871 // them into the newly cloned array.
872 ZoneList<Expression*>* subexprs = expr->values();
873 for (int i = 0, len = subexprs->length(); i < len; i++) {
874 Expression* subexpr = subexprs->at(i);
875 // If the subexpression is a literal or a simple materialized literal it
876 // is already set in the cloned array.
877 if (subexpr->AsLiteral() != NULL ||
878 CompileTimeValue::IsCompileTimeValue(subexpr)) {
879 continue;
880 }
881
882 if (!result_saved) {
883 __ push(r0);
884 result_saved = true;
885 }
886 VisitForValue(subexpr, kAccumulator);
887
888 // Store the subexpression value in the array's elements.
889 __ ldr(r1, MemOperand(sp)); // Copy of array literal.
890 __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
891 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
892 __ str(result_register(), FieldMemOperand(r1, offset));
893
894 // Update the write barrier for the array store with r0 as the scratch
895 // register.
896 __ mov(r2, Operand(offset));
897 __ RecordWrite(r1, r2, result_register());
898 }
899
900 if (result_saved) {
901 ApplyTOS(context_);
902 } else {
903 Apply(context_, r0);
904 }
905}
906
907
908void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
909 SetSourcePosition(prop->position());
910 Literal* key = prop->key()->AsLiteral();
911 __ mov(r2, Operand(key->handle()));
912 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
913 __ Call(ic, RelocInfo::CODE_TARGET);
914}
915
916
917void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
918 SetSourcePosition(prop->position());
919 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
920 __ Call(ic, RelocInfo::CODE_TARGET);
921}
922
923
924void FullCodeGenerator::EmitBinaryOp(Token::Value op,
925 Expression::Context context) {
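  // The left operand was pushed on the stack by the caller and the right
  // operand is already in r0 (the accumulator); pop the left operand into r1
  // for the generic binary op stub.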
926 __ pop(r1);
927 GenericBinaryOpStub stub(op, NO_OVERWRITE);
928 __ CallStub(&stub);
929 Apply(context, r0);
930}
931
932
933void FullCodeGenerator::EmitVariableAssignment(Variable* var,
934 Expression::Context context) {
935 // Three main cases: global variables, lookup slots, and all other
936 // types of slots. Left-hand-side parameters that rewrite to
937 // explicit property accesses do not reach here.
938 ASSERT(var != NULL);
939 ASSERT(var->is_global() || var->slot() != NULL);
940
941 Slot* slot = var->slot();
942 if (var->is_global()) {
943 ASSERT(!var->is_this());
944 // Assignment to a global variable. Use inline caching for the
945 // assignment. Right-hand-side value is passed in r0, variable name in
946 // r2, and the global object on the stack.
947 __ mov(r2, Operand(var->name()));
948 __ ldr(ip, CodeGenerator::GlobalObject());
949 __ push(ip);
950 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
951 __ Call(ic, RelocInfo::CODE_TARGET);
952 // Overwrite the global object on the stack with the result if needed.
953 DropAndApply(1, context, r0);
954
955 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
956 __ push(result_register()); // Value.
957 __ mov(r1, Operand(var->name()));
958 __ stm(db_w, sp, cp.bit() | r1.bit()); // Context and name.
959 __ CallRuntime(Runtime::kStoreContextSlot, 3);
960 Apply(context, r0);
961
962 } else if (var->slot() != NULL) {
963 Slot* slot = var->slot();
964 switch (slot->type()) {
965 case Slot::LOCAL:
966 case Slot::PARAMETER:
967 __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
968 break;
969
970 case Slot::CONTEXT: {
971 MemOperand target = EmitSlotSearch(slot, r1);
972 __ str(result_register(), target);
973
974 // RecordWrite may destroy all its register arguments.
975 __ mov(r3, result_register());
976 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
977
978 __ mov(r2, Operand(offset));
979 __ RecordWrite(r1, r2, r3);
980 break;
981 }
982
983 case Slot::LOOKUP:
984 UNREACHABLE();
985 break;
986 }
987 Apply(context, result_register());
988
989 } else {
990 // Variables rewritten as properties are not treated as variables in
991 // assignments.
992 UNREACHABLE();
993 }
994}
995
996
997void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
998 // Assignment to a property, using a named store IC.
999 Property* prop = expr->target()->AsProperty();
1000 ASSERT(prop != NULL);
1001 ASSERT(prop->key()->AsLiteral() != NULL);
1002
1003 // If the assignment starts a block of assignments to the same object,
1004 // change to slow case to avoid the quadratic behavior of repeatedly
1005 // adding fast properties.
1006 if (expr->starts_initialization_block()) {
1007 __ push(result_register());
1008 __ ldr(ip, MemOperand(sp, kPointerSize)); // Receiver is now under value.
1009 __ push(ip);
1010 __ CallRuntime(Runtime::kToSlowProperties, 1);
1011 __ pop(result_register());
1012 }
1013
1014 // Record source code position before IC call.
1015 SetSourcePosition(expr->position());
1016 __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
1017 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
1018 __ Call(ic, RelocInfo::CODE_TARGET);
1019
1020 // If the assignment ends an initialization block, revert to fast case.
1021 if (expr->ends_initialization_block()) {
1022 __ push(r0); // Result of assignment, saved even if not needed.
1023 __ ldr(ip, MemOperand(sp, kPointerSize)); // Receiver is under value.
1024 __ push(ip);
1025 __ CallRuntime(Runtime::kToFastProperties, 1);
1026 __ pop(r0);
1027 }
1028
1029 DropAndApply(1, context_, r0);
1030}
1031
1032
1033void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
1034 // Assignment to a property, using a keyed store IC.
1035
1036 // If the assignment starts a block of assignments to the same object,
1037 // change to slow case to avoid the quadratic behavior of repeatedly
1038 // adding fast properties.
1039 if (expr->starts_initialization_block()) {
1040 __ push(result_register());
1041 // Receiver is now under the key and value.
1042 __ ldr(ip, MemOperand(sp, 2 * kPointerSize));
1043 __ push(ip);
1044 __ CallRuntime(Runtime::kToSlowProperties, 1);
1045 __ pop(result_register());
1046 }
1047
1048 // Record source code position before IC call.
1049 SetSourcePosition(expr->position());
1050 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
1051 __ Call(ic, RelocInfo::CODE_TARGET);
1052
1053 // If the assignment ends an initialization block, revert to fast case.
1054 if (expr->ends_initialization_block()) {
1055 __ push(r0); // Result of assignment, saved even if not needed.
1056 // Receiver is under the key and value.
1057 __ ldr(ip, MemOperand(sp, 2 * kPointerSize));
1058 __ push(ip);
1059 __ CallRuntime(Runtime::kToFastProperties, 1);
1060 __ pop(r0);
1061 }
1062
1063 // Receiver and key are still on stack.
1064 DropAndApply(2, context_, r0);
1065}
1066
1067
1068void FullCodeGenerator::VisitProperty(Property* expr) {
1069 Comment cmnt(masm_, "[ Property");
1070 Expression* key = expr->key();
1071
1072 // Evaluate receiver.
1073 VisitForValue(expr->obj(), kStack);
1074
1075 if (key->IsPropertyName()) {
1076 EmitNamedPropertyLoad(expr);
1077 // Drop receiver left on the stack by IC.
1078 DropAndApply(1, context_, r0);
1079 } else {
1080 VisitForValue(expr->key(), kStack);
1081 EmitKeyedPropertyLoad(expr);
1082 // Drop key and receiver left on the stack by IC.
1083 DropAndApply(2, context_, r0);
1084 }
1085}
1086
1087void FullCodeGenerator::EmitCallWithIC(Call* expr,
1088 Handle<Object> ignored,
1089 RelocInfo::Mode mode) {
1090 // Code common for calls using the IC.
1091 ZoneList<Expression*>* args = expr->arguments();
1092 int arg_count = args->length();
1093 for (int i = 0; i < arg_count; i++) {
1094 VisitForValue(args->at(i), kStack);
1095 }
1096 // Record source position for debugger.
1097 SetSourcePosition(expr->position());
1098 // Call the IC initialization code.
1099 Handle<Code> ic = CodeGenerator::ComputeCallInitialize(arg_count,
1100 NOT_IN_LOOP);
1101 __ Call(ic, mode);
1102 // Restore context register.
1103 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1104 // Discard the function left on TOS.
1105 DropAndApply(1, context_, r0);
1106}
1107
1108
1109void FullCodeGenerator::EmitCallWithStub(Call* expr) {
1110 // Code common for calls using the call stub.
1111 ZoneList<Expression*>* args = expr->arguments();
1112 int arg_count = args->length();
1113 for (int i = 0; i < arg_count; i++) {
1114 VisitForValue(args->at(i), kStack);
1115 }
1116 // Record source position for debugger.
1117 SetSourcePosition(expr->position());
1118 CallFunctionStub stub(arg_count, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
1119 __ CallStub(&stub);
1120 // Restore context register.
1121 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1122 // Discard the function left on TOS.
1123 DropAndApply(1, context_, r0);
1124}
1125
1126
1127void FullCodeGenerator::VisitCall(Call* expr) {
1128 Comment cmnt(masm_, "[ Call");
1129 Expression* fun = expr->expression();
1130 Variable* var = fun->AsVariableProxy()->AsVariable();
1131
1132 if (var != NULL && var->is_possibly_eval()) {
1133 // Call to the identifier 'eval'.
1134 UNREACHABLE();
1135 } else if (var != NULL && !var->is_this() && var->is_global()) {
1136 // Call to a global variable.
1137 __ mov(r1, Operand(var->name()));
1138 // Push global object as receiver for the call IC lookup.
1139 __ ldr(r0, CodeGenerator::GlobalObject());
1140 __ stm(db_w, sp, r1.bit() | r0.bit());
1141 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
1142 } else if (var != NULL && var->slot() != NULL &&
1143 var->slot()->type() == Slot::LOOKUP) {
1144 // Call to a lookup slot.
1145 UNREACHABLE();
1146 } else if (fun->AsProperty() != NULL) {
1147 // Call to an object property.
1148 Property* prop = fun->AsProperty();
1149 Literal* key = prop->key()->AsLiteral();
1150 if (key != NULL && key->handle()->IsSymbol()) {
1151 // Call to a named property, use call IC.
1152 __ mov(r0, Operand(key->handle()));
1153 __ push(r0);
1154 VisitForValue(prop->obj(), kStack);
1155 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
1156 } else {
1157 // Call to a keyed property, use keyed load IC followed by function
1158 // call.
1159 VisitForValue(prop->obj(), kStack);
1160 VisitForValue(prop->key(), kStack);
1161 // Record source code position for IC call.
1162 SetSourcePosition(prop->position());
1163 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
1164 __ Call(ic, RelocInfo::CODE_TARGET);
1165 // Load receiver object into r1.
1166 if (prop->is_synthetic()) {
1167 __ ldr(r1, CodeGenerator::GlobalObject());
1168 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
1169 } else {
1170 __ ldr(r1, MemOperand(sp, kPointerSize));
1171 }
1172 // Overwrite (object, key) with (function, receiver).
1173 __ str(r0, MemOperand(sp, kPointerSize));
1174 __ str(r1, MemOperand(sp));
1175 EmitCallWithStub(expr);
1176 }
1177 } else {
1178 // Call to some other expression. If the expression is an anonymous
1179 // function literal not called in a loop, mark it as one that should
1180 // also use the full code generator.
1181 FunctionLiteral* lit = fun->AsFunctionLiteral();
1182 if (lit != NULL &&
1183 lit->name()->Equals(Heap::empty_string()) &&
1184 loop_depth() == 0) {
1185 lit->set_try_full_codegen(true);
1186 }
1187 VisitForValue(fun, kStack);
1188 // Load global receiver object.
1189 __ ldr(r1, CodeGenerator::GlobalObject());
1190 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
1191 __ push(r1);
1192 // Emit function call.
1193 EmitCallWithStub(expr);
1194 }
1195}
1196
1197
1198void FullCodeGenerator::VisitCallNew(CallNew* expr) {
1199 Comment cmnt(masm_, "[ CallNew");
1200 // According to ECMA-262, section 11.2.2, page 44, the function
1201 // expression in new calls must be evaluated before the
1202 // arguments.
1203 // Push function on the stack.
1204 VisitForValue(expr->expression(), kStack);
1205
1206 // Push global object (receiver).
1207 __ ldr(r0, CodeGenerator::GlobalObject());
1208 __ push(r0);
1209 // Push the arguments ("left-to-right") on the stack.
1210 ZoneList<Expression*>* args = expr->arguments();
1211 int arg_count = args->length();
1212 for (int i = 0; i < arg_count; i++) {
1213 VisitForValue(args->at(i), kStack);
1214 }
1215
1216 // Call the construct call builtin that handles allocation and
1217 // constructor invocation.
1218 SetSourcePosition(expr->position());
1219
1220 // Load function, arg_count into r1 and r0.
1221 __ mov(r0, Operand(arg_count));
1222 // Function is in sp[arg_count + 1].
1223 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
1224
1225 Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall));
1226 __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
1227
1228 // Replace function on TOS with result in r0, or pop it.
1229 DropAndApply(1, context_, r0);
1230}
1231
1232
1233void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
1234 Comment cmnt(masm_, "[ CallRuntime");
1235 ZoneList<Expression*>* args = expr->arguments();
1236
1237 if (expr->is_jsruntime()) {
1238 // Prepare for calling JS runtime function.
1239 __ mov(r1, Operand(expr->name()));
1240 __ ldr(r0, CodeGenerator::GlobalObject());
1241 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
1242 __ stm(db_w, sp, r1.bit() | r0.bit());
1243 }
1244
1245 // Push the arguments ("left-to-right").
1246 int arg_count = args->length();
1247 for (int i = 0; i < arg_count; i++) {
1248 VisitForValue(args->at(i), kStack);
1249 }
1250
1251 if (expr->is_jsruntime()) {
1252 // Call the JS runtime function.
1253 Handle<Code> ic = CodeGenerator::ComputeCallInitialize(arg_count,
1254 NOT_IN_LOOP);
1255 __ Call(ic, RelocInfo::CODE_TARGET);
1256 // Restore context register.
1257 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1258 // Discard the function left on TOS.
1259 DropAndApply(1, context_, r0);
1260 } else {
1261 // Call the C runtime function.
1262 __ CallRuntime(expr->function(), arg_count);
1263 Apply(context_, r0);
1264 }
1265}
1266
1267
1268void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
1269 switch (expr->op()) {
1270 case Token::VOID: {
1271 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
1272 VisitForEffect(expr->expression());
1273 switch (context_) {
1274 case Expression::kUninitialized:
1275 UNREACHABLE();
1276 break;
1277 case Expression::kEffect:
1278 break;
1279 case Expression::kValue:
1280 __ LoadRoot(result_register(), Heap::kUndefinedValueRootIndex);
1281 switch (location_) {
1282 case kAccumulator:
1283 break;
1284 case kStack:
1285 __ push(result_register());
1286 break;
1287 }
1288 break;
1289 case Expression::kTestValue:
1290 // Value is false so it's needed.
1291 __ LoadRoot(result_register(), Heap::kUndefinedValueRootIndex);
1292 switch (location_) {
1293 case kAccumulator:
1294 break;
1295 case kStack:
1296 __ push(result_register());
1297 break;
1298 }
1299 // Fall through.
1300 case Expression::kTest:
1301 case Expression::kValueTest:
1302 __ jmp(false_label_);
1303 break;
1304 }
1305 break;
1306 }
1307
1308 case Token::NOT: {
1309 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
1310 Label materialize_true, materialize_false, done;
1311 // Initially assume a pure test context. Notice that the labels are
1312 // swapped.
1313 Label* if_true = false_label_;
1314 Label* if_false = true_label_;
1315 switch (context_) {
1316 case Expression::kUninitialized:
1317 UNREACHABLE();
1318 break;
1319 case Expression::kEffect:
1320 if_true = &done;
1321 if_false = &done;
1322 break;
1323 case Expression::kValue:
1324 if_true = &materialize_false;
1325 if_false = &materialize_true;
1326 break;
1327 case Expression::kTest:
1328 break;
1329 case Expression::kValueTest:
1330 if_false = &materialize_true;
1331 break;
1332 case Expression::kTestValue:
1333 if_true = &materialize_false;
1334 break;
1335 }
1336 VisitForControl(expr->expression(), if_true, if_false);
1337 Apply(context_, if_false, if_true); // Labels swapped.
1338 break;
1339 }
1340
1341 case Token::TYPEOF: {
1342 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
1343 VariableProxy* proxy = expr->expression()->AsVariableProxy();
1344 if (proxy != NULL &&
1345 !proxy->var()->is_this() &&
1346 proxy->var()->is_global()) {
1347 Comment cmnt(masm_, "Global variable");
1348 __ ldr(r0, CodeGenerator::GlobalObject());
1349 __ push(r0);
1350 __ mov(r2, Operand(proxy->name()));
1351 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
1352 // Use a regular load, not a contextual load, to avoid a reference
1353 // error.
1354 __ Call(ic, RelocInfo::CODE_TARGET);
1355 __ str(r0, MemOperand(sp));
1356 } else if (proxy != NULL &&
1357 proxy->var()->slot() != NULL &&
1358 proxy->var()->slot()->type() == Slot::LOOKUP) {
1359 __ mov(r0, Operand(proxy->name()));
1360 __ stm(db_w, sp, cp.bit() | r0.bit());
1361 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
1362 __ push(r0);
1363 } else {
1364 // This expression cannot throw a reference error at the top level.
1365 VisitForValue(expr->expression(), kStack);
1366 }
1367
1368 __ CallRuntime(Runtime::kTypeof, 1);
1369 Apply(context_, r0);
1370 break;
1371 }
1372
1373 case Token::ADD: {
1374 Comment cmt(masm_, "[ UnaryOperation (ADD)");
1375 VisitForValue(expr->expression(), kAccumulator);
1376 Label no_conversion;
1377 __ tst(result_register(), Operand(kSmiTagMask));
1378 __ b(eq, &no_conversion);
1379 __ push(r0);
1380 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS);
1381 __ bind(&no_conversion);
1382 Apply(context_, result_register());
1383 break;
1384 }
1385
1386 case Token::SUB: {
1387 Comment cmt(masm_, "[ UnaryOperation (SUB)");
1388 bool overwrite =
1389 (expr->expression()->AsBinaryOperation() != NULL &&
1390 expr->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
1391 GenericUnaryOpStub stub(Token::SUB, overwrite);
1392 // GenericUnaryOpStub expects the argument to be in the
1393 // accumulator register r0.
1394 VisitForValue(expr->expression(), kAccumulator);
1395 __ CallStub(&stub);
1396 Apply(context_, r0);
1397 break;
1398 }
1399
1400 case Token::BIT_NOT: {
1401 Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
1402 bool overwrite =
1403 (expr->expression()->AsBinaryOperation() != NULL &&
1404 expr->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
1405 GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
1406 // GenericUnaryOpStub expects the argument to be in the
1407 // accumulator register r0.
1408 VisitForValue(expr->expression(), kAccumulator);
1409 // Avoid calling the stub for Smis.
1410 Label smi, done;
1411 __ tst(result_register(), Operand(kSmiTagMask));
1412 __ b(eq, &smi);
1413 // Non-smi: call stub leaving result in accumulator register.
1414 __ CallStub(&stub);
1415 __ b(&done);
1416 // Perform operation directly on Smis.
1417 __ bind(&smi);
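        // For a smi (value << 1, tag 0), mvn inverts every bit including the
        // tag, and the bic below clears the inverted tag bit again, leaving
        // the smi encoding of the bitwise-complemented value.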
1418 __ mvn(result_register(), Operand(result_register()));
1419 // Bit-clear inverted smi-tag.
1420 __ bic(result_register(), result_register(), Operand(kSmiTagMask));
1421 __ bind(&done);
1422 Apply(context_, result_register());
1423 break;
1424 }
1425
1426 default:
1427 UNREACHABLE();
1428 }
1429}
1430
1431
1432void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
1433 Comment cmnt(masm_, "[ CountOperation");
1434
1435 // Expression can only be a property, a global or a (parameter or local)
1436 // slot. Variables that rewrite to .arguments are treated as KEYED_PROPERTY.
1437 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1438 LhsKind assign_type = VARIABLE;
1439 Property* prop = expr->expression()->AsProperty();
1440 // In case of a property we use the uninitialized expression context
1441 // of the key to detect a named property.
1442 if (prop != NULL) {
1443 assign_type =
1444 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
1445 }
1446
1447 // Evaluate expression and get value.
1448 if (assign_type == VARIABLE) {
1449 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
1450 Location saved_location = location_;
1451 location_ = kAccumulator;
1452 EmitVariableLoad(expr->expression()->AsVariableProxy()->var(),
1453 Expression::kValue);
1454 location_ = saved_location;
1455 } else {
1456 // Reserve space for result of postfix operation.
1457 if (expr->is_postfix() && context_ != Expression::kEffect) {
1458 __ mov(ip, Operand(Smi::FromInt(0)));
1459 __ push(ip);
1460 }
1461 VisitForValue(prop->obj(), kStack);
1462 if (assign_type == NAMED_PROPERTY) {
1463 EmitNamedPropertyLoad(prop);
1464 } else {
1465 VisitForValue(prop->key(), kStack);
1466 EmitKeyedPropertyLoad(prop);
1467 }
1468 }
1469
1470 // Call ToNumber only if operand is not a smi.
1471 Label no_conversion;
1472 __ tst(r0, Operand(kSmiTagMask));
1473 __ b(eq, &no_conversion);
1474 __ push(r0);
1475 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS);
1476 __ bind(&no_conversion);
1477
1478 // Save result for postfix expressions.
1479 if (expr->is_postfix()) {
1480 switch (context_) {
1481 case Expression::kUninitialized:
1482 UNREACHABLE();
1483 case Expression::kEffect:
1484 // Do not save result.
1485 break;
1486 case Expression::kValue:
1487 case Expression::kTest:
1488 case Expression::kValueTest:
1489 case Expression::kTestValue:
1490 // Save the result on the stack. If we have a named or keyed property
1491 // we store the result under the receiver that is currently on top
1492 // of the stack.
1493 switch (assign_type) {
1494 case VARIABLE:
1495 __ push(r0);
1496 break;
1497 case NAMED_PROPERTY:
1498 __ str(r0, MemOperand(sp, kPointerSize));
1499 break;
1500 case KEYED_PROPERTY:
1501 __ str(r0, MemOperand(sp, 2 * kPointerSize));
1502 break;
1503 }
1504 break;
1505 }
1506 }
1507
1508
1509 // Inline smi case if we are in a loop.
1510 Label stub_call, done;
1511 if (loop_depth() > 0) {
1512 __ add(r0, r0, Operand(expr->op() == Token::INC
1513 ? Smi::FromInt(1)
1514 : Smi::FromInt(-1)), SetCC);
1515 __ b(vs, &stub_call);
1516 // We could eliminate this smi check if we split the code at
1517 // the first smi check before calling ToNumber.
1518 __ tst(r0, Operand(kSmiTagMask));
1519 __ b(eq, &done);
1520 __ bind(&stub_call);
1521 // Call stub. Undo the inline increment/decrement first.
1522 __ sub(r0, r0, Operand(expr->op() == Token::INC
                            ? Smi::FromInt(1)
                            : Smi::FromInt(-1)));
1523 }
1524 __ mov(r1, Operand(expr->op() == Token::INC
1525 ? Smi::FromInt(1)
1526 : Smi::FromInt(-1)));
1527 GenericBinaryOpStub stub(Token::ADD, NO_OVERWRITE);
1528 __ CallStub(&stub);
1529 __ bind(&done);
1530
1531 // Store the value returned in r0.
1532 switch (assign_type) {
1533 case VARIABLE:
1534 if (expr->is_postfix()) {
1535 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
1536 Expression::kEffect);
1537 // For all contexts except kEffect: We have the result on
1538 // top of the stack.
1539 if (context_ != Expression::kEffect) {
1540 ApplyTOS(context_);
1541 }
1542 } else {
1543 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
1544 context_);
1545 }
1546 break;
1547 case NAMED_PROPERTY: {
1548 __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
1549 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
1550 __ Call(ic, RelocInfo::CODE_TARGET);
1551 if (expr->is_postfix()) {
1552 __ Drop(1); // Result is on the stack under the receiver.
1553 if (context_ != Expression::kEffect) {
1554 ApplyTOS(context_);
1555 }
1556 } else {
1557 DropAndApply(1, context_, r0);
1558 }
1559 break;
1560 }
1561 case KEYED_PROPERTY: {
1562 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
1563 __ Call(ic, RelocInfo::CODE_TARGET);
1564 if (expr->is_postfix()) {
1565 __ Drop(2); // Result is on the stack under the key and the receiver.
1566 if (context_ != Expression::kEffect) {
1567 ApplyTOS(context_);
1568 }
1569 } else {
1570 DropAndApply(2, context_, r0);
1571 }
1572 break;
1573 }
1574 }
1575}
1576
1577
1578void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
1579 Comment cmnt(masm_, "[ BinaryOperation");
1580 switch (expr->op()) {
1581 case Token::COMMA:
1582 VisitForEffect(expr->left());
1583 Visit(expr->right());
1584 break;
1585
1586 case Token::OR:
1587 case Token::AND:
1588 EmitLogicalOperation(expr);
1589 break;
1590
1591 case Token::ADD:
1592 case Token::SUB:
1593 case Token::DIV:
1594 case Token::MOD:
1595 case Token::MUL:
1596 case Token::BIT_OR:
1597 case Token::BIT_AND:
1598 case Token::BIT_XOR:
1599 case Token::SHL:
1600 case Token::SHR:
1601 case Token::SAR:
1602 VisitForValue(expr->left(), kStack);
1603 VisitForValue(expr->right(), kAccumulator);
1604 EmitBinaryOp(expr->op(), context_);
1605 break;
1606
1607 default:
1608 UNREACHABLE();
1609 }
1610}
1611
1612
1613void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
1614 Comment cmnt(masm_, "[ CompareOperation");
1615
1616 // Always perform the comparison for its control flow. Pack the result
1617 // into the expression's context after the comparison is performed.
1618 Label materialize_true, materialize_false, done;
1619 // Initially assume we are in a test context.
1620 Label* if_true = true_label_;
1621 Label* if_false = false_label_;
1622 switch (context_) {
1623 case Expression::kUninitialized:
1624 UNREACHABLE();
1625 break;
1626 case Expression::kEffect:
1627 if_true = &done;
1628 if_false = &done;
1629 break;
1630 case Expression::kValue:
1631 if_true = &materialize_true;
1632 if_false = &materialize_false;
1633 break;
1634 case Expression::kTest:
1635 break;
1636 case Expression::kValueTest:
1637 if_true = &materialize_true;
1638 break;
1639 case Expression::kTestValue:
1640 if_false = &materialize_false;
1641 break;
1642 }
1643
1644 VisitForValue(expr->left(), kStack);
1645 switch (expr->op()) {
1646 case Token::IN:
1647 VisitForValue(expr->right(), kStack);
1648 __ InvokeBuiltin(Builtins::IN, CALL_JS);
1649 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1650 __ cmp(r0, ip);
1651 __ b(eq, if_true);
1652 __ jmp(if_false);
1653 break;
1654
1655 case Token::INSTANCEOF: {
1656 VisitForValue(expr->right(), kStack);
1657 InstanceofStub stub;
1658 __ CallStub(&stub);
1659 __ tst(r0, r0);
1660 __ b(eq, if_true); // The stub returns 0 for true.
1661 __ jmp(if_false);
1662 break;
1663 }
1664
1665 default: {
1666 VisitForValue(expr->right(), kAccumulator);
1667 Condition cc = eq;
1668 bool strict = false;
1669 switch (expr->op()) {
1670 case Token::EQ_STRICT:
1671 strict = true;
1672 // Fall through
1673 case Token::EQ:
1674 cc = eq;
1675 __ pop(r1);
1676 break;
1677 case Token::LT:
1678 cc = lt;
1679 __ pop(r1);
1680 break;
1681 case Token::GT:
1682 // Reverse left and right sides to obtain ECMA-262 conversion order.
1683 cc = lt;
1684 __ mov(r1, result_register());
1685 __ pop(r0);
1686 break;
1687 case Token::LTE:
1688 // Reverse left and right sides to obtain ECMA-262 conversion order.
1689 cc = ge;
1690 __ mov(r1, result_register());
1691 __ pop(r0);
1692 break;
1693 case Token::GTE:
1694 cc = ge;
1695 __ pop(r1);
1696 break;
1697 case Token::IN:
1698 case Token::INSTANCEOF:
1699 default:
1700 UNREACHABLE();
1701 }
1702
1703 // The comparison stub expects the smi vs. smi case to be handled
1704 // before it is called.
1705 Label slow_case;
1706 __ orr(r2, r0, Operand(r1));
1707 __ tst(r2, Operand(kSmiTagMask));
1708 __ b(ne, &slow_case);
1709 __ cmp(r1, r0);
1710 __ b(cc, if_true);
1711 __ jmp(if_false);
1712
1713 __ bind(&slow_case);
1714 CompareStub stub(cc, strict);
1715 __ CallStub(&stub);
1716 __ cmp(r0, Operand(0));
1717 __ b(cc, if_true);
1718 __ jmp(if_false);
1719 }
1720 }
1721
1722 // Convert the result of the comparison into one expected for this
1723 // expression's context.
1724 Apply(context_, if_true, if_false);
1725}
1726
1727
1728void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
1729 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1730 Apply(context_, r0);
1731}
1732
1733
1734Register FullCodeGenerator::result_register() { return r0; }
1735
1736
1737Register FullCodeGenerator::context_register() { return cp; }
1738
1739
1740void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
1741 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
1742 __ str(value, MemOperand(fp, frame_offset));
1743}
1744
1745
1746void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
1747 __ ldr(dst, CodeGenerator::ContextOperand(cp, context_index));
1748}
1749
1750
1751// ----------------------------------------------------------------------------
1752// Non-local control flow support.
1753
1754void FullCodeGenerator::EnterFinallyBlock() {
1755 ASSERT(!result_register().is(r1));
1756 // Store result register while executing finally block.
1757 __ push(result_register());
1758 // Cook the return address in lr onto the stack as a smi-encoded Code* delta.
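  // The smi-encoded delta keeps this stack slot a valid tagged value, so the
  // GC can scan it like any other expression stack entry while the finally
  // block runs.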
1759 __ sub(r1, lr, Operand(masm_->CodeObject()));
1760 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
1761 ASSERT_EQ(0, kSmiTag);
1762 __ add(r1, r1, Operand(r1)); // Convert to smi.
1763 __ push(r1);
1764}
1765
1766
1767void FullCodeGenerator::ExitFinallyBlock() {
1768 ASSERT(!result_register().is(r1));
1769 // Restore result register from stack.
1770 __ pop(r1);
1771 // Uncook return address and return.
1772 __ pop(result_register());
1773 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
1774 __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value.
1775 __ add(pc, r1, Operand(masm_->CodeObject()));
1776}
1777
1778
1779#undef __
1780
1781} } // namespace v8::internal