blob: ca1edd46b8b3f3f555a2863f65478cd5424a2f99 [file] [log] [blame]
Andrei Popescu31002712010-02-23 13:46:05 +00001// Copyright 2010 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28
29#include "v8.h"
30
31#include "bootstrapper.h"
32#include "codegen-inl.h"
Steve Block6ded16b2010-05-10 14:33:55 +010033#include "compiler.h"
Andrei Popescu31002712010-02-23 13:46:05 +000034#include "debug.h"
Steve Block6ded16b2010-05-10 14:33:55 +010035#include "ic-inl.h"
Andrei Popescu31002712010-02-23 13:46:05 +000036#include "parser.h"
37#include "register-allocator-inl.h"
38#include "runtime.h"
39#include "scopes.h"
Steve Block6ded16b2010-05-10 14:33:55 +010040#include "virtual-frame-inl.h"
Andrei Popescu31002712010-02-23 13:46:05 +000041
42
43
44namespace v8 {
45namespace internal {
46
47#define __ ACCESS_MASM(masm_)
48
49
50
Steve Block6ded16b2010-05-10 14:33:55 +010051// -----------------------------------------------------------------------------
Andrei Popescu31002712010-02-23 13:46:05 +000052// Platform-specific DeferredCode functions.
53
54
// Saving of live registers around deferred code is not yet implemented
// on MIPS.
void DeferredCode::SaveRegisters() {
  UNIMPLEMENTED_MIPS();
}
58
59
// Restoring of registers after deferred code is not yet implemented
// on MIPS.
void DeferredCode::RestoreRegisters() {
  UNIMPLEMENTED_MIPS();
}
63
64
Steve Block6ded16b2010-05-10 14:33:55 +010065// -----------------------------------------------------------------------------
66// CodeGenState implementation.
67
// Construct an initial code-generator state with no branch targets and
// install it as the owner's current state.
CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      true_target_(NULL),
      false_target_(NULL),
      previous_(NULL) {
  owner_->set_state(this);
}
75
76
// Construct a state with explicit true/false branch targets, remembering
// the owner's previous state so the destructor can restore it.
CodeGenState::CodeGenState(CodeGenerator* owner,
                           JumpTarget* true_target,
                           JumpTarget* false_target)
    : owner_(owner),
      true_target_(true_target),
      false_target_(false_target),
      previous_(owner->state()) {
  owner_->set_state(this);
}
86
87
// Pop this state off the owner: states must be destroyed in strict LIFO
// order, which the assertion checks.
CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}
92
93
94// -----------------------------------------------------------------------------
Andrei Popescu31002712010-02-23 13:46:05 +000095// CodeGenerator implementation
96
// Construct a code generator around an existing macro assembler; the
// virtual frame and register allocator are created later in Generate().
CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      frame_(NULL),
      allocator_(NULL),
      cc_reg_(cc_always),
      state_(NULL),
      function_return_is_shadowed_(false) {
}
106
107
108// Calling conventions:
Steve Block6ded16b2010-05-10 14:33:55 +0100109// fp: caller's frame pointer
Andrei Popescu31002712010-02-23 13:46:05 +0000110// sp: stack pointer
111// a1: called JS function
112// cp: callee's context
113
Steve Block6ded16b2010-05-10 14:33:55 +0100114void CodeGenerator::Generate(CompilationInfo* info) {
115 // Record the position for debugging purposes.
116 CodeForFunctionPosition(info->function());
117
118 // Initialize state.
119 info_ = info;
120 ASSERT(allocator_ == NULL);
121 RegisterAllocator register_allocator(this);
122 allocator_ = &register_allocator;
123 ASSERT(frame_ == NULL);
124 frame_ = new VirtualFrame();
125 cc_reg_ = cc_always;
126
127 {
128 CodeGenState state(this);
129
130 // Registers:
131 // a1: called JS function
132 // ra: return address
133 // fp: caller's frame pointer
134 // sp: stack pointer
135 // cp: callee's context
136 //
137 // Stack:
138 // arguments
139 // receiver
140
141 frame_->Enter();
142
143 // Allocate space for locals and initialize them.
144 frame_->AllocateStackSlots();
145
146 // Initialize the function return target.
147 function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
148 function_return_is_shadowed_ = false;
149
150 VirtualFrame::SpilledScope spilled_scope;
151 if (scope()->num_heap_slots() > 0) {
152 UNIMPLEMENTED_MIPS();
153 }
154
155 {
156 Comment cmnt2(masm_, "[ copy context parameters into .context");
157
158 // Note that iteration order is relevant here! If we have the same
159 // parameter twice (e.g., function (x, y, x)), and that parameter
160 // needs to be copied into the context, it must be the last argument
161 // passed to the parameter that needs to be copied. This is a rare
162 // case so we don't check for it, instead we rely on the copying
163 // order: such a parameter is copied repeatedly into the same
164 // context location and thus the last value is what is seen inside
165 // the function.
166 for (int i = 0; i < scope()->num_parameters(); i++) {
167 UNIMPLEMENTED_MIPS();
168 }
169 }
170
171 // Store the arguments object. This must happen after context
172 // initialization because the arguments object may be stored in the
173 // context.
174 if (scope()->arguments() != NULL) {
175 UNIMPLEMENTED_MIPS();
176 }
177
178 // Generate code to 'execute' declarations and initialize functions
179 // (source elements). In case of an illegal redeclaration we need to
180 // handle that instead of processing the declarations.
181 if (scope()->HasIllegalRedeclaration()) {
182 Comment cmnt(masm_, "[ illegal redeclarations");
183 scope()->VisitIllegalRedeclaration(this);
184 } else {
185 Comment cmnt(masm_, "[ declarations");
186 ProcessDeclarations(scope()->declarations());
187 // Bail out if a stack-overflow exception occurred when processing
188 // declarations.
189 if (HasStackOverflow()) return;
190 }
191
192 if (FLAG_trace) {
193 UNIMPLEMENTED_MIPS();
194 }
195
196 // Compile the body of the function in a vanilla state. Don't
197 // bother compiling all the code if the scope has an illegal
198 // redeclaration.
199 if (!scope()->HasIllegalRedeclaration()) {
200 Comment cmnt(masm_, "[ function body");
201#ifdef DEBUG
202 bool is_builtin = Bootstrapper::IsActive();
203 bool should_trace =
204 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
205 if (should_trace) {
206 UNIMPLEMENTED_MIPS();
207 }
208#endif
209 VisitStatementsAndSpill(info->function()->body());
210 }
211 }
212
213 if (has_valid_frame() || function_return_.is_linked()) {
214 if (!function_return_.is_linked()) {
215 CodeForReturnPosition(info->function());
216 }
217 // Registers:
218 // v0: result
219 // sp: stack pointer
220 // fp: frame pointer
221 // cp: callee's context
222
223 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
224
225 function_return_.Bind();
226 if (FLAG_trace) {
227 UNIMPLEMENTED_MIPS();
228 }
229
230 // Add a label for checking the size of the code used for returning.
231 Label check_exit_codesize;
232 masm_->bind(&check_exit_codesize);
233
234 masm_->mov(sp, fp);
235 masm_->lw(fp, MemOperand(sp, 0));
236 masm_->lw(ra, MemOperand(sp, 4));
237 masm_->addiu(sp, sp, 8);
238
239 // Here we use masm_-> instead of the __ macro to avoid the code coverage
240 // tool from instrumenting as we rely on the code size here.
241 // TODO(MIPS): Should we be able to use more than 0x1ffe parameters?
242 masm_->addiu(sp, sp, (scope()->num_parameters() + 1) * kPointerSize);
243 masm_->Jump(ra);
244 // The Jump automatically generates a nop in the branch delay slot.
245
246 // Check that the size of the code used for returning matches what is
247 // expected by the debugger.
248 ASSERT_EQ(kJSReturnSequenceLength,
249 masm_->InstructionsGeneratedSince(&check_exit_codesize));
250 }
251
252 // Code generation state must be reset.
253 ASSERT(!has_cc());
254 ASSERT(state_ == NULL);
255 ASSERT(!function_return_is_shadowed_);
256 function_return_.Unuse();
257 DeleteFrame();
258
259 // Process any deferred code using the register allocator.
260 if (!HasStackOverflow()) {
261 ProcessDeferred();
262 }
263
264 allocator_ = NULL;
265}
266
267
// Classify |ref| and emit any code needed to prepare it for a later
// GetValue/SetValue. Only variable-proxy references are handled so far;
// property references are UNIMPLEMENTED_MIPS().
void CodeGenerator::LoadReference(Reference* ref) {
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    UNIMPLEMENTED_MIPS();
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property. Global variables are treated as named property references.
    if (var->is_global()) {
      // Push the global object; the reference is then keyed by name.
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->slot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    UNIMPLEMENTED_MIPS();
  }
}
291
292
// Remove the |size| frame slots occupied by |ref| while keeping the value
// on top of the stack (pop TOS into a0, drop, push a0 back).
void CodeGenerator::UnloadReference(Reference* ref) {
  VirtualFrame::SpilledScope spilled_scope;
  // Pop a reference from the stack while preserving TOS.
  Comment cmnt(masm_, "[ UnloadReference");
  int size = ref->size();
  if (size > 0) {
    frame_->EmitPop(a0);
    frame_->Drop(size);
    frame_->EmitPush(a0);
  }
  ref->set_unloaded();
}
305
306
307MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
308 // Currently, this assertion will fail if we try to assign to
309 // a constant variable that is constant because it is read-only
310 // (such as the variable referring to a named function expression).
311 // We need to implement assignments to read-only variables.
312 // Ideally, we should do this during AST generation (by converting
313 // such assignments into expression statements); however, in general
314 // we may not be able to make the decision until past AST generation,
315 // that is when the entire program is known.
316 ASSERT(slot != NULL);
317 int index = slot->index();
318 switch (slot->type()) {
319 case Slot::PARAMETER:
320 UNIMPLEMENTED_MIPS();
321 return MemOperand(no_reg, 0);
322
323 case Slot::LOCAL:
324 return frame_->LocalAt(index);
325
326 case Slot::CONTEXT: {
327 UNIMPLEMENTED_MIPS();
328 return MemOperand(no_reg, 0);
329 }
330
331 default:
332 UNREACHABLE();
333 return MemOperand(no_reg, 0);
334 }
335}
336
337
338// Loads a value on TOS. If it is a boolean value, the result may have been
339// (partially) translated into branches, or it may have set the condition
340// code register. If force_cc is set, the value is forced to set the
341// condition code register and no value is pushed. If the condition code
342// register was set, has_cc() is true and cc_reg_ contains the condition to
343// test for 'true'.
// See the block comment above: evaluates |x| for control flow, leaving
// either a value on TOS or a condition in the cc register (when force_cc).
void CodeGenerator::LoadCondition(Expression* x,
                                  JumpTarget* true_target,
                                  JumpTarget* false_target,
                                  bool force_cc) {
  ASSERT(!has_cc());
  int original_height = frame_->height();

  { CodeGenState new_state(this, true_target, false_target);
    Visit(x);

    // If we hit a stack overflow, we may not have actually visited
    // the expression. In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (eg, a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        has_valid_frame() &&
        !has_cc() &&
        frame_->height() == original_height) {
      true_target->Jump();
    }
  }
  if (force_cc && frame_ != NULL && !has_cc()) {
    // Convert the TOS value to a boolean in the condition code register.
    UNIMPLEMENTED_MIPS();
  }
  // Postconditions: either the condition code is set and the frame height
  // is unchanged, or a single value was pushed.
  ASSERT(!force_cc || !has_valid_frame() || has_cc());
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}
379
380
// Evaluate |x| and leave its value on top of the frame (force_cc false,
// so no condition-code result). Branch/cc merging paths are still
// UNIMPLEMENTED_MIPS().
void CodeGenerator::Load(Expression* x) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  JumpTarget true_target;
  JumpTarget false_target;
  LoadCondition(x, &true_target, &false_target, false);

  if (has_cc()) {
    UNIMPLEMENTED_MIPS();
  }

  if (true_target.is_linked() || false_target.is_linked()) {
    UNIMPLEMENTED_MIPS();
  }
  // Exactly one value must have been pushed.
  ASSERT(has_valid_frame());
  ASSERT(!has_cc());
  ASSERT(frame_->height() == original_height + 1);
}
400
401
// Push the global object onto the frame.
void CodeGenerator::LoadGlobal() {
  VirtualFrame::SpilledScope spilled_scope;
  __ lw(a0, GlobalObject());
  frame_->EmitPush(a0);
}
407
408
// Push the value stored in |slot| onto the frame. LOOKUP slots and the
// const-hole check are not yet implemented on MIPS.
void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
  VirtualFrame::SpilledScope spilled_scope;
  if (slot->type() == Slot::LOOKUP) {
    UNIMPLEMENTED_MIPS();
  } else {
    __ lw(a0, SlotOperand(slot, a2));
    frame_->EmitPush(a0);
    if (slot->var()->mode() == Variable::CONST) {
      UNIMPLEMENTED_MIPS();
    }
  }
}
421
422
// Store the value on top of the frame into |slot|, leaving the value on
// the frame (assignment expressions produce their value). LOOKUP slots,
// CONST_INIT and CONTEXT write barriers are still UNIMPLEMENTED_MIPS().
void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
  ASSERT(slot != NULL);
  if (slot->type() == Slot::LOOKUP) {
    UNIMPLEMENTED_MIPS();
  } else {
    ASSERT(!slot->var()->is_dynamic());

    JumpTarget exit;
    if (init_state == CONST_INIT) {
      UNIMPLEMENTED_MIPS();
    }

    // We must execute the store. Storing a variable must keep the
    // (new) value on the stack. This is necessary for compiling
    // assignment expressions.
    //
    // Note: We will reach here even with slot->var()->mode() ==
    // Variable::CONST because of const declarations which will
    // initialize consts to 'the hole' value and by doing so, end up
    // calling this code. a2 may be loaded with context; used below in
    // RecordWrite.
    frame_->EmitPop(a0);
    __ sw(a0, SlotOperand(slot, a2));
    frame_->EmitPush(a0);
    if (slot->type() == Slot::CONTEXT) {
      UNIMPLEMENTED_MIPS();
    }
    // If we definitely did not jump over the assignment, we do not need
    // to bind the exit label. Doing so can defeat peephole
    // optimization.
    if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) {
      exit.Bind();
    }
  }
}
458
459
// Visit each statement in order; the frame_ != NULL guard stops early if
// a statement invalidated the frame (no code can follow it).
void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
  VirtualFrame::SpilledScope spilled_scope;
  for (int i = 0; frame_ != NULL && i < statements->length(); i++) {
    VisitAndSpill(statements->at(i));
  }
}
466
467
// Not yet implemented on MIPS.
void CodeGenerator::VisitBlock(Block* node) {
  UNIMPLEMENTED_MIPS();
}
471
472
// Call the Runtime::kDeclareGlobals function with three arguments:
// the context, the name/value pairs array, and an is-eval Smi flag.
void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  VirtualFrame::SpilledScope spilled_scope;
  frame_->EmitPush(cp);
  __ li(t0, Operand(pairs));
  frame_->EmitPush(t0);
  __ li(t0, Operand(Smi::FromInt(is_eval() ? 1 : 0)));
  frame_->EmitPush(t0);
  frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
  // The result is discarded.
}
483
484
// Not yet implemented on MIPS.
void CodeGenerator::VisitDeclaration(Declaration* node) {
  UNIMPLEMENTED_MIPS();
}
488
489
490void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
Steve Block6ded16b2010-05-10 14:33:55 +0100491#ifdef DEBUG
492 int original_height = frame_->height();
493#endif
494 VirtualFrame::SpilledScope spilled_scope;
495 Comment cmnt(masm_, "[ ExpressionStatement");
496 CodeForStatementPosition(node);
497 Expression* expression = node->expression();
498 expression->MarkAsStatement();
499 LoadAndSpill(expression);
500 frame_->Drop();
501 ASSERT(frame_->height() == original_height);
Andrei Popescu31002712010-02-23 13:46:05 +0000502}
503
504
// The following statement visitors are not yet implemented on MIPS.
void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitIfStatement(IfStatement* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
  UNIMPLEMENTED_MIPS();
}
523
524
525void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
Steve Block6ded16b2010-05-10 14:33:55 +0100526 VirtualFrame::SpilledScope spilled_scope;
527 Comment cmnt(masm_, "[ ReturnStatement");
528
529 CodeForStatementPosition(node);
530 LoadAndSpill(node->expression());
531 if (function_return_is_shadowed_) {
532 frame_->EmitPop(v0);
533 function_return_.Jump();
534 } else {
535 // Pop the result from the frame and prepare the frame for
536 // returning thus making it easier to merge.
537 frame_->EmitPop(v0);
538 frame_->PrepareForReturn();
539
540 function_return_.Jump();
541 }
Andrei Popescu31002712010-02-23 13:46:05 +0000542}
543
544
// The following statement and literal visitors are not yet implemented
// on MIPS.
void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitForStatement(ForStatement* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitForInStatement(ForInStatement* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
  UNIMPLEMENTED_MIPS();
}
598
599
Steve Block6ded16b2010-05-10 14:33:55 +0100600void CodeGenerator::VisitSharedFunctionInfoLiteral(
601 SharedFunctionInfoLiteral* node) {
Andrei Popescu31002712010-02-23 13:46:05 +0000602 UNIMPLEMENTED_MIPS();
603}
604
605
606void CodeGenerator::VisitConditional(Conditional* node) {
607 UNIMPLEMENTED_MIPS();
608}
609
610
611void CodeGenerator::VisitSlot(Slot* node) {
Steve Block6ded16b2010-05-10 14:33:55 +0100612#ifdef DEBUG
613 int original_height = frame_->height();
614#endif
615 VirtualFrame::SpilledScope spilled_scope;
616 Comment cmnt(masm_, "[ Slot");
617 LoadFromSlot(node, typeof_state());
618 ASSERT(frame_->height() == original_height + 1);
Andrei Popescu31002712010-02-23 13:46:05 +0000619}
620
621
622void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
Steve Block6ded16b2010-05-10 14:33:55 +0100623#ifdef DEBUG
624 int original_height = frame_->height();
625#endif
626 VirtualFrame::SpilledScope spilled_scope;
627 Comment cmnt(masm_, "[ VariableProxy");
628
629 Variable* var = node->var();
630 Expression* expr = var->rewrite();
631 if (expr != NULL) {
632 Visit(expr);
633 } else {
634 ASSERT(var->is_global());
635 Reference ref(this, node);
636 ref.GetValueAndSpill();
637 }
638 ASSERT(frame_->height() == original_height + 1);
Andrei Popescu31002712010-02-23 13:46:05 +0000639}
640
641
642void CodeGenerator::VisitLiteral(Literal* node) {
Steve Block6ded16b2010-05-10 14:33:55 +0100643#ifdef DEBUG
644 int original_height = frame_->height();
645#endif
646 VirtualFrame::SpilledScope spilled_scope;
647 Comment cmnt(masm_, "[ Literal");
648 __ li(t0, Operand(node->handle()));
649 frame_->EmitPush(t0);
650 ASSERT(frame_->height() == original_height + 1);
Andrei Popescu31002712010-02-23 13:46:05 +0000651}
652
653
// The following literal visitors are not yet implemented on MIPS.
void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
  UNIMPLEMENTED_MIPS();
}
672
673
674void CodeGenerator::VisitAssignment(Assignment* node) {
Steve Block6ded16b2010-05-10 14:33:55 +0100675#ifdef DEBUG
676 int original_height = frame_->height();
677#endif
678 VirtualFrame::SpilledScope spilled_scope;
679 Comment cmnt(masm_, "[ Assignment");
680
681 { Reference target(this, node->target());
682 if (target.is_illegal()) {
683 // Fool the virtual frame into thinking that we left the assignment's
684 // value on the frame.
685 frame_->EmitPush(zero_reg);
686 ASSERT(frame_->height() == original_height + 1);
687 return;
688 }
689
690 if (node->op() == Token::ASSIGN ||
691 node->op() == Token::INIT_VAR ||
692 node->op() == Token::INIT_CONST) {
693 LoadAndSpill(node->value());
694 } else {
695 UNIMPLEMENTED_MIPS();
696 }
697
698 Variable* var = node->target()->AsVariableProxy()->AsVariable();
699 if (var != NULL &&
700 (var->mode() == Variable::CONST) &&
701 node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) {
702 // Assignment ignored - leave the value on the stack.
703 } else {
704 CodeForSourcePosition(node->position());
705 if (node->op() == Token::INIT_CONST) {
706 // Dynamic constant initializations must use the function context
707 // and initialize the actual constant declared. Dynamic variable
708 // initializations are simply assignments and use SetValue.
709 target.SetValue(CONST_INIT);
710 } else {
711 target.SetValue(NOT_CONST_INIT);
712 }
713 }
714 }
715 ASSERT(frame_->height() == original_height + 1);
Andrei Popescu31002712010-02-23 13:46:05 +0000716}
717
718
// Not yet implemented on MIPS.
void CodeGenerator::VisitThrow(Throw* node) {
  UNIMPLEMENTED_MIPS();
}


// Not yet implemented on MIPS.
void CodeGenerator::VisitProperty(Property* node) {
  UNIMPLEMENTED_MIPS();
}
727
728
729void CodeGenerator::VisitCall(Call* node) {
Steve Block6ded16b2010-05-10 14:33:55 +0100730#ifdef DEBUG
731 int original_height = frame_->height();
732#endif
733 VirtualFrame::SpilledScope spilled_scope;
734 Comment cmnt(masm_, "[ Call");
735
736 Expression* function = node->expression();
737 ZoneList<Expression*>* args = node->arguments();
738
739 // Standard function call.
740 // Check if the function is a variable or a property.
741 Variable* var = function->AsVariableProxy()->AsVariable();
742 Property* property = function->AsProperty();
743
744 // ------------------------------------------------------------------------
745 // Fast-case: Use inline caching.
746 // ---
747 // According to ECMA-262, section 11.2.3, page 44, the function to call
748 // must be resolved after the arguments have been evaluated. The IC code
749 // automatically handles this by loading the arguments before the function
750 // is resolved in cache misses (this also holds for megamorphic calls).
751 // ------------------------------------------------------------------------
752
753 if (var != NULL && var->is_possibly_eval()) {
754 UNIMPLEMENTED_MIPS();
755 } else if (var != NULL && !var->is_this() && var->is_global()) {
756 // ----------------------------------
757 // JavaScript example: 'foo(1, 2, 3)' // foo is global
758 // ----------------------------------
759
760 int arg_count = args->length();
761
762 // We need sp to be 8 bytes aligned when calling the stub.
763 __ SetupAlignedCall(t0, arg_count);
764
765 // Pass the global object as the receiver and let the IC stub
766 // patch the stack to use the global proxy as 'this' in the
767 // invoked function.
768 LoadGlobal();
769
770 // Load the arguments.
771 for (int i = 0; i < arg_count; i++) {
772 LoadAndSpill(args->at(i));
773 }
774
775 // Setup the receiver register and call the IC initialization code.
776 __ li(a2, Operand(var->name()));
777 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
778 Handle<Code> stub = ComputeCallInitialize(arg_count, in_loop);
779 CodeForSourcePosition(node->position());
780 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT,
781 arg_count + 1);
782 __ ReturnFromAlignedCall();
783 __ lw(cp, frame_->Context());
784 // Remove the function from the stack.
785 frame_->EmitPush(v0);
786
787 } else if (var != NULL && var->slot() != NULL &&
788 var->slot()->type() == Slot::LOOKUP) {
789 UNIMPLEMENTED_MIPS();
790 } else if (property != NULL) {
791 UNIMPLEMENTED_MIPS();
792 } else {
793 UNIMPLEMENTED_MIPS();
794 }
795
796 ASSERT(frame_->height() == original_height + 1);
Andrei Popescu31002712010-02-23 13:46:05 +0000797}
798
799
// The following visitor and inline-runtime generators are not yet
// implemented on MIPS.
void CodeGenerator::VisitCallNew(CallNew* node) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
  UNIMPLEMENTED_MIPS();
}


void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
  UNIMPLEMENTED_MIPS();
}
853
854
Andrei Popescu31002712010-02-23 13:46:05 +0000855// This should generate code that performs a charCodeAt() call or returns
856// undefined in order to trigger the slow case, Runtime_StringCharCodeAt.
857// It is not yet implemented on ARM, so it always goes to the slow case.
858void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
859 UNIMPLEMENTED_MIPS();
860}
861
862
Steve Block6ded16b2010-05-10 14:33:55 +0100863void CodeGenerator::GenerateCharFromCode(ZoneList<Expression*>* args) {
864 UNIMPLEMENTED_MIPS();
865}
866
867
Andrei Popescu31002712010-02-23 13:46:05 +0000868void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
869 UNIMPLEMENTED_MIPS();
870}
871
872
Andrei Popescu402d9372010-02-26 13:31:12 +0000873void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
874 UNIMPLEMENTED_MIPS();
875}
876
877
Andrei Popescu31002712010-02-23 13:46:05 +0000878void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
879 UNIMPLEMENTED_MIPS();
880}
881
882
883void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
884 UNIMPLEMENTED_MIPS();
885}
886
887
Steve Block6ded16b2010-05-10 14:33:55 +0100888void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
Andrei Popescu31002712010-02-23 13:46:05 +0000889 UNIMPLEMENTED_MIPS();
890}
891
892
Steve Block6ded16b2010-05-10 14:33:55 +0100893void CodeGenerator::GenerateRandomHeapNumber(ZoneList<Expression*>* args) {
Andrei Popescu31002712010-02-23 13:46:05 +0000894 UNIMPLEMENTED_MIPS();
895}
896
897
898void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
899 UNIMPLEMENTED_MIPS();
900}
901
902
903void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
904 UNIMPLEMENTED_MIPS();
905}
906
907
908void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
909 UNIMPLEMENTED_MIPS();
910}
911
912
913void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
914 UNIMPLEMENTED_MIPS();
915}
916
917
918void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
919 UNIMPLEMENTED_MIPS();
920}
921
922
923void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
924 UNIMPLEMENTED_MIPS();
925}
926
927
928void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
929 UNIMPLEMENTED_MIPS();
930}
931
932
933void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
934 UNIMPLEMENTED_MIPS();
935}
936
937
Andrei Popescu402d9372010-02-26 13:31:12 +0000938void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
939 UNIMPLEMENTED_MIPS();
940}
941
942
Andrei Popescu31002712010-02-23 13:46:05 +0000943void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
944 UNIMPLEMENTED_MIPS();
945}
946
947
948void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
949 UNIMPLEMENTED_MIPS();
950}
951
952
953void CodeGenerator::VisitCountOperation(CountOperation* node) {
954 UNIMPLEMENTED_MIPS();
955}
956
957
958void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
959 UNIMPLEMENTED_MIPS();
960}
961
962
963void CodeGenerator::VisitThisFunction(ThisFunction* node) {
964 UNIMPLEMENTED_MIPS();
965}
966
967
968void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
969 UNIMPLEMENTED_MIPS();
970}
971
972
#ifdef DEBUG
// Debug-only sanity check; always true since there is no register
// allocation state to validate on MIPS yet.
bool CodeGenerator::HasValidEntryRegisters() { return true; }
#endif
976
977
978#undef __
979#define __ ACCESS_MASM(masm)
980
Steve Block6ded16b2010-05-10 14:33:55 +0100981// -----------------------------------------------------------------------------
982// Reference support
Andrei Popescu31002712010-02-23 13:46:05 +0000983
Steve Block6ded16b2010-05-10 14:33:55 +0100984Reference::Reference(CodeGenerator* cgen,
985 Expression* expression,
986 bool persist_after_get)
987 : cgen_(cgen),
988 expression_(expression),
989 type_(ILLEGAL),
990 persist_after_get_(persist_after_get) {
991 cgen->LoadReference(this);
992}
993
994
// A reference must have been explicitly unloaded (or be illegal) before
// it is destroyed.
Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}
998
999
1000Handle<String> Reference::GetName() {
1001 ASSERT(type_ == NAMED);
1002 Property* property = expression_->AsProperty();
1003 if (property == NULL) {
1004 // Global variable reference treated as a named property reference.
1005 VariableProxy* proxy = expression_->AsVariableProxy();
1006 ASSERT(proxy->AsVariable() != NULL);
1007 ASSERT(proxy->AsVariable()->is_global());
1008 return proxy->name();
1009 } else {
1010 Literal* raw_name = property->key()->AsLiteral();
1011 ASSERT(raw_name != NULL);
1012 return Handle<String>(String::cast(*raw_name->handle()));
1013 }
1014}
1015
1016
// Emit code to load the reference's value. All reference kinds (SLOT,
// NAMED, KEYED) are still UNIMPLEMENTED_MIPS() stubs.
void Reference::GetValue() {
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  ASSERT(!cgen_->has_cc());
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      UNIMPLEMENTED_MIPS();
      break;
    }

    case NAMED: {
      UNIMPLEMENTED_MIPS();
      break;
    }

    case KEYED: {
      UNIMPLEMENTED_MIPS();
      break;
    }

    default:
      UNREACHABLE();
  }
}
1046
1047
// Emit code to store the value on top of the frame into the reference.
// Only SLOT targets are implemented (via StoreToSlot); NAMED and KEYED
// stores are still UNIMPLEMENTED_MIPS().
void Reference::SetValue(InitState init_state) {
  ASSERT(!is_illegal());
  ASSERT(!cgen_->has_cc());
  MacroAssembler* masm = cgen_->masm();
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Store to Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
      cgen_->StoreToSlot(slot, init_state);
      cgen_->UnloadReference(this);
      break;
    }

    case NAMED: {
      UNIMPLEMENTED_MIPS();
      break;
    }

    case KEYED: {
      UNIMPLEMENTED_MIPS();
      break;
    }

    default:
      UNREACHABLE();
  }
}
1080
1081
// On entry a0 and a1 are the things to be compared. On exit v0 is 0,
// positive or negative to indicate the result of the comparison.
void CompareStub::Generate(MacroAssembler* masm) {
  // Not yet ported to MIPS: emit a breakpoint so a stray call traps at
  // runtime.  The break code presumably identifies which unimplemented
  // stub fired -- each stub in this file uses a distinct value.
  UNIMPLEMENTED_MIPS();
  __ break_(0x765);
}
1088
1089
// Returns the code object for a binary-op stub keyed by |key| and
// |type_info|.  The MIPS port has no binary-op stubs yet, so callers
// always receive the null handle.
Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
  UNIMPLEMENTED_MIPS();
  return Handle<Code>::null();
}
1094
1095
// Stack-overflow/interrupt check stub: unimplemented on MIPS, traps
// with a distinctive break code if reached.
void StackCheckStub::Generate(MacroAssembler* masm) {
  UNIMPLEMENTED_MIPS();
  __ break_(0x790);
}
1100
1101
// Throws the exception on top of the stack: unimplemented on MIPS,
// traps with a distinctive break code if reached.
void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
  UNIMPLEMENTED_MIPS();
  __ break_(0x808);
}
1106
1107
// Throws an uncatchable exception (out-of-memory or termination):
// unimplemented on MIPS, traps with a distinctive break code if reached.
void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
                                          UncatchableExceptionType type) {
  UNIMPLEMENTED_MIPS();
  __ break_(0x815);
}
1113
// Emits one attempt at calling the C builtin in s2.  On success the
// exit frame is left and control returns to the caller; on failure the
// code falls through to retry (for RETRY_AFTER_GC failures) or jumps to
// one of the throw labels.  |do_gc| and |always_allocate| select the
// GC-before-call and forced-allocation variants, both still
// unimplemented on MIPS.
void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal_exception,
                              Label* throw_termination_exception,
                              Label* throw_out_of_memory_exception,
                              bool do_gc,
                              bool always_allocate) {
  // s0: number of arguments including receiver (C callee-saved)
  // s1: pointer to the first argument (C callee-saved)
  // s2: pointer to builtin function (C callee-saved)

  if (do_gc) {
    UNIMPLEMENTED_MIPS();
  }

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth();
  if (always_allocate) {
    UNIMPLEMENTED_MIPS();
  }

  // Call C built-in.
  // a0 = argc, a1 = argv
  __ mov(a0, s0);
  __ mov(a1, s1);

  __ CallBuiltin(s2);

  if (always_allocate) {
    UNIMPLEMENTED_MIPS();
  }

  // Check for failure result.  The ASSERT guarantees that adding 1 to a
  // failure-tagged value clears its kFailureTagMask bits, so t0 is zero
  // exactly when v0 holds a Failure object.
  Label failure_returned;
  ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
  __ addiu(a2, v0, 1);
  __ andi(t0, a2, kFailureTagMask);
  __ Branch(eq, &failure_returned, t0, Operand(zero_reg));

  // Exit C frame and return.
  // v0:v1: result
  // sp: stack pointer
  // fp: frame pointer
  __ LeaveExitFrame(mode_);

  // Check if we should retry or throw exception.  A zero failure type
  // field means RETRY_AFTER_GC (asserted below), in which case the
  // caller simply runs the next GenerateCore attempt.
  Label retry;
  __ bind(&failure_returned);
  ASSERT(Failure::RETRY_AFTER_GC == 0);
  __ andi(t0, v0, ((1 << kFailureTypeTagSize) - 1) << kFailureTagSize);
  __ Branch(eq, &retry, t0, Operand(zero_reg));

  // Special handling of out of memory exceptions.
  Failure* out_of_memory = Failure::OutOfMemoryException();
  __ Branch(eq, throw_out_of_memory_exception,
            v0, Operand(reinterpret_cast<int32_t>(out_of_memory)));

  // Retrieve the pending exception and clear the variable by writing
  // the hole value back into the pending-exception slot.
  __ LoadExternalReference(t0, ExternalReference::the_hole_value_location());
  __ lw(a3, MemOperand(t0));
  __ LoadExternalReference(t0,
      ExternalReference(Top::k_pending_exception_address));
  __ lw(v0, MemOperand(t0));
  __ sw(a3, MemOperand(t0));

  // Special handling of termination exceptions which are uncatchable
  // by javascript code.
  __ Branch(eq, throw_termination_exception,
            v0, Operand(Factory::termination_exception()));

  // Handle normal exception.
  __ b(throw_normal_exception);
  __ nop(); // Branch delay slot nop.

  __ bind(&retry);  // Pass last failure (in v0) as parameter when retrying.
}
1189
// Entry point for calling a C++ runtime builtin from JavaScript.  Sets
// up an exit frame, then makes up to three attempts at the call:
// plain, after a space-specific GC, and after a full GC with forced
// allocation.  Uncaught failures dispatch to the throw paths below.
void CEntryStub::Generate(MacroAssembler* masm) {
  // Called from JavaScript; parameters are on stack as if calling JS function
  // a0: number of arguments including receiver
  // a1: pointer to builtin function
  // fp: frame pointer (restored after C call)
  // sp: stack pointer (restored as callee's sp after C call)
  // cp: current context (C callee-saved)

  // NOTE: Invocations of builtins may return failure objects
  // instead of a proper result. The builtin entry handles
  // this by performing a garbage collection and retrying the
  // builtin once.

  // Enter the exit frame that transitions from JavaScript to C++.
  __ EnterExitFrame(mode_, s0, s1, s2);

  // s0: number of arguments (C callee-saved)
  // s1: pointer to first argument (C callee-saved)
  // s2: pointer to builtin function (C callee-saved)

  Label throw_normal_exception;
  Label throw_termination_exception;
  Label throw_out_of_memory_exception;

  // Call into the runtime system.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               false,
               false);

  // Do space-specific GC and retry runtime call.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               false);

  // Do full GC and retry runtime call one final time.
  // Preload v0 with an internal-error failure in case the call does not
  // come back with a result.
  Failure* failure = Failure::InternalError();
  __ li(v0, Operand(reinterpret_cast<int32_t>(failure)));
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               true);

  __ bind(&throw_out_of_memory_exception);
  GenerateThrowUncatchable(masm, OUT_OF_MEMORY);

  __ bind(&throw_termination_exception);
  GenerateThrowUncatchable(masm, TERMINATION);

  __ bind(&throw_normal_exception);
  GenerateThrowTOS(masm);
}
1249
// Builds the JS entry trampoline body: saves C callee-saved registers,
// constructs an entry frame, installs a try-handler so C++ regains
// control on an uncaught JS exception, and calls through the
// (construct-)entry trampoline builtin.  |is_construct| selects the
// construct-call variant.
void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
  Label invoke, exit;

  // Registers:
  // a0: entry address
  // a1: function
  // a2: receiver
  // a3: argc
  //
  // Stack:
  // 4 args slots
  // args

  // Save callee saved registers on the stack.
  __ MultiPush((kCalleeSaved | ra.bit()) & ~sp.bit());

  // We build an EntryFrame.
  __ li(t3, Operand(-1));  // Push a bad frame pointer to fail if it is used.
  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
  __ li(t2, Operand(Smi::FromInt(marker)));
  __ li(t1, Operand(Smi::FromInt(marker)));
  __ LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address));
  __ lw(t0, MemOperand(t0));
  __ MultiPush(t0.bit() | t1.bit() | t2.bit() | t3.bit());

  // Setup frame pointer for the frame to be pushed.
  __ addiu(fp, sp, -EntryFrameConstants::kCallerFPOffset);

  // Load argv in s0 register, skipping the saved registers and the
  // C argument slots pushed above.
  __ lw(s0, MemOperand(sp, (kNumCalleeSaved + 1) * kPointerSize +
                           StandardFrameConstants::kCArgsSlotsSize));

  // Registers:
  // a0: entry_address
  // a1: function
  // a2: receiver_pointer
  // a3: argc
  // s0: argv
  //
  // Stack:
  // caller fp          |
  // function slot      | entry frame
  // context slot       |
  // bad fp (0xff...f)  |
  // callee saved registers + ra
  // 4 args slots
  // args

  // Call a faked try-block that does the invoke.
  __ bal(&invoke);
  __ nop();  // Branch delay slot nop.

  // Caught exception: Store result (exception) in the pending
  // exception field in the JSEnv and return a failure sentinel.
  // Coming in here the fp will be invalid because the PushTryHandler below
  // sets it to 0 to signal the existence of the JSEntry frame.
  __ LoadExternalReference(t0,
      ExternalReference(Top::k_pending_exception_address));
  __ sw(v0, MemOperand(t0));  // We come back from 'invoke'. result is in v0.
  __ li(v0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
  __ b(&exit);
  __ nop();  // Branch delay slot nop.

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
  // If an exception not caught by another handler occurs, this handler
  // returns control to the code after the bal(&invoke) above, which
  // restores all kCalleeSaved registers (including cp and fp) to their
  // saved values before returning a failure to C.

  // Clear any pending exceptions by storing the hole value into the
  // pending-exception slot.
  __ LoadExternalReference(t0, ExternalReference::the_hole_value_location());
  __ lw(t1, MemOperand(t0));
  __ LoadExternalReference(t0,
      ExternalReference(Top::k_pending_exception_address));
  __ sw(t1, MemOperand(t0));

  // Invoke the function by calling through JS entry trampoline builtin.
  // Notice that we cannot store a reference to the trampoline code directly in
  // this stub, because runtime stubs are not traversed when doing GC.

  // Registers:
  // a0: entry_address
  // a1: function
  // a2: receiver_pointer
  // a3: argc
  // s0: argv
  //
  // Stack:
  // handler frame
  // entry frame
  // callee saved registers + ra
  // 4 args slots
  // args

  if (is_construct) {
    ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
    __ LoadExternalReference(t0, construct_entry);
  } else {
    ExternalReference entry(Builtins::JSEntryTrampoline);
    __ LoadExternalReference(t0, entry);
  }
  __ lw(t9, MemOperand(t0));  // Deref address.

  // Call JSEntryTrampoline.
  __ addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag);
  __ CallBuiltin(t9);

  // Unlink this frame from the handler chain. When reading the
  // address of the next handler, there is no need to use the address
  // displacement since the current stack pointer (sp) points directly
  // to the stack handler.
  __ lw(t1, MemOperand(sp, StackHandlerConstants::kNextOffset));
  __ LoadExternalReference(t0, ExternalReference(Top::k_handler_address));
  __ sw(t1, MemOperand(t0));

  // This restores sp to its position before PushTryHandler.
  __ addiu(sp, sp, StackHandlerConstants::kSize);

  __ bind(&exit);  // v0 holds result
  // Restore the top frame descriptors from the stack.
  __ Pop(t1);
  __ LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address));
  __ sw(t1, MemOperand(t0));

  // Reset the stack to the callee saved registers.
  __ addiu(sp, sp, -EntryFrameConstants::kCallerFPOffset);

  // Restore callee saved registers from the stack.
  __ MultiPop((kCalleeSaved | ra.bit()) & ~sp.bit());
  // Return.
  __ Jump(ra);
}
1384
1385
// This stub performs an instanceof, calling the builtin function if
// necessary. Uses a1 for the object, a0 for the function that it may
// be an instance of (these are fetched from the stack).
void InstanceofStub::Generate(MacroAssembler* masm) {
  // Not yet ported to MIPS; trap with a distinctive break code.
  UNIMPLEMENTED_MIPS();
  __ break_(0x845);
}
1393
1394
// Reads the length of the arguments object: unimplemented on MIPS,
// traps with a distinctive break code if reached.
void ArgumentsAccessStub::GenerateReadLength(MacroAssembler* masm) {
  UNIMPLEMENTED_MIPS();
  __ break_(0x851);
}
1399
1400
// Reads an element of the arguments object: unimplemented on MIPS,
// traps with a distinctive break code if reached.
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  UNIMPLEMENTED_MIPS();
  __ break_(0x857);
}
1405
1406
// Allocates a new arguments object: unimplemented on MIPS, traps with
// a distinctive break code if reached.
void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
  UNIMPLEMENTED_MIPS();
  __ break_(0x863);
}
1411
1412
// Returns a human-readable name for this stub (used for code comments
// and profiling on other ports).  Unimplemented on MIPS; callers must
// tolerate the NULL return until the port fills this in.
const char* CompareStub::GetName() {
  UNIMPLEMENTED_MIPS();
  return NULL;  // UNIMPLEMENTED RETURN
}
1417
1418
int CompareStub::MinorKey() {
  // Encode the two parameters in a unique 16 bit value.
  // Shifting cc_ right by 27 -- one less than the 28 used in the ASSERT
  // -- leaves bit 0 of the result clear for the strict flag, so the two
  // fields cannot collide.  NOTE(review): this assumes condition codes
  // occupy only the top bits of cc_ (as on ARM, where this encoding
  // originates); confirm the MIPS condition values satisfy that too.
  ASSERT(static_cast<unsigned>(cc_) >> 28 < (1 << 15));
  return (static_cast<unsigned>(cc_) >> 27) | (strict_ ? 1 : 0);
}
1424
1425
1426#undef __
1427
1428} } // namespace v8::internal