// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "bootstrapper.h"
#include "code-stubs.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "register-allocator-inl.h"
#include "scopes.h"
#include "virtual-frame-inl.h"

namespace v8 {
namespace internal {

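// Shorthand used throughout this file: '__ insn(...)' routes through
// ACCESS_MASM to emit 'insn' on the MacroAssembler currently bound to the
// macro (first 'masm', later redefined to 'masm_').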
#define __ ACCESS_MASM(masm)

// -------------------------------------------------------------------------
// Platform-specific FrameRegisterState functions.

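// Each registers_[i] entry is either kPush (the register is saved by
// pushing it), kIgnore (the register needs no saving), or an rbp-relative
// frame offset where the register's value belongs; the kSyncedFlag bit
// marks offsets whose memory copy is already up to date, so no store is
// needed for them on Save.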
void FrameRegisterState::Save(MacroAssembler* masm) const {
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int action = registers_[i];
    if (action == kPush) {
      __ push(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore && (action & kSyncedFlag) == 0) {
      __ movq(Operand(rbp, action), RegisterAllocator::ToRegister(i));
    }
  }
}


void FrameRegisterState::Restore(MacroAssembler* masm) const {
  // Restore registers in reverse order due to the stack.
  for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) {
    int action = registers_[i];
    if (action == kPush) {
      __ pop(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore) {
      action &= ~kSyncedFlag;
      __ movq(RegisterAllocator::ToRegister(i), Operand(rbp, action));
    }
  }
}


#undef __
#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

void DeferredCode::SaveRegisters() {
  frame_state_.Save(masm_);
}


void DeferredCode::RestoreRegisters() {
  frame_state_.Restore(masm_);
}


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void VirtualFrameRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  frame_state_->Save(masm);
}


void VirtualFrameRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  frame_state_->Restore(masm);
}


void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterInternalFrame();
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveInternalFrame();
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      destination_(NULL),
      previous_(NULL) {
  owner_->set_state(this);
}


CodeGenState::CodeGenState(CodeGenerator* owner,
                           ControlDestination* destination)
    : owner_(owner),
      destination_(destination),
      previous_(owner->state()) {
  owner_->set_state(this);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}


// -------------------------------------------------------------------------
// CodeGenerator implementation.

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      state_(NULL),
      loop_nesting_(0),
      function_return_is_shadowed_(false),
      in_spilled_code_(false) {
}


// Calling conventions:
// rbp: caller's frame pointer
// rsp: stack pointer
// rdi: called JS function
// rsi: callee's context

void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;
  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  set_in_spilled_code(false);

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ = info->is_in_loop() ? 1 : 0;

  JumpTarget::set_compiling_deferred_code(false);

  {
    CodeGenState state(this);
    // Entry:
    // Stack: receiver, arguments, return address.
    // rbp: caller's frame pointer
    // rsp: stack pointer
    // rdi: called JS function
    // rsi: callee's context
    allocator_->Initialize();

#ifdef DEBUG
    if (strlen(FLAG_stop_at) > 0 &&
        info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
      frame_->SpillAll();
      __ int3();
    }
#endif

    frame_->Enter();

    // Allocate space for locals and initialize them.
    frame_->AllocateStackSlots();

    // Allocate the local context if needed.
    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (heap_slots > 0) {
      Comment cmnt(masm_, "[ allocate local context");
      // Allocate local context.
      // Get outer context and create a new context based on it.
      frame_->PushFunction();
      Result context;
      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(heap_slots);
        context = frame_->CallStub(&stub, 1);
      } else {
        context = frame_->CallRuntime(Runtime::kNewContext, 1);
      }

      // Update context local.
      frame_->SaveContextRegister();

      // Verify that the runtime call result and rsi agree.
      if (FLAG_debug_code) {
        __ cmpq(context.reg(), rsi);
        __ Assert(equal, "Runtime::NewContext should end up in rsi");
      }
    }

    // TODO(1241774): Improve this code:
    // 1) only needed if we have a context
    // 2) no need to recompute context ptr every single time
    // 3) don't copy parameter operand code from SlotOperand!
    {
      Comment cmnt2(masm_, "[ copy context parameters into .context");
      // Note that iteration order is relevant here! If we have the same
      // parameter twice (e.g., function (x, y, x)), and that parameter
      // needs to be copied into the context, it must be the last argument
      // passed to the parameter that needs to be copied. This is a rare
      // case so we don't check for it, instead we rely on the copying
      // order: such a parameter is copied repeatedly into the same
      // context location and thus the last value is what is seen inside
      // the function.
      for (int i = 0; i < scope()->num_parameters(); i++) {
        Variable* par = scope()->parameter(i);
        Slot* slot = par->AsSlot();
        if (slot != NULL && slot->type() == Slot::CONTEXT) {
          // The use of SlotOperand below is safe in unspilled code
          // because the slot is guaranteed to be a context slot.
          //
          // There are no parameters in the global scope.
          ASSERT(!scope()->is_global_scope());
          frame_->PushParameterAt(i);
          Result value = frame_->Pop();
          value.ToRegister();

          // SlotOperand loads context.reg() with the context object
          // stored to, used below in RecordWrite.
          Result context = allocator_->Allocate();
          ASSERT(context.is_valid());
          __ movq(SlotOperand(slot, context.reg()), value.reg());
          int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
          Result scratch = allocator_->Allocate();
          ASSERT(scratch.is_valid());
          frame_->Spill(context.reg());
          frame_->Spill(value.reg());
          __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
        }
      }
    }

    // Store the arguments object. This must happen after context
    // initialization because the arguments object may be stored in
    // the context.
    if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
      StoreArgumentsObject(true);
    }

    // Initialize ThisFunction reference if present.
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      frame_->Push(Factory::the_hole_value());
      StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT);
    }

    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }
    CheckStack();

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());

      // Handle the return from the function.
      if (has_valid_frame()) {
        // If there is a valid frame, control flow can fall off the end of
        // the body. In that case there is an implicit return statement.
        ASSERT(!function_return_is_shadowed_);
        CodeForReturnPosition(info->function());
        frame_->PrepareForReturn();
        Result undefined(Factory::undefined_value());
        if (function_return_.is_bound()) {
          function_return_.Jump(&undefined);
        } else {
          function_return_.Bind(&undefined);
          GenerateReturnSequence(&undefined);
        }
      } else if (function_return_.is_linked()) {
        // If the return target has dangling jumps to it, then we have not
        // yet generated the return sequence. This can happen when (a)
        // control does not flow off the end of the body so we did not
        // compile an artificial return statement just above, and (b) there
        // are return statements in the body but (c) they are all shadowed.
        Result return_value;
        function_return_.Bind(&return_value);
        GenerateReturnSequence(&return_value);
      }
    }
  }

  // Adjust for function-level loop nesting.
  ASSERT_EQ(loop_nesting_, info->is_in_loop() ? 1 : 0);
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(state_ == NULL);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    JumpTarget::set_compiling_deferred_code(true);
    ProcessDeferred();
    JumpTarget::set_compiling_deferred_code(false);
  }

  // There is no need to delete the register allocator, it is a
  // stack-allocated local.
  allocator_ = NULL;
}


Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(rsi));  // do not overwrite context register
      Register context = rsi;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ movq(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ movq(tmp, FieldOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now. Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we are
      // always at a function context. However it is safe to dereference be-
      // cause the function context of a function context is itself. Before
      // deleting this mov we should try to create a counter-example first,
      // though...)
      __ movq(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return Operand(rsp, 0);
  }
}


Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
                                                         Result tmp,
                                                         JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  ASSERT(tmp.is_register());
  Register context = rsi;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
                Immediate(0));
        slow->Branch(not_equal, not_taken);
      }
      __ movq(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
      __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
      context = tmp.reg();
    }
  }
  // Check that last extension is NULL.
  __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  slow->Branch(not_equal, not_taken);
  __ movq(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp.reg(), slot->index());
}


// Emit code to load the value of an expression to the top of the
// frame. If the expression is boolean-valued it may be compiled (or
// partially compiled) into control flow to the control destination.
// If force_control is true, control flow is forced.
void CodeGenerator::LoadCondition(Expression* expr,
                                  ControlDestination* dest,
                                  bool force_control) {
  ASSERT(!in_spilled_code());
  int original_height = frame_->height();

  { CodeGenState new_state(this, dest);
    Visit(expr);

    // If we hit a stack overflow, we may not have actually visited
    // the expression. In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (e.g., a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        !dest->is_used() &&
        frame_->height() == original_height) {
      dest->Goto(true);
    }
  }

  if (force_control && !dest->is_used()) {
    // Convert the TOS value into flow to the control destination.
    ToBoolean(dest);
  }

  ASSERT(!(force_control && !dest->is_used()));
  ASSERT(dest->is_used() || frame_->height() == original_height + 1);
}


void CodeGenerator::LoadAndSpill(Expression* expression) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Load(expression);
  frame_->SpillAll();
  set_in_spilled_code(true);
}


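// Load the value of expr to the top of the frame, merging any boolean
// control flow produced by LoadCondition back into a value on the frame.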
void CodeGenerator::Load(Expression* expr) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(!in_spilled_code());
  JumpTarget true_target;
  JumpTarget false_target;
  ControlDestination dest(&true_target, &false_target, true);
  LoadCondition(expr, &dest, false);

  if (dest.false_was_fall_through()) {
    // The false target was just bound.
    JumpTarget loaded;
    frame_->Push(Factory::false_value());
    // There may be dangling jumps to the true target.
    if (true_target.is_linked()) {
      loaded.Jump();
      true_target.Bind();
      frame_->Push(Factory::true_value());
      loaded.Bind();
    }

  } else if (dest.is_used()) {
    // There is true, and possibly false, control flow (with true as
    // the fall through).
    JumpTarget loaded;
    frame_->Push(Factory::true_value());
    if (false_target.is_linked()) {
      loaded.Jump();
      false_target.Bind();
      frame_->Push(Factory::false_value());
      loaded.Bind();
    }

  } else {
    // We have a valid value on top of the frame, but we still may
    // have dangling jumps to the true and false targets from nested
    // subexpressions (e.g., the left subexpressions of the
    // short-circuited boolean operators).
    ASSERT(has_valid_frame());
    if (true_target.is_linked() || false_target.is_linked()) {
      JumpTarget loaded;
      loaded.Jump();  // Don't lose the current TOS.
      if (true_target.is_linked()) {
        true_target.Bind();
        frame_->Push(Factory::true_value());
        if (false_target.is_linked()) {
          loaded.Jump();
        }
      }
      if (false_target.is_linked()) {
        false_target.Bind();
        frame_->Push(Factory::false_value());
      }
      loaded.Bind();
    }
  }

  ASSERT(has_valid_frame());
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::LoadGlobal() {
  if (in_spilled_code()) {
    frame_->EmitPush(GlobalObjectOperand());
  } else {
    Result temp = allocator_->Allocate();
    __ movq(temp.reg(), GlobalObjectOperand());
    frame_->Push(&temp);
  }
}


void CodeGenerator::LoadGlobalReceiver() {
  Result temp = allocator_->Allocate();
  Register reg = temp.reg();
  __ movq(reg, GlobalObjectOperand());
  __ movq(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->Push(&temp);
}


void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->AsSlot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    LoadFromSlotCheckForArguments(variable->AsSlot(), INSIDE_TYPEOF);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}


ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
  ASSERT(scope()->arguments_shadow() != NULL);
  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0)
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}


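// Materialize the arguments object and store it into the 'arguments'
// variable and its shadow. Under lazy allocation, the initial store puts
// a hole sentinel there instead, marking the object as not yet allocated.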
Result CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->Push(Factory::the_hole_value());
  } else {
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    frame_->PushFunction();
    frame_->PushReceiverSlotAddress();
    frame_->Push(Smi::FromInt(scope()->num_parameters()));
    Result result = frame_->CallStub(&stub, 3);
    frame_->Push(&result);
  }

  Variable* arguments = scope()->arguments();
  Variable* shadow = scope()->arguments_shadow();
  ASSERT(arguments != NULL && arguments->AsSlot() != NULL);
  ASSERT(shadow != NULL && shadow->AsSlot() != NULL);
  JumpTarget done;
  bool skip_arguments = false;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has
    // already been written to. This can happen if a function
    // has a local variable named 'arguments'.
    LoadFromSlot(arguments->AsSlot(), NOT_INSIDE_TYPEOF);
    Result probe = frame_->Pop();
    if (probe.is_constant()) {
      // We have to skip updating the arguments object if it has
      // been assigned a proper value.
      skip_arguments = !probe.handle()->IsTheHole();
    } else {
      __ CompareRoot(probe.reg(), Heap::kTheHoleValueRootIndex);
      probe.Unuse();
      done.Branch(not_equal);
    }
  }
  if (!skip_arguments) {
    StoreToSlot(arguments->AsSlot(), NOT_CONST_INIT);
    if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  }
  StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT);
  return frame_->Pop();
}

// -------------------------------------------------------------------------
// CodeGenerator implementation of variables, lookups, and stores.

Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  // References are loaded from both spilled and unspilled code. Set the
  // state to unspilled to allow that (and explicitly spill after
  // construction at the construction sites).
  bool was_in_spilled_code = in_spilled_code_;
  in_spilled_code_ = false;

  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property. Global variables are treated as named property references.
    if (var->is_global()) {
      // If rax is free, the register allocator prefers it. Thus the code
      // generator will load the global object into rax, which is where
      // LoadIC wants it. Most uses of Reference call LoadIC directly
      // after the reference is created.
      frame_->Spill(rax);
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->AsSlot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }

  in_spilled_code_ = was_in_spilled_code;
}


void CodeGenerator::UnloadReference(Reference* ref) {
  // Pop a reference from the stack while preserving TOS.
  Comment cmnt(masm_, "[ UnloadReference");
  frame_->Nip(ref->size());
  ref->set_unloaded();
}


// ECMA-262, section 9.2, page 30: ToBoolean(). Pop the top of stack and
// convert it to a boolean in the condition code register or jump to
// 'false_target'/'true_target' as appropriate.
void CodeGenerator::ToBoolean(ControlDestination* dest) {
  Comment cmnt(masm_, "[ ToBoolean");

  // The value to convert should be popped from the frame.
  Result value = frame_->Pop();
  value.ToRegister();

  if (value.is_number()) {
    // Fast case if TypeInfo indicates only numbers.
    if (FLAG_debug_code) {
      __ AbortIfNotNumber(value.reg());
    }
    // Smi => false iff zero.
    __ SmiCompare(value.reg(), Smi::FromInt(0));
    if (value.is_smi()) {
      value.Unuse();
      dest->Split(not_zero);
    } else {
      dest->false_target()->Branch(equal);
      Condition is_smi = masm_->CheckSmi(value.reg());
      dest->true_target()->Branch(is_smi);
      __ xorpd(xmm0, xmm0);
      __ ucomisd(xmm0, FieldOperand(value.reg(), HeapNumber::kValueOffset));
      value.Unuse();
      dest->Split(not_zero);
    }
  } else {
    // Fast case checks.
    // 'false' => false.
    __ CompareRoot(value.reg(), Heap::kFalseValueRootIndex);
    dest->false_target()->Branch(equal);

    // 'true' => true.
    __ CompareRoot(value.reg(), Heap::kTrueValueRootIndex);
    dest->true_target()->Branch(equal);

    // 'undefined' => false.
    __ CompareRoot(value.reg(), Heap::kUndefinedValueRootIndex);
    dest->false_target()->Branch(equal);

    // Smi => false iff zero.
    __ SmiCompare(value.reg(), Smi::FromInt(0));
    dest->false_target()->Branch(equal);
    Condition is_smi = masm_->CheckSmi(value.reg());
    dest->true_target()->Branch(is_smi);

    // Call the stub for all other cases.
    frame_->Push(&value);  // Undo the Pop() from above.
    ToBooleanStub stub;
    Result temp = frame_->CallStub(&stub, 1);
    // Convert the result to a condition code.
    __ testq(temp.reg(), temp.reg());
    temp.Unuse();
    dest->Split(not_equal);
  }
}


// Call the specialized stub for a binary operation.
class DeferredInlineBinaryOperation: public DeferredCode {
 public:
  DeferredInlineBinaryOperation(Token::Value op,
                                Register dst,
                                Register left,
                                Register right,
                                OverwriteMode mode)
      : op_(op), dst_(dst), left_(left), right_(right), mode_(mode) {
    set_comment("[ DeferredInlineBinaryOperation");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  Register left_;
  Register right_;
  OverwriteMode mode_;
};


void DeferredInlineBinaryOperation::Generate() {
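  // For ADD, SUB, MUL and DIV, try an inline floating-point fast path:
  // unbox both operands (smi or heap number) into xmm0/xmm1, reuse an
  // operand's heap number for the result where the overwrite mode allows
  // it, and fall back to GenericBinaryOpStub if an operand is neither a
  // smi nor a heap number or if result allocation fails.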
  Label done;
  if ((op_ == Token::ADD)
      || (op_ == Token::SUB)
      || (op_ == Token::MUL)
      || (op_ == Token::DIV)) {
    Label call_runtime;
    Label left_smi, right_smi, load_right, do_op;
    __ JumpIfSmi(left_, &left_smi);
    __ CompareRoot(FieldOperand(left_, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);
    __ movsd(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
    if (mode_ == OVERWRITE_LEFT) {
      __ movq(dst_, left_);
    }
    __ jmp(&load_right);

    __ bind(&left_smi);
    __ SmiToInteger32(left_, left_);
    __ cvtlsi2sd(xmm0, left_);
    __ Integer32ToSmi(left_, left_);
    if (mode_ == OVERWRITE_LEFT) {
      __ AllocateHeapNumber(dst_, no_reg, &call_runtime);
    }

    __ bind(&load_right);
    __ JumpIfSmi(right_, &right_smi);
    __ CompareRoot(FieldOperand(right_, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);
    __ movsd(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
    if (mode_ == OVERWRITE_RIGHT) {
      __ movq(dst_, right_);
    } else if (mode_ == NO_OVERWRITE) {
      __ AllocateHeapNumber(dst_, no_reg, &call_runtime);
    }
    __ jmp(&do_op);

    __ bind(&right_smi);
    __ SmiToInteger32(right_, right_);
    __ cvtlsi2sd(xmm1, right_);
    __ Integer32ToSmi(right_, right_);
    if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) {
      __ AllocateHeapNumber(dst_, no_reg, &call_runtime);
    }

    __ bind(&do_op);
    switch (op_) {
      case Token::ADD: __ addsd(xmm0, xmm1); break;
      case Token::SUB: __ subsd(xmm0, xmm1); break;
      case Token::MUL: __ mulsd(xmm0, xmm1); break;
      case Token::DIV: __ divsd(xmm0, xmm1); break;
      default: UNREACHABLE();
    }
    __ movsd(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0);
    __ jmp(&done);

    __ bind(&call_runtime);
  }
  GenericBinaryOpStub stub(op_, mode_, NO_SMI_CODE_IN_STUB);
  stub.GenerateCall(masm_, left_, right_);
  if (!dst_.is(rax)) __ movq(dst_, rax);
  __ bind(&done);
}


static TypeInfo CalculateTypeInfo(TypeInfo operands_type,
                                  Token::Value op,
                                  const Result& right,
                                  const Result& left) {
  // Set TypeInfo of result according to the operation performed.
  // We rely on the fact that smis have a 32 bit payload on x64.
  STATIC_ASSERT(kSmiValueSize == 32);
  switch (op) {
    case Token::COMMA:
      return right.type_info();
    case Token::OR:
    case Token::AND:
      // Result type can be either of the two input types.
      return operands_type;
    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND:
      // Result is always a smi.
      return TypeInfo::Smi();
    case Token::SAR:
    case Token::SHL:
      // Result is always a smi.
      return TypeInfo::Smi();
    case Token::SHR:
      // Result of x >>> y is always a smi if masked y >= 1, otherwise a number.
      return (right.is_constant() && right.handle()->IsSmi()
              && (Smi::cast(*right.handle())->value() & 0x1F) >= 1)
          ? TypeInfo::Smi()
          : TypeInfo::Number();
    case Token::ADD:
      if (operands_type.IsNumber()) {
        return TypeInfo::Number();
      } else if (left.type_info().IsString() || right.type_info().IsString()) {
        return TypeInfo::String();
      } else {
        return TypeInfo::Unknown();
      }
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
      // Result is always a number.
      return TypeInfo::Number();
    default:
      UNREACHABLE();
  }
  UNREACHABLE();
  return TypeInfo::Unknown();
}


void CodeGenerator::GenericBinaryOperation(BinaryOperation* expr,
                                           OverwriteMode overwrite_mode) {
  Comment cmnt(masm_, "[ BinaryOperation");
  Token::Value op = expr->op();
  Comment cmnt_token(masm_, Token::String(op));

  if (op == Token::COMMA) {
    // Simply discard left value.
    frame_->Nip(1);
    return;
  }

  Result right = frame_->Pop();
  Result left = frame_->Pop();

  if (op == Token::ADD) {
    const bool left_is_string = left.type_info().IsString();
    const bool right_is_string = right.type_info().IsString();
    // Make sure constant strings have string type info.
    ASSERT(!(left.is_constant() && left.handle()->IsString()) ||
           left_is_string);
    ASSERT(!(right.is_constant() && right.handle()->IsString()) ||
           right_is_string);
    if (left_is_string || right_is_string) {
      frame_->Push(&left);
      frame_->Push(&right);
      Result answer;
      if (left_is_string) {
        if (right_is_string) {
          StringAddStub stub(NO_STRING_CHECK_IN_STUB);
          answer = frame_->CallStub(&stub, 2);
        } else {
          answer =
            frame_->InvokeBuiltin(Builtins::STRING_ADD_LEFT, CALL_FUNCTION, 2);
        }
      } else if (right_is_string) {
        answer =
          frame_->InvokeBuiltin(Builtins::STRING_ADD_RIGHT, CALL_FUNCTION, 2);
      }
      answer.set_type_info(TypeInfo::String());
      frame_->Push(&answer);
      return;
    }
    // Neither operand is known to be a string.
  }

  bool left_is_smi_constant = left.is_constant() && left.handle()->IsSmi();
  bool left_is_non_smi_constant = left.is_constant() && !left.handle()->IsSmi();
  bool right_is_smi_constant = right.is_constant() && right.handle()->IsSmi();
  bool right_is_non_smi_constant =
      right.is_constant() && !right.handle()->IsSmi();

  if (left_is_smi_constant && right_is_smi_constant) {
    // Compute the constant result at compile time, and leave it on the frame.
    int left_int = Smi::cast(*left.handle())->value();
    int right_int = Smi::cast(*right.handle())->value();
    if (FoldConstantSmis(op, left_int, right_int)) return;
  }

  // Get number type of left and right sub-expressions.
  TypeInfo operands_type =
      TypeInfo::Combine(left.type_info(), right.type_info());

  TypeInfo result_type = CalculateTypeInfo(operands_type, op, right, left);

  Result answer;
  if (left_is_non_smi_constant || right_is_non_smi_constant) {
    // Go straight to the slow case, with no smi code.
    GenericBinaryOpStub stub(op,
                             overwrite_mode,
                             NO_SMI_CODE_IN_STUB,
                             operands_type);
    answer = GenerateGenericBinaryOpStubCall(&stub, &left, &right);
  } else if (right_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &left, right.handle(),
                                        false, overwrite_mode);
  } else if (left_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &right, left.handle(),
                                        true, overwrite_mode);
  } else {
    // Set the flags based on the operation, type and loop nesting level.
    // Bit operations always assume they likely operate on Smis. Still only
    // generate the inline Smi check code if this operation is part of a loop.
    // For all other operations only inline the Smi check code for likely smis
    // if the operation is part of a loop.
    if (loop_nesting() > 0 &&
        (Token::IsBitOp(op) ||
         operands_type.IsInteger32() ||
         expr->type()->IsLikelySmi())) {
      answer = LikelySmiBinaryOperation(expr, &left, &right, overwrite_mode);
    } else {
      GenericBinaryOpStub stub(op,
                               overwrite_mode,
                               NO_GENERIC_BINARY_FLAGS,
                               operands_type);
      answer = GenerateGenericBinaryOpStubCall(&stub, &left, &right);
    }
  }

  answer.set_type_info(result_type);
  frame_->Push(&answer);
}


bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
  Object* answer_object = Heap::undefined_value();
  switch (op) {
    case Token::ADD:
      // Use intptr_t to detect overflow of 32-bit int.
      if (Smi::IsValid(static_cast<intptr_t>(left) + right)) {
        answer_object = Smi::FromInt(left + right);
      }
      break;
    case Token::SUB:
      // Use intptr_t to detect overflow of 32-bit int.
      if (Smi::IsValid(static_cast<intptr_t>(left) - right)) {
        answer_object = Smi::FromInt(left - right);
      }
      break;
    case Token::MUL: {
      double answer = static_cast<double>(left) * right;
      if (answer >= Smi::kMinValue && answer <= Smi::kMaxValue) {
        // If the product is zero and the non-zero factor is negative,
        // the spec requires us to return floating point negative zero.
        if (answer != 0 || (left >= 0 && right >= 0)) {
          answer_object = Smi::FromInt(static_cast<int>(answer));
        }
      }
    }
    break;
    case Token::DIV:
    case Token::MOD:
      break;
    case Token::BIT_OR:
      answer_object = Smi::FromInt(left | right);
      break;
    case Token::BIT_AND:
      answer_object = Smi::FromInt(left & right);
      break;
    case Token::BIT_XOR:
      answer_object = Smi::FromInt(left ^ right);
      break;

    case Token::SHL: {
      int shift_amount = right & 0x1F;
      if (Smi::IsValid(left << shift_amount)) {
        answer_object = Smi::FromInt(left << shift_amount);
      }
      break;
    }
    case Token::SHR: {
      int shift_amount = right & 0x1F;
      unsigned int unsigned_left = left;
      unsigned_left >>= shift_amount;
      if (unsigned_left <= static_cast<unsigned int>(Smi::kMaxValue)) {
        answer_object = Smi::FromInt(unsigned_left);
      }
      break;
    }
    case Token::SAR: {
      int shift_amount = right & 0x1F;
      unsigned int unsigned_left = left;
      if (left < 0) {
        // Perform arithmetic shift of a negative number by
        // complementing number, logical shifting, complementing again.
        unsigned_left = ~unsigned_left;
        unsigned_left >>= shift_amount;
        unsigned_left = ~unsigned_left;
      } else {
        unsigned_left >>= shift_amount;
      }
      ASSERT(Smi::IsValid(static_cast<int32_t>(unsigned_left)));
      answer_object = Smi::FromInt(static_cast<int32_t>(unsigned_left));
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
  if (answer_object == Heap::undefined_value()) {
    return false;
  }
  frame_->Push(Handle<Object>(answer_object));
  return true;
}


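// Jump to both_smi if both left and right hold smis, using static type
// information to omit checks that are statically decidable; if either
// operand is known to be a double or a string, fall through without
// emitting any test.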
void CodeGenerator::JumpIfBothSmiUsingTypeInfo(Result* left,
                                               Result* right,
                                               JumpTarget* both_smi) {
  TypeInfo left_info = left->type_info();
  TypeInfo right_info = right->type_info();
  if (left_info.IsDouble() || left_info.IsString() ||
      right_info.IsDouble() || right_info.IsString()) {
    // We know that left and right are not both smi. Don't do any tests.
    return;
  }

  if (left->reg().is(right->reg())) {
    if (!left_info.IsSmi()) {
      Condition is_smi = masm()->CheckSmi(left->reg());
      both_smi->Branch(is_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
      left->Unuse();
      right->Unuse();
      both_smi->Jump();
    }
  } else if (!left_info.IsSmi()) {
    if (!right_info.IsSmi()) {
      Condition is_smi = masm()->CheckBothSmi(left->reg(), right->reg());
      both_smi->Branch(is_smi);
    } else {
      Condition is_smi = masm()->CheckSmi(left->reg());
      both_smi->Branch(is_smi);
    }
  } else {
    if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
    if (!right_info.IsSmi()) {
      Condition is_smi = masm()->CheckSmi(right->reg());
      both_smi->Branch(is_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right->reg());
      left->Unuse();
      right->Unuse();
      both_smi->Jump();
    }
  }
}


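// Jump to the deferred code unless reg is statically known (via type
// info) to hold a smi; in debug builds, verify that reg actually holds
// a smi on the fall-through path.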
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 1183 | void CodeGenerator::JumpIfNotSmiUsingTypeInfo(Register reg, |
| 1184 | TypeInfo type, |
| 1185 | DeferredCode* deferred) { |
| 1186 | if (!type.IsSmi()) { |
| 1187 | __ JumpIfNotSmi(reg, deferred->entry_label()); |
| 1188 | } |
| 1189 | if (FLAG_debug_code) { |
| 1190 | __ AbortIfNotSmi(reg); |
| 1191 | } |
| 1192 | } |
| 1193 | |
| 1194 | |
| 1195 | void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left, |
| 1196 | Register right, |
| 1197 | TypeInfo left_info, |
| 1198 | TypeInfo right_info, |
| 1199 | DeferredCode* deferred) { |
| 1200 | if (!left_info.IsSmi() && !right_info.IsSmi()) { |
| 1201 | __ JumpIfNotBothSmi(left, right, deferred->entry_label()); |
| 1202 | } else if (!left_info.IsSmi()) { |
| 1203 | __ JumpIfNotSmi(left, deferred->entry_label()); |
| 1204 | } else if (!right_info.IsSmi()) { |
| 1205 | __ JumpIfNotSmi(right, deferred->entry_label()); |
| 1206 | } |
| 1207 | if (FLAG_debug_code) { |
| 1208 | __ AbortIfNotSmi(left); |
| 1209 | __ AbortIfNotSmi(right); |
| 1210 | } |
| 1211 | } |
| 1212 | |
| 1213 | |
| 1214 | // Implements a binary operation using a deferred code object and some |
| 1215 | // inline code to operate on smis quickly. |
| 1216 | Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr, |
| 1217 | Result* left, |
| 1218 | Result* right, |
| 1219 | OverwriteMode overwrite_mode) { |
| 1220 | // Copy the type info because left and right may be overwritten. |
| 1221 | TypeInfo left_type_info = left->type_info(); |
| 1222 | TypeInfo right_type_info = right->type_info(); |
| 1223 | Token::Value op = expr->op(); |
| 1224 | Result answer; |
| 1225 | // Special handling of div and mod because they use fixed registers. |
| 1226 | if (op == Token::DIV || op == Token::MOD) { |
| 1227 | // We need rax as the quotient register, rdx as the remainder |
| 1228 | // register, neither left nor right in rax or rdx, and left copied |
| 1229 | // to rax. |
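| | // (The x64 idiv instruction divides rdx:rax, leaving the quotient in |
| | // rax and the remainder in rdx, which is what forces these fixed |
| | // register assignments.) |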
| 1230 | Result quotient; |
| 1231 | Result remainder; |
| 1232 | bool left_is_in_rax = false; |
| 1233 | // Step 1: get rax for quotient. |
| 1234 | if ((left->is_register() && left->reg().is(rax)) || |
| 1235 | (right->is_register() && right->reg().is(rax))) { |
| 1236 | // One or both are in rax. Use a fresh non-rdx register for |
| 1237 | // them. |
| 1238 | Result fresh = allocator_->Allocate(); |
| 1239 | ASSERT(fresh.is_valid()); |
| 1240 | if (fresh.reg().is(rdx)) { |
| 1241 | remainder = fresh; |
| 1242 | fresh = allocator_->Allocate(); |
| 1243 | ASSERT(fresh.is_valid()); |
| 1244 | } |
| 1245 | if (left->is_register() && left->reg().is(rax)) { |
| 1246 | quotient = *left; |
| 1247 | *left = fresh; |
| 1248 | left_is_in_rax = true; |
| 1249 | } |
| 1250 | if (right->is_register() && right->reg().is(rax)) { |
| 1251 | quotient = *right; |
| 1252 | *right = fresh; |
| 1253 | } |
| 1254 | __ movq(fresh.reg(), rax); |
| 1255 | } else { |
| 1256 | // Neither left nor right is in rax. |
| 1257 | quotient = allocator_->Allocate(rax); |
| 1258 | } |
| 1259 | ASSERT(quotient.is_register() && quotient.reg().is(rax)); |
| 1260 | ASSERT(!(left->is_register() && left->reg().is(rax))); |
| 1261 | ASSERT(!(right->is_register() && right->reg().is(rax))); |
| 1262 | |
| 1263 | // Step 2: get rdx for remainder if necessary. |
| 1264 | if (!remainder.is_valid()) { |
| 1265 | if ((left->is_register() && left->reg().is(rdx)) || |
| 1266 | (right->is_register() && right->reg().is(rdx))) { |
| 1267 | Result fresh = allocator_->Allocate(); |
| 1268 | ASSERT(fresh.is_valid()); |
| 1269 | if (left->is_register() && left->reg().is(rdx)) { |
| 1270 | remainder = *left; |
| 1271 | *left = fresh; |
| 1272 | } |
| 1273 | if (right->is_register() && right->reg().is(rdx)) { |
| 1274 | remainder = *right; |
| 1275 | *right = fresh; |
| 1276 | } |
| 1277 | __ movq(fresh.reg(), rdx); |
| 1278 | } else { |
| 1279 | // Neither left nor right is in rdx. |
| 1280 | remainder = allocator_->Allocate(rdx); |
| 1281 | } |
| 1282 | } |
| 1283 | ASSERT(remainder.is_register() && remainder.reg().is(rdx)); |
| 1284 | ASSERT(!(left->is_register() && left->reg().is(rdx))); |
| 1285 | ASSERT(!(right->is_register() && right->reg().is(rdx))); |
| 1286 | |
| 1287 | left->ToRegister(); |
| 1288 | right->ToRegister(); |
| 1289 | frame_->Spill(rax); |
| 1290 | frame_->Spill(rdx); |
| 1291 | |
| 1292 | // Check that left and right are smi tagged. |
| 1293 | DeferredInlineBinaryOperation* deferred = |
| 1294 | new DeferredInlineBinaryOperation(op, |
| 1295 | (op == Token::DIV) ? rax : rdx, |
| 1296 | left->reg(), |
| 1297 | right->reg(), |
| 1298 | overwrite_mode); |
| 1299 | JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), |
| 1300 | left_type_info, right_type_info, deferred); |
| 1301 | |
| 1302 | if (op == Token::DIV) { |
| 1303 | __ SmiDiv(rax, left->reg(), right->reg(), deferred->entry_label()); |
| 1304 | deferred->BindExit(); |
| 1305 | left->Unuse(); |
| 1306 | right->Unuse(); |
| 1307 | answer = quotient; |
| 1308 | } else { |
| 1309 | ASSERT(op == Token::MOD); |
| 1310 | __ SmiMod(rdx, left->reg(), right->reg(), deferred->entry_label()); |
| 1311 | deferred->BindExit(); |
| 1312 | left->Unuse(); |
| 1313 | right->Unuse(); |
| 1314 | answer = remainder; |
| 1315 | } |
| 1316 | ASSERT(answer.is_valid()); |
| 1317 | return answer; |
| 1318 | } |
| 1319 | |
| 1320 | // Special handling of shift operations because they use fixed |
| 1321 | // registers. |
| 1322 | if (op == Token::SHL || op == Token::SHR || op == Token::SAR) { |
| 1323 | // Move left out of rcx if necessary. |
| 1324 | if (left->is_register() && left->reg().is(rcx)) { |
| 1325 | *left = allocator_->Allocate(); |
| 1326 | ASSERT(left->is_valid()); |
| 1327 | __ movq(left->reg(), rcx); |
| 1328 | } |
| 1329 | right->ToRegister(rcx); |
| 1330 | left->ToRegister(); |
| 1331 | ASSERT(left->is_register() && !left->reg().is(rcx)); |
| 1332 | ASSERT(right->is_register() && right->reg().is(rcx)); |
| 1333 | |
| 1334 | // We will modify right, so it must be spilled. |
| 1335 | frame_->Spill(rcx); |
| 1336 | |
| 1337 | // Use a fresh answer register to avoid spilling the left operand. |
| 1338 | answer = allocator_->Allocate(); |
| 1339 | ASSERT(answer.is_valid()); |
| 1340 | // Check that both operands are smis using the answer register as a |
| 1341 | // temporary. |
| 1342 | DeferredInlineBinaryOperation* deferred = |
| 1343 | new DeferredInlineBinaryOperation(op, |
| 1344 | answer.reg(), |
| 1345 | left->reg(), |
| 1346 | rcx, |
| 1347 | overwrite_mode); |
| 1348 | |
| 1349 | Label do_op; |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 1350 | // Left operand must be unchanged in left->reg() for deferred code. |
| 1351 | // Left operand is in answer.reg(), possibly converted to int32, for |
| 1352 | // inline code. |
| 1353 | __ movq(answer.reg(), left->reg()); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 1354 | if (right_type_info.IsSmi()) { |
| 1355 | if (FLAG_debug_code) { |
| 1356 | __ AbortIfNotSmi(right->reg()); |
| 1357 | } |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 1358 | // If left is not known to be a smi, check if it is. |
| 1359 | // If left is not known to be a number, and it isn't a smi, check if |
| 1360 | // it is a HeapNumber. |
| 1361 | if (!left_type_info.IsSmi()) { |
| 1362 | __ JumpIfSmi(answer.reg(), &do_op); |
| 1363 | if (!left_type_info.IsNumber()) { |
| 1364 | // Branch if not a heap number. |
| 1365 | __ Cmp(FieldOperand(answer.reg(), HeapObject::kMapOffset), |
| 1366 | Factory::heap_number_map()); |
| 1367 | deferred->Branch(not_equal); |
| 1368 | } |
| 1369 | // Load integer value into answer register using truncation. |
| 1370 | __ cvttsd2si(answer.reg(), |
| 1371 | FieldOperand(answer.reg(), HeapNumber::kValueOffset)); |
| 1372 | // Branch if we might have overflowed. |
| 1373 | // (False negative for Smi::kMinValue) |
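| | // (cvttsd2si stores 0x80000000, the x86 "integer indefinite" value, |
| | // when the double is NaN or outside the int32 range, so comparing |
| | // against it catches those cases here.) |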
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 1374 | __ cmpl(answer.reg(), Immediate(0x80000000)); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 1375 | deferred->Branch(equal); |
| 1376 | // TODO(lrn): Inline shifts on int32 here instead of first smi-tagging. |
| 1377 | __ Integer32ToSmi(answer.reg(), answer.reg()); |
| 1378 | } else { |
| 1379 | // Fast case - both are actually smis. |
| 1380 | if (FLAG_debug_code) { |
| 1381 | __ AbortIfNotSmi(left->reg()); |
| 1382 | } |
| 1383 | } |
| 1384 | } else { |
| 1385 | JumpIfNotBothSmiUsingTypeInfo(left->reg(), rcx, |
| 1386 | left_type_info, right_type_info, deferred); |
| 1387 | } |
| 1388 | __ bind(&do_op); |
| 1389 | |
| 1390 | // Perform the operation. |
| 1391 | switch (op) { |
| 1392 | case Token::SAR: |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 1393 | __ SmiShiftArithmeticRight(answer.reg(), answer.reg(), rcx); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 1394 | break; |
| 1395 | case Token::SHR: { |
| 1396 | __ SmiShiftLogicalRight(answer.reg(), |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 1397 | answer.reg(), |
| 1398 | rcx, |
| 1399 | deferred->entry_label()); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 1400 | break; |
| 1401 | } |
| 1402 | case Token::SHL: { |
| 1403 | __ SmiShiftLeft(answer.reg(), |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 1404 | answer.reg(), |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 1405 | rcx); |
| 1406 | break; |
| 1407 | } |
| 1408 | default: |
| 1409 | UNREACHABLE(); |
| 1410 | } |
| 1411 | deferred->BindExit(); |
| 1412 | left->Unuse(); |
| 1413 | right->Unuse(); |
| 1414 | ASSERT(answer.is_valid()); |
| 1415 | return answer; |
| 1416 | } |
| 1417 | |
| 1418 | // Handle the other binary operations. |
| 1419 | left->ToRegister(); |
| 1420 | right->ToRegister(); |
| 1421 | // A newly allocated register, answer, is used to hold the result. The |
| 1422 | // registers containing left and right are not modified, so they don't |
| 1423 | // need to be spilled in the fast case. |
| 1424 | answer = allocator_->Allocate(); |
| 1425 | ASSERT(answer.is_valid()); |
| 1426 | |
| 1427 | // Perform the smi tag check. |
| 1428 | DeferredInlineBinaryOperation* deferred = |
| 1429 | new DeferredInlineBinaryOperation(op, |
| 1430 | answer.reg(), |
| 1431 | left->reg(), |
| 1432 | right->reg(), |
| 1433 | overwrite_mode); |
| 1434 | JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), |
| 1435 | left_type_info, right_type_info, deferred); |
| 1436 | |
| 1437 | switch (op) { |
| 1438 | case Token::ADD: |
| 1439 | __ SmiAdd(answer.reg(), |
| 1440 | left->reg(), |
| 1441 | right->reg(), |
| 1442 | deferred->entry_label()); |
| 1443 | break; |
| 1444 | |
| 1445 | case Token::SUB: |
| 1446 | __ SmiSub(answer.reg(), |
| 1447 | left->reg(), |
| 1448 | right->reg(), |
| 1449 | deferred->entry_label()); |
| 1450 | break; |
| 1451 | |
| 1452 | case Token::MUL: { |
| 1453 | __ SmiMul(answer.reg(), |
| 1454 | left->reg(), |
| 1455 | right->reg(), |
| 1456 | deferred->entry_label()); |
| 1457 | break; |
| 1458 | } |
| 1459 | |
| 1460 | case Token::BIT_OR: |
| 1461 | __ SmiOr(answer.reg(), left->reg(), right->reg()); |
| 1462 | break; |
| 1463 | |
| 1464 | case Token::BIT_AND: |
| 1465 | __ SmiAnd(answer.reg(), left->reg(), right->reg()); |
| 1466 | break; |
| 1467 | |
| 1468 | case Token::BIT_XOR: |
| 1469 | __ SmiXor(answer.reg(), left->reg(), right->reg()); |
| 1470 | break; |
| 1471 | |
| 1472 | default: |
| 1473 | UNREACHABLE(); |
| 1474 | break; |
| 1475 | } |
| 1476 | deferred->BindExit(); |
| 1477 | left->Unuse(); |
| 1478 | right->Unuse(); |
| 1479 | ASSERT(answer.is_valid()); |
| 1480 | return answer; |
| 1481 | } |
| 1482 | |
| 1483 | |
| 1484 | // Call the appropriate binary operation stub to compute src op value |
| 1485 | // and leave the result in dst. |
| 1486 | class DeferredInlineSmiOperation: public DeferredCode { |
| 1487 | public: |
| 1488 | DeferredInlineSmiOperation(Token::Value op, |
| 1489 | Register dst, |
| 1490 | Register src, |
| 1491 | Smi* value, |
| 1492 | OverwriteMode overwrite_mode) |
| 1493 | : op_(op), |
| 1494 | dst_(dst), |
| 1495 | src_(src), |
| 1496 | value_(value), |
| 1497 | overwrite_mode_(overwrite_mode) { |
| 1498 | set_comment("[ DeferredInlineSmiOperation"); |
| 1499 | } |
| 1500 | |
| 1501 | virtual void Generate(); |
| 1502 | |
| 1503 | private: |
| 1504 | Token::Value op_; |
| 1505 | Register dst_; |
| 1506 | Register src_; |
| 1507 | Smi* value_; |
| 1508 | OverwriteMode overwrite_mode_; |
| 1509 | }; |
| 1510 | |
| 1511 | |
| 1512 | void DeferredInlineSmiOperation::Generate() { |
| 1513 | // For mod we don't generate all the Smi code inline. |
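| | // (Only the power-of-2 case of MOD is inlined, in |
| | // ConstantSmiBinaryOperation below, so the stub keeps its smi path.) |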
| 1514 | GenericBinaryOpStub stub( |
| 1515 | op_, |
| 1516 | overwrite_mode_, |
| 1517 | (op_ == Token::MOD) ? NO_GENERIC_BINARY_FLAGS : NO_SMI_CODE_IN_STUB); |
| 1518 | stub.GenerateCall(masm_, src_, value_); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1519 | if (!dst_.is(rax)) __ movq(dst_, rax); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1520 | } |
| 1521 | |
| 1522 | |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 1523 | // Call the appropriate binary operation stub to compute value op src |
| 1524 | // and leave the result in dst. |
| 1525 | class DeferredInlineSmiOperationReversed: public DeferredCode { |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1526 | public: |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 1527 | DeferredInlineSmiOperationReversed(Token::Value op, |
| 1528 | Register dst, |
| 1529 | Smi* value, |
| 1530 | Register src, |
| 1531 | OverwriteMode overwrite_mode) |
| 1532 | : op_(op), |
| 1533 | dst_(dst), |
| 1534 | value_(value), |
| 1535 | src_(src), |
| 1536 | overwrite_mode_(overwrite_mode) { |
| 1537 | set_comment("[ DeferredInlineSmiOperationReversed"); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1538 | } |
| 1539 | |
| 1540 | virtual void Generate(); |
| 1541 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1542 | private: |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 1543 | Token::Value op_; |
| 1544 | Register dst_; |
| 1545 | Smi* value_; |
| 1546 | Register src_; |
| 1547 | OverwriteMode overwrite_mode_; |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 1548 | }; |
| 1549 | |
| 1550 | |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 1551 | void DeferredInlineSmiOperationReversed::Generate() { |
| 1552 | GenericBinaryOpStub stub( |
| 1553 | op_, |
| 1554 | overwrite_mode_, |
| 1555 | NO_SMI_CODE_IN_STUB); |
| 1556 | stub.GenerateCall(masm_, value_, src_); |
| 1557 | if (!dst_.is(rax)) __ movq(dst_, rax); |
| 1558 | } |
| | |
| | |
| 1559 | class DeferredInlineSmiAdd: public DeferredCode { |
| 1560 | public: |
| 1561 | DeferredInlineSmiAdd(Register dst, |
| 1562 | Smi* value, |
| 1563 | OverwriteMode overwrite_mode) |
| 1564 | : dst_(dst), value_(value), overwrite_mode_(overwrite_mode) { |
| 1565 | set_comment("[ DeferredInlineSmiAdd"); |
Leon Clarke | f7060e2 | 2010-06-03 12:02:55 +0100 | [diff] [blame] | 1566 | } |
| 1567 | |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 1568 | virtual void Generate(); |
| 1569 | |
| 1570 | private: |
| 1571 | Register dst_; |
| 1572 | Smi* value_; |
| 1573 | OverwriteMode overwrite_mode_; |
| 1574 | }; |
| 1575 | |
| 1576 | |
| 1577 | void DeferredInlineSmiAdd::Generate() { |
| 1578 | GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, NO_SMI_CODE_IN_STUB); |
| 1579 | igostub.GenerateCall(masm_, dst_, value_); |
| 1580 | if (!dst_.is(rax)) __ movq(dst_, rax); |
| 1581 | } |
| 1582 | |
| 1583 | |
| 1584 | // Called when the inline addition of value + src did not produce a smi |
| 1585 | // (an operand was not smi tagged, or the result overflowed). Calls the |
| 1586 | // appropriate specialized stub for add. The result is left in dst. |
| 1587 | class DeferredInlineSmiAddReversed: public DeferredCode { |
| 1588 | public: |
| 1589 | DeferredInlineSmiAddReversed(Register dst, |
| 1590 | Smi* value, |
| 1591 | OverwriteMode overwrite_mode) |
| 1592 | : dst_(dst), value_(value), overwrite_mode_(overwrite_mode) { |
| 1593 | set_comment("[ DeferredInlineSmiAddReversed"); |
| 1594 | } |
| 1595 | |
| 1596 | virtual void Generate(); |
| 1597 | |
| 1598 | private: |
| 1599 | Register dst_; |
| 1600 | Smi* value_; |
| 1601 | OverwriteMode overwrite_mode_; |
| 1602 | }; |
| 1603 | |
| 1604 | |
| 1605 | void DeferredInlineSmiAddReversed::Generate() { |
| 1606 | GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, NO_SMI_CODE_IN_STUB); |
| 1607 | igostub.GenerateCall(masm_, value_, dst_); |
| 1608 | if (!dst_.is(rax)) __ movq(dst_, rax); |
| 1609 | } |
| 1610 | |
| 1611 | |
| 1612 | class DeferredInlineSmiSub: public DeferredCode { |
| 1613 | public: |
| 1614 | DeferredInlineSmiSub(Register dst, |
| 1615 | Smi* value, |
| 1616 | OverwriteMode overwrite_mode) |
| 1617 | : dst_(dst), value_(value), overwrite_mode_(overwrite_mode) { |
| 1618 | set_comment("[ DeferredInlineSmiSub"); |
| 1619 | } |
| 1620 | |
| 1621 | virtual void Generate(); |
| 1622 | |
| 1623 | private: |
| 1624 | Register dst_; |
| 1625 | Smi* value_; |
| 1626 | OverwriteMode overwrite_mode_; |
| 1627 | }; |
| 1628 | |
| 1629 | |
| 1630 | void DeferredInlineSmiSub::Generate() { |
| 1631 | GenericBinaryOpStub igostub(Token::SUB, overwrite_mode_, NO_SMI_CODE_IN_STUB); |
| 1632 | igostub.GenerateCall(masm_, dst_, value_); |
| 1633 | if (!dst_.is(rax)) __ movq(dst_, rax); |
| 1634 | } |
| 1635 | |
| 1636 | |
| 1637 | Result CodeGenerator::ConstantSmiBinaryOperation(BinaryOperation* expr, |
| 1638 | Result* operand, |
| 1639 | Handle<Object> value, |
| 1640 | bool reversed, |
| 1641 | OverwriteMode overwrite_mode) { |
| 1642 | // Generate inline code for a binary operation when one of the |
| 1643 | // operands is a constant smi. Consumes the argument "operand". |
| 1644 | if (IsUnsafeSmi(value)) { |
| 1645 | Result unsafe_operand(value); |
| 1646 | if (reversed) { |
| 1647 | return LikelySmiBinaryOperation(expr, &unsafe_operand, operand, |
| 1648 | overwrite_mode); |
| 1649 | } else { |
| 1650 | return LikelySmiBinaryOperation(expr, operand, &unsafe_operand, |
| 1651 | overwrite_mode); |
| 1652 | } |
| 1653 | } |
| 1654 | |
| 1655 | // Get the literal value. |
| 1656 | Smi* smi_value = Smi::cast(*value); |
| 1657 | int int_value = smi_value->value(); |
| 1658 | |
| 1659 | Token::Value op = expr->op(); |
| 1660 | Result answer; |
| 1661 | switch (op) { |
| 1662 | case Token::ADD: { |
| 1663 | operand->ToRegister(); |
| 1664 | frame_->Spill(operand->reg()); |
| 1665 | DeferredCode* deferred = NULL; |
| 1666 | if (reversed) { |
| 1667 | deferred = new DeferredInlineSmiAddReversed(operand->reg(), |
| 1668 | smi_value, |
| 1669 | overwrite_mode); |
| 1670 | } else { |
| 1671 | deferred = new DeferredInlineSmiAdd(operand->reg(), |
| 1672 | smi_value, |
| 1673 | overwrite_mode); |
| 1674 | } |
| 1675 | JumpIfNotSmiUsingTypeInfo(operand->reg(), operand->type_info(), |
| 1676 | deferred); |
| 1677 | __ SmiAddConstant(operand->reg(), |
| 1678 | operand->reg(), |
| 1679 | smi_value, |
| 1680 | deferred->entry_label()); |
| 1681 | deferred->BindExit(); |
| 1682 | answer = *operand; |
| 1683 | break; |
| 1684 | } |
| 1685 | |
| 1686 | case Token::SUB: { |
| 1687 | if (reversed) { |
| 1688 | Result constant_operand(value); |
| 1689 | answer = LikelySmiBinaryOperation(expr, &constant_operand, operand, |
| 1690 | overwrite_mode); |
| 1691 | } else { |
| 1692 | operand->ToRegister(); |
| 1693 | frame_->Spill(operand->reg()); |
| 1694 | answer = *operand; |
| 1695 | DeferredCode* deferred = new DeferredInlineSmiSub(operand->reg(), |
| 1696 | smi_value, |
| 1697 | overwrite_mode); |
| 1698 | JumpIfNotSmiUsingTypeInfo(operand->reg(), operand->type_info(), |
| 1699 | deferred); |
| 1700 | // A smi currently fits in a 32-bit Immediate. |
| 1701 | __ SmiSubConstant(operand->reg(), |
| 1702 | operand->reg(), |
| 1703 | smi_value, |
| 1704 | deferred->entry_label()); |
| 1705 | deferred->BindExit(); |
| 1706 | operand->Unuse(); |
| 1707 | } |
| 1708 | break; |
| 1709 | } |
| 1710 | |
| 1711 | case Token::SAR: |
| 1712 | if (reversed) { |
| 1713 | Result constant_operand(value); |
| 1714 | answer = LikelySmiBinaryOperation(expr, &constant_operand, operand, |
| 1715 | overwrite_mode); |
| 1716 | } else { |
| 1717 | // Only the least significant 5 bits of the shift value are used. |
| 1718 | // In the slow case, this masking is done inside the runtime call. |
| 1719 | int shift_value = int_value & 0x1f; |
| 1720 | operand->ToRegister(); |
| 1721 | frame_->Spill(operand->reg()); |
| 1722 | DeferredInlineSmiOperation* deferred = |
| 1723 | new DeferredInlineSmiOperation(op, |
| 1724 | operand->reg(), |
| 1725 | operand->reg(), |
| 1726 | smi_value, |
| 1727 | overwrite_mode); |
| 1728 | JumpIfNotSmiUsingTypeInfo(operand->reg(), operand->type_info(), |
| 1729 | deferred); |
| 1730 | __ SmiShiftArithmeticRightConstant(operand->reg(), |
| 1731 | operand->reg(), |
| 1732 | shift_value); |
| 1733 | deferred->BindExit(); |
| 1734 | answer = *operand; |
| 1735 | } |
| 1736 | break; |
| 1737 | |
| 1738 | case Token::SHR: |
| 1739 | if (reversed) { |
| 1740 | Result constant_operand(value); |
| 1741 | answer = LikelySmiBinaryOperation(expr, &constant_operand, operand, |
| 1742 | overwrite_mode); |
| 1743 | } else { |
| 1744 | // Only the least significant 5 bits of the shift value are used. |
| 1745 | // In the slow case, this masking is done inside the runtime call. |
| 1746 | int shift_value = int_value & 0x1f; |
| 1747 | operand->ToRegister(); |
| 1748 | answer = allocator()->Allocate(); |
| 1749 | ASSERT(answer.is_valid()); |
| 1750 | DeferredInlineSmiOperation* deferred = |
| 1751 | new DeferredInlineSmiOperation(op, |
| 1752 | answer.reg(), |
| 1753 | operand->reg(), |
| 1754 | smi_value, |
| 1755 | overwrite_mode); |
| 1756 | JumpIfNotSmiUsingTypeInfo(operand->reg(), operand->type_info(), |
| 1757 | deferred); |
| 1758 | __ SmiShiftLogicalRightConstant(answer.reg(), |
| 1759 | operand->reg(), |
| 1760 | shift_value, |
| 1761 | deferred->entry_label()); |
| 1762 | deferred->BindExit(); |
| 1763 | operand->Unuse(); |
| 1764 | } |
| 1765 | break; |
| 1766 | |
| 1767 | case Token::SHL: |
| 1768 | if (reversed) { |
| 1769 | operand->ToRegister(); |
| 1770 | |
| 1771 | // We need rcx to be available to hold operand, and to be spilled. |
| 1772 | // SmiShiftLeft implicitly modifies rcx. |
| 1773 | if (operand->reg().is(rcx)) { |
| 1774 | frame_->Spill(operand->reg()); |
| 1775 | answer = allocator()->Allocate(); |
| 1776 | } else { |
| 1777 | Result rcx_reg = allocator()->Allocate(rcx); |
| 1778 | // answer must not be rcx. |
| 1779 | answer = allocator()->Allocate(); |
| 1780 | // rcx_reg goes out of scope. |
| 1781 | } |
| 1782 | |
| 1783 | DeferredInlineSmiOperationReversed* deferred = |
| 1784 | new DeferredInlineSmiOperationReversed(op, |
| 1785 | answer.reg(), |
| 1786 | smi_value, |
| 1787 | operand->reg(), |
| 1788 | overwrite_mode); |
| 1789 | JumpIfNotSmiUsingTypeInfo(operand->reg(), operand->type_info(), |
| 1790 | deferred); |
| 1791 | |
| 1792 | __ Move(answer.reg(), smi_value); |
| 1793 | __ SmiShiftLeft(answer.reg(), answer.reg(), operand->reg()); |
| 1794 | operand->Unuse(); |
| 1795 | |
| 1796 | deferred->BindExit(); |
| 1797 | } else { |
| 1798 | // Only the least significant 5 bits of the shift value are used. |
| 1799 | // In the slow case, this masking is done inside the runtime call. |
| 1800 | int shift_value = int_value & 0x1f; |
| 1801 | operand->ToRegister(); |
| 1802 | if (shift_value == 0) { |
| 1803 | // Spill operand so it can be overwritten in the slow case. |
| 1804 | frame_->Spill(operand->reg()); |
| 1805 | DeferredInlineSmiOperation* deferred = |
| 1806 | new DeferredInlineSmiOperation(op, |
| 1807 | operand->reg(), |
| 1808 | operand->reg(), |
| 1809 | smi_value, |
| 1810 | overwrite_mode); |
| 1811 | JumpIfNotSmiUsingTypeInfo(operand->reg(), operand->type_info(), |
| 1812 | deferred); |
| 1813 | deferred->BindExit(); |
| 1814 | answer = *operand; |
| 1815 | } else { |
| 1816 | // Use a fresh temporary for nonzero shift values. |
| 1817 | answer = allocator()->Allocate(); |
| 1818 | ASSERT(answer.is_valid()); |
| 1819 | DeferredInlineSmiOperation* deferred = |
| 1820 | new DeferredInlineSmiOperation(op, |
| 1821 | answer.reg(), |
| 1822 | operand->reg(), |
| 1823 | smi_value, |
| 1824 | overwrite_mode); |
| 1825 | JumpIfNotSmiUsingTypeInfo(operand->reg(), operand->type_info(), |
| 1826 | deferred); |
| 1827 | __ SmiShiftLeftConstant(answer.reg(), |
| 1828 | operand->reg(), |
| 1829 | shift_value); |
| 1830 | deferred->BindExit(); |
| 1831 | operand->Unuse(); |
| 1832 | } |
| 1833 | } |
| 1834 | break; |
| 1835 | |
| 1836 | case Token::BIT_OR: |
| 1837 | case Token::BIT_XOR: |
| 1838 | case Token::BIT_AND: { |
| 1839 | operand->ToRegister(); |
| 1840 | frame_->Spill(operand->reg()); |
| 1841 | if (reversed) { |
| 1842 | // Bit operations with a constant smi are commutative. |
| 1843 | // We can swap left and right operands with no problem. |
| 1844 | // Swap left and right overwrite modes. 0->0, 1->2, 2->1. |
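| | // ((2 * 0) % 3 == 0, (2 * 1) % 3 == 2, (2 * 2) % 3 == 1 -- exactly |
| | // the 0->0, 1->2, 2->1 mapping described above.) |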
| 1845 | overwrite_mode = static_cast<OverwriteMode>((2 * overwrite_mode) % 3); |
| 1846 | } |
| 1847 | DeferredCode* deferred = new DeferredInlineSmiOperation(op, |
| 1848 | operand->reg(), |
| 1849 | operand->reg(), |
| 1850 | smi_value, |
| 1851 | overwrite_mode); |
| 1852 | JumpIfNotSmiUsingTypeInfo(operand->reg(), operand->type_info(), |
| 1853 | deferred); |
| 1854 | if (op == Token::BIT_AND) { |
| 1855 | __ SmiAndConstant(operand->reg(), operand->reg(), smi_value); |
| 1856 | } else if (op == Token::BIT_XOR) { |
| 1857 | if (int_value != 0) { |
| 1858 | __ SmiXorConstant(operand->reg(), operand->reg(), smi_value); |
| 1859 | } |
| 1860 | } else { |
| 1861 | ASSERT(op == Token::BIT_OR); |
| 1862 | if (int_value != 0) { |
| 1863 | __ SmiOrConstant(operand->reg(), operand->reg(), smi_value); |
| 1864 | } |
| 1865 | } |
| 1866 | deferred->BindExit(); |
| 1867 | answer = *operand; |
| 1868 | break; |
| 1869 | } |
| 1870 | |
| 1871 | // Generate inline code for mod of powers of 2 and negative powers of 2. |
| 1872 | case Token::MOD: |
| 1873 | if (!reversed && |
| 1874 | int_value != 0 && |
| 1875 | (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) { |
| 1876 | operand->ToRegister(); |
| 1877 | frame_->Spill(operand->reg()); |
| 1878 | DeferredCode* deferred = |
| 1879 | new DeferredInlineSmiOperation(op, |
| 1880 | operand->reg(), |
| 1881 | operand->reg(), |
| 1882 | smi_value, |
| 1883 | overwrite_mode); |
Ben Murdoch | f87a203 | 2010-10-22 12:50:53 +0100 | [diff] [blame] | 1884 | __ JumpUnlessNonNegativeSmi(operand->reg(), deferred->entry_label()); |
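| | // For x >= 0 and m a power of 2, x % m == x & (m - 1). For negative x |
| | // the JS result keeps the dividend's sign (e.g. -7 % 4 == -3, but |
| | // -7 & 3 == 1), so negative smis take the deferred path above. |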
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 1885 | if (int_value < 0) int_value = -int_value; |
| 1886 | if (int_value == 1) { |
| 1887 | __ Move(operand->reg(), Smi::FromInt(0)); |
| 1888 | } else { |
| 1889 | __ SmiAndConstant(operand->reg(), |
| 1890 | operand->reg(), |
| 1891 | Smi::FromInt(int_value - 1)); |
| 1892 | } |
| 1893 | deferred->BindExit(); |
| 1894 | answer = *operand; |
| 1895 | break; // This break only applies if we generated code for MOD. |
| 1896 | } |
| 1897 | // Fall through if we did not find a power of 2 on the right hand side! |
| 1898 | // The next case must be the default. |
| 1899 | |
| 1900 | default: { |
| 1901 | Result constant_operand(value); |
| 1902 | if (reversed) { |
| 1903 | answer = LikelySmiBinaryOperation(expr, &constant_operand, operand, |
| 1904 | overwrite_mode); |
| 1905 | } else { |
| 1906 | answer = LikelySmiBinaryOperation(expr, operand, &constant_operand, |
| 1907 | overwrite_mode); |
| 1908 | } |
| 1909 | break; |
| 1910 | } |
| 1911 | } |
| 1912 | ASSERT(answer.is_valid()); |
| 1913 | return answer; |
| 1914 | } |
| 1915 | |
| 1916 | |
| 1917 | static bool CouldBeNaN(const Result& result) { |
| 1918 | if (result.type_info().IsSmi()) return false; |
| 1919 | if (result.type_info().IsInteger32()) return false; |
| 1920 | if (!result.is_constant()) return true; |
| 1921 | if (!result.handle()->IsHeapNumber()) return false; |
| 1922 | return isnan(HeapNumber::cast(*result.handle())->value()); |
| 1923 | } |
| 1924 | |
| 1925 | |
| 1926 | // Convert from signed to unsigned comparison to match the way EFLAGS are set |
| 1927 | // by FPU and XMM compare instructions. |
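| | // (ucomisd reports its result in CF/ZF/PF rather than SF/OF, so a |
| | // signed "less" must be tested as the unsigned "below"; NaN operands |
| | // show up as parity_even, which the callers handle.) |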
| 1928 | static Condition DoubleCondition(Condition cc) { |
| 1929 | switch (cc) { |
| 1930 | case less: return below; |
| 1931 | case equal: return equal; |
| 1932 | case less_equal: return below_equal; |
| 1933 | case greater: return above; |
| 1934 | case greater_equal: return above_equal; |
| 1935 | default: UNREACHABLE(); |
| 1936 | } |
| 1937 | UNREACHABLE(); |
| 1938 | return equal; |
| 1939 | } |
| 1940 | |
| 1941 | |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 1942 | static CompareFlags ComputeCompareFlags(NaNInformation nan_info, |
| 1943 | bool inline_number_compare) { |
| 1944 | CompareFlags flags = NO_SMI_COMPARE_IN_STUB; |
| 1945 | if (nan_info == kCantBothBeNaN) { |
| 1946 | flags = static_cast<CompareFlags>(flags | CANT_BOTH_BE_NAN); |
| 1947 | } |
| 1948 | if (inline_number_compare) { |
| 1949 | flags = static_cast<CompareFlags>(flags | NO_NUMBER_COMPARE_IN_STUB); |
| 1950 | } |
| 1951 | return flags; |
| 1952 | } |
| 1953 | |
| 1954 | |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 1955 | void CodeGenerator::Comparison(AstNode* node, |
| 1956 | Condition cc, |
| 1957 | bool strict, |
| 1958 | ControlDestination* dest) { |
| 1959 | // Strict only makes sense for equality comparisons. |
| 1960 | ASSERT(!strict || cc == equal); |
| 1961 | |
| 1962 | Result left_side; |
| 1963 | Result right_side; |
| 1964 | // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order. |
| 1965 | if (cc == greater || cc == less_equal) { |
| 1966 | cc = ReverseCondition(cc); |
| 1967 | left_side = frame_->Pop(); |
| 1968 | right_side = frame_->Pop(); |
| 1969 | } else { |
| 1970 | right_side = frame_->Pop(); |
| 1971 | left_side = frame_->Pop(); |
| 1972 | } |
| 1973 | ASSERT(cc == less || cc == equal || cc == greater_equal); |
| 1974 | |
| 1975 | // If either side is a constant smi, optimize the comparison. |
| 1976 | bool left_side_constant_smi = false; |
| 1977 | bool left_side_constant_null = false; |
| 1978 | bool left_side_constant_1_char_string = false; |
| 1979 | if (left_side.is_constant()) { |
| 1980 | left_side_constant_smi = left_side.handle()->IsSmi(); |
| 1981 | left_side_constant_null = left_side.handle()->IsNull(); |
| 1982 | left_side_constant_1_char_string = |
| 1983 | (left_side.handle()->IsString() && |
| 1984 | String::cast(*left_side.handle())->length() == 1 && |
| 1985 | String::cast(*left_side.handle())->IsAsciiRepresentation()); |
| 1986 | } |
| 1987 | bool right_side_constant_smi = false; |
| 1988 | bool right_side_constant_null = false; |
| 1989 | bool right_side_constant_1_char_string = false; |
| 1990 | if (right_side.is_constant()) { |
| 1991 | right_side_constant_smi = right_side.handle()->IsSmi(); |
| 1992 | right_side_constant_null = right_side.handle()->IsNull(); |
| 1993 | right_side_constant_1_char_string = |
| 1994 | (right_side.handle()->IsString() && |
| 1995 | String::cast(*right_side.handle())->length() == 1 && |
| 1996 | String::cast(*right_side.handle())->IsAsciiRepresentation()); |
| 1997 | } |
| 1998 | |
| 1999 | if (left_side_constant_smi || right_side_constant_smi) { |
| 2000 | bool is_loop_condition = (node->AsExpression() != NULL) && |
| 2001 | node->AsExpression()->is_loop_condition(); |
| 2002 | ConstantSmiComparison(cc, strict, dest, &left_side, &right_side, |
| 2003 | left_side_constant_smi, right_side_constant_smi, |
| 2004 | is_loop_condition); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2005 | } else if (left_side_constant_1_char_string || |
| 2006 | right_side_constant_1_char_string) { |
| 2007 | if (left_side_constant_1_char_string && right_side_constant_1_char_string) { |
| 2008 | // Trivial case, comparing two constants. |
| 2009 | int left_value = String::cast(*left_side.handle())->Get(0); |
| 2010 | int right_value = String::cast(*right_side.handle())->Get(0); |
| 2011 | switch (cc) { |
| 2012 | case less: |
| 2013 | dest->Goto(left_value < right_value); |
| 2014 | break; |
| 2015 | case equal: |
| 2016 | dest->Goto(left_value == right_value); |
| 2017 | break; |
| 2018 | case greater_equal: |
| 2019 | dest->Goto(left_value >= right_value); |
| 2020 | break; |
| 2021 | default: |
| 2022 | UNREACHABLE(); |
| 2023 | } |
| 2024 | } else { |
| 2025 | // Only one side is a constant 1-character string. |
| 2026 | // If left side is a constant 1-character string, reverse the operands. |
| 2027 | // Since one side is a constant string, conversion order does not matter. |
| 2028 | if (left_side_constant_1_char_string) { |
| 2029 | Result temp = left_side; |
| 2030 | left_side = right_side; |
| 2031 | right_side = temp; |
| 2032 | cc = ReverseCondition(cc); |
| 2033 | // This may reintroduce greater or less_equal as the value of cc. |
| 2034 | // CompareStub and the inline code both support all values of cc. |
| 2035 | } |
| 2036 | // Implement comparison against a constant string, inlining the case |
| 2037 | // where both sides are strings. |
| 2038 | left_side.ToRegister(); |
| 2039 | |
| 2040 | // Here we split control flow to the stub call and inlined cases |
| 2041 | // before finally splitting it to the control destination. We use |
| 2042 | // a jump target and branching to duplicate the virtual frame at |
| 2043 | // the first split. We manually handle the off-frame references |
| 2044 | // by reconstituting them on the non-fall-through path. |
| 2045 | JumpTarget is_not_string, is_string; |
| 2046 | Register left_reg = left_side.reg(); |
| 2047 | Handle<Object> right_val = right_side.handle(); |
| 2048 | ASSERT(StringShape(String::cast(*right_val)).IsSymbol()); |
| 2049 | Condition is_smi = masm()->CheckSmi(left_reg); |
| 2050 | is_not_string.Branch(is_smi, &left_side); |
| 2051 | Result temp = allocator_->Allocate(); |
| 2052 | ASSERT(temp.is_valid()); |
| 2053 | __ movq(temp.reg(), |
| 2054 | FieldOperand(left_reg, HeapObject::kMapOffset)); |
| 2055 | __ movzxbl(temp.reg(), |
| 2056 | FieldOperand(temp.reg(), Map::kInstanceTypeOffset)); |
| 2057 | // If we are testing for equality then make use of the symbol shortcut. |
| 2058 | // Check if the left hand side has the same type as the right hand |
| 2059 | // side (which is always a symbol). |
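| | // Symbols are interned, so two symbols are equal exactly when they |
| | // are the same object, and an identity compare suffices. |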
| 2060 | if (cc == equal) { |
| 2061 | Label not_a_symbol; |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 2062 | STATIC_ASSERT(kSymbolTag != 0); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2063 | // Ensure that no non-strings have the symbol bit set. |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 2064 | STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2065 | __ testb(temp.reg(), Immediate(kIsSymbolMask)); // Test the symbol bit. |
| 2066 | __ j(zero, ¬_a_symbol); |
| 2067 | // They are symbols, so do identity compare. |
| 2068 | __ Cmp(left_reg, right_side.handle()); |
| 2069 | dest->true_target()->Branch(equal); |
| 2070 | dest->false_target()->Branch(not_equal); |
| 2071 | __ bind(¬_a_symbol); |
| 2072 | } |
| 2073 | // Call the compare stub if the left side is not a flat ASCII string. |
| 2074 | __ andb(temp.reg(), |
| 2075 | Immediate(kIsNotStringMask | |
| 2076 | kStringRepresentationMask | |
| 2077 | kStringEncodingMask)); |
| 2078 | __ cmpb(temp.reg(), |
| 2079 | Immediate(kStringTag | kSeqStringTag | kAsciiStringTag)); |
| 2080 | temp.Unuse(); |
| 2081 | is_string.Branch(equal, &left_side); |
| 2082 | |
| 2083 | // Set up and call the compare stub. |
| 2084 | is_not_string.Bind(&left_side); |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 2085 | CompareFlags flags = |
| 2086 | static_cast<CompareFlags>(CANT_BOTH_BE_NAN | NO_SMI_CODE_IN_STUB); |
| 2087 | CompareStub stub(cc, strict, flags); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2088 | Result result = frame_->CallStub(&stub, &left_side, &right_side); |
| 2089 | result.ToRegister(); |
| 2090 | __ testq(result.reg(), result.reg()); |
| 2091 | result.Unuse(); |
| 2092 | dest->true_target()->Branch(cc); |
| 2093 | dest->false_target()->Jump(); |
| 2094 | |
| 2095 | is_string.Bind(&left_side); |
| 2096 | // left_side is a sequential ASCII string. |
| 2097 | ASSERT(left_side.reg().is(left_reg)); |
| 2098 | right_side = Result(right_val); |
| 2099 | Result temp2 = allocator_->Allocate(); |
| 2100 | ASSERT(temp2.is_valid()); |
| 2101 | // Test string equality and comparison. |
| 2102 | if (cc == equal) { |
| 2103 | Label comparison_done; |
| 2104 | __ SmiCompare(FieldOperand(left_side.reg(), String::kLengthOffset), |
| 2105 | Smi::FromInt(1)); |
| 2106 | __ j(not_equal, &comparison_done); |
| 2107 | uint8_t char_value = |
| 2108 | static_cast<uint8_t>(String::cast(*right_val)->Get(0)); |
| 2109 | __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize), |
| 2110 | Immediate(char_value)); |
| 2111 | __ bind(&comparison_done); |
| 2112 | } else { |
| 2113 | __ movq(temp2.reg(), |
| 2114 | FieldOperand(left_side.reg(), String::kLengthOffset)); |
| 2115 | __ SmiSubConstant(temp2.reg(), temp2.reg(), Smi::FromInt(1)); |
| 2116 | Label comparison; |
| 2117 | // If the length is 0 then the subtraction gave -1 which compares less |
| 2118 | // than any character. |
| 2119 | __ j(negative, &comparison); |
| 2120 | // Otherwise load the first character. |
| 2121 | __ movzxbl(temp2.reg(), |
| 2122 | FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize)); |
| 2123 | __ bind(&comparison); |
| 2124 | // Compare the first character of the string with the |
| 2125 | // constant 1-character string. |
| 2126 | uint8_t char_value = |
| 2127 | static_cast<uint8_t>(String::cast(*right_side.handle())->Get(0)); |
| 2128 | __ cmpb(temp2.reg(), Immediate(char_value)); |
| 2129 | Label characters_were_different; |
| 2130 | __ j(not_equal, &characters_were_different); |
| 2131 | // If the first character is the same then the long string sorts after |
| 2132 | // the short one. |
| 2133 | __ SmiCompare(FieldOperand(left_side.reg(), String::kLengthOffset), |
| 2134 | Smi::FromInt(1)); |
| 2135 | __ bind(&characters_were_different); |
| 2136 | } |
| 2137 | temp2.Unuse(); |
| 2138 | left_side.Unuse(); |
| 2139 | right_side.Unuse(); |
| 2140 | dest->Split(cc); |
| 2141 | } |
| 2142 | } else { |
| 2143 | // Neither side is a constant Smi, constant 1-char string, or constant null. |
| 2144 | // If either side is a non-smi constant, or known to be a heap number, |
| 2145 | // skip the smi check. |
| 2146 | bool known_non_smi = |
| 2147 | (left_side.is_constant() && !left_side.handle()->IsSmi()) || |
| 2148 | (right_side.is_constant() && !right_side.handle()->IsSmi()) || |
| 2149 | left_side.type_info().IsDouble() || |
| 2150 | right_side.type_info().IsDouble(); |
| 2151 | |
| 2152 | NaNInformation nan_info = |
| 2153 | (CouldBeNaN(left_side) && CouldBeNaN(right_side)) ? |
| 2154 | kBothCouldBeNaN : |
| 2155 | kCantBothBeNaN; |
| 2156 | |
| 2157 | // Inline number comparison, handling any combination of smis and heap |
| 2158 | // numbers, if: |
| 2159 | // the code is in a loop, |
| 2160 | // the compare operation is different from equal, and |
| 2161 | // the compare is not a for-loop comparison. |
| 2162 | // The reason for excluding equal is that it will most likely be done |
| 2163 | // with smis (not heap numbers), and the code for comparing smis is |
| 2164 | // inlined separately. The same reasoning applies to for-loop |
| 2165 | // comparisons, which will also most likely be smi comparisons. |
| 2166 | bool is_loop_condition = (node->AsExpression() != NULL) |
| 2167 | && node->AsExpression()->is_loop_condition(); |
| 2168 | bool inline_number_compare = |
| 2169 | loop_nesting() > 0 && cc != equal && !is_loop_condition; |
| 2170 | |
| 2171 | // Left and right needed in registers for the following code. |
| 2172 | left_side.ToRegister(); |
| 2173 | right_side.ToRegister(); |
| 2174 | |
| 2175 | if (known_non_smi) { |
| 2176 | // Inlined equality check: |
| 2177 | // If at least one of the objects is not NaN, then if the objects |
| 2178 | // are identical, they are equal. |
| 2179 | if (nan_info == kCantBothBeNaN && cc == equal) { |
| 2180 | __ cmpq(left_side.reg(), right_side.reg()); |
| 2181 | dest->true_target()->Branch(equal); |
| 2182 | } |
| 2183 | |
| 2184 | // Inlined number comparison: |
| 2185 | if (inline_number_compare) { |
| 2186 | GenerateInlineNumberComparison(&left_side, &right_side, cc, dest); |
| 2187 | } |
| 2188 | |
| 2189 | // End of in-line compare, call out to the compare stub. Don't include |
| 2190 | // number comparison in the stub if it was inlined. |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 2191 | CompareFlags flags = ComputeCompareFlags(nan_info, inline_number_compare); |
| 2192 | CompareStub stub(cc, strict, flags); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2193 | Result answer = frame_->CallStub(&stub, &left_side, &right_side); |
| 2194 | __ testq(answer.reg(), answer.reg()); // Sets both zero and sign flag. |
| 2195 | answer.Unuse(); |
| 2196 | dest->Split(cc); |
| 2197 | } else { |
| 2198 | // Here we split control flow to the stub call and inlined cases |
| 2199 | // before finally splitting it to the control destination. We use |
| 2200 | // a jump target and branching to duplicate the virtual frame at |
| 2201 | // the first split. We manually handle the off-frame references |
| 2202 | // by reconstituting them on the non-fall-through path. |
| 2203 | JumpTarget is_smi; |
| 2204 | Register left_reg = left_side.reg(); |
| 2205 | Register right_reg = right_side.reg(); |
| 2206 | |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 2207 | // In-line check for comparing two smis. |
| 2208 | JumpIfBothSmiUsingTypeInfo(&left_side, &right_side, &is_smi); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2209 | |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 2210 | if (has_valid_frame()) { |
| 2211 | // Inline the equality check if both operands can't be a NaN. If both |
| 2212 | // objects are the same they are equal. |
| 2213 | if (nan_info == kCantBothBeNaN && cc == equal) { |
| 2214 | __ cmpq(left_side.reg(), right_side.reg()); |
| 2215 | dest->true_target()->Branch(equal); |
| 2216 | } |
| 2217 | |
| 2218 | // Inlined number comparison: |
| 2219 | if (inline_number_compare) { |
| 2220 | GenerateInlineNumberComparison(&left_side, &right_side, cc, dest); |
| 2221 | } |
| 2222 | |
| 2223 | // End of in-line compare, call out to the compare stub. Don't include |
| 2224 | // number comparison in the stub if it was inlined. |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 2225 | CompareFlags flags = |
| 2226 | ComputeCompareFlags(nan_info, inline_number_compare); |
| 2227 | CompareStub stub(cc, strict, flags); |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 2228 | Result answer = frame_->CallStub(&stub, &left_side, &right_side); |
| 2229 | __ testq(answer.reg(), answer.reg()); // Sets both zero and sign flags. |
| 2230 | answer.Unuse(); |
| 2231 | if (is_smi.is_linked()) { |
| 2232 | dest->true_target()->Branch(cc); |
| 2233 | dest->false_target()->Jump(); |
| 2234 | } else { |
| 2235 | dest->Split(cc); |
| 2236 | } |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2237 | } |
| 2238 | |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 2239 | if (is_smi.is_linked()) { |
| 2240 | is_smi.Bind(); |
| 2241 | left_side = Result(left_reg); |
| 2242 | right_side = Result(right_reg); |
| 2243 | __ SmiCompare(left_side.reg(), right_side.reg()); |
| 2244 | right_side.Unuse(); |
| 2245 | left_side.Unuse(); |
| 2246 | dest->Split(cc); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2247 | } |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2248 | } |
| 2249 | } |
| 2250 | } |
| 2251 | |
| 2252 | |
| 2253 | void CodeGenerator::ConstantSmiComparison(Condition cc, |
| 2254 | bool strict, |
| 2255 | ControlDestination* dest, |
| 2256 | Result* left_side, |
| 2257 | Result* right_side, |
| 2258 | bool left_side_constant_smi, |
| 2259 | bool right_side_constant_smi, |
| 2260 | bool is_loop_condition) { |
| 2261 | if (left_side_constant_smi && right_side_constant_smi) { |
| 2262 | // Trivial case, comparing two constants. |
| 2263 | int left_value = Smi::cast(*left_side->handle())->value(); |
| 2264 | int right_value = Smi::cast(*right_side->handle())->value(); |
| 2265 | switch (cc) { |
| 2266 | case less: |
| 2267 | dest->Goto(left_value < right_value); |
| 2268 | break; |
| 2269 | case equal: |
| 2270 | dest->Goto(left_value == right_value); |
| 2271 | break; |
| 2272 | case greater_equal: |
| 2273 | dest->Goto(left_value >= right_value); |
| 2274 | break; |
| 2275 | default: |
| 2276 | UNREACHABLE(); |
| 2277 | } |
| 2278 | } else { |
| 2279 | // Only one side is a constant Smi. |
| 2280 | // If left side is a constant Smi, reverse the operands. |
| 2281 | // Since one side is a constant Smi, conversion order does not matter. |
| 2282 | if (left_side_constant_smi) { |
| 2283 | Result* temp = left_side; |
| 2284 | left_side = right_side; |
| 2285 | right_side = temp; |
| 2286 | cc = ReverseCondition(cc); |
| 2287 | // This may re-introduce greater or less_equal as the value of cc. |
| 2288 | // CompareStub and the inline code both support all values of cc. |
| 2289 | } |
| 2290 | // Implement comparison against a constant Smi, inlining the case |
| 2291 | // where both sides are Smis. |
| 2292 | left_side->ToRegister(); |
| 2293 | Register left_reg = left_side->reg(); |
| 2294 | Smi* constant_smi = Smi::cast(*right_side->handle()); |
| 2295 | |
| 2296 | if (left_side->is_smi()) { |
| 2297 | if (FLAG_debug_code) { |
| 2298 | __ AbortIfNotSmi(left_reg); |
| 2299 | } |
| 2300 | // Test smi equality and comparison by signed int comparison. |
| 2301 | // Both sides are smis, so we can use an Immediate. |
| 2302 | __ SmiCompare(left_reg, constant_smi); |
| 2303 | left_side->Unuse(); |
| 2304 | right_side->Unuse(); |
| 2305 | dest->Split(cc); |
| 2306 | } else { |
| 2307 | // Only the case where the left side could possibly be a non-smi is left. |
| 2308 | JumpTarget is_smi; |
| 2309 | if (cc == equal) { |
| 2310 | // We can do the equality comparison before the smi check. |
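| | // This is safe because a heap pointer never shares a bit pattern with |
| | // a smi, so compare-equal can only succeed if left_reg actually holds |
| | // constant_smi. |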
| 2311 | __ SmiCompare(left_reg, constant_smi); |
| 2312 | dest->true_target()->Branch(equal); |
| 2313 | Condition left_is_smi = masm_->CheckSmi(left_reg); |
| 2314 | dest->false_target()->Branch(left_is_smi); |
| 2315 | } else { |
| 2316 | // Do the smi check, then the comparison. |
| 2317 | Condition left_is_smi = masm_->CheckSmi(left_reg); |
| 2318 | is_smi.Branch(left_is_smi, left_side, right_side); |
| 2319 | } |
| 2320 | |
| 2321 | // Jump or fall through to here if we are comparing a non-smi to a |
| 2322 | // constant smi. If the non-smi is a heap number and this is not |
| 2323 | // a loop condition, inline the floating point code. |
| 2324 | if (!is_loop_condition) { |
| 2325 | // Right side is a constant smi and left side has been checked |
| 2326 | // not to be a smi. |
| 2327 | JumpTarget not_number; |
| 2328 | __ Cmp(FieldOperand(left_reg, HeapObject::kMapOffset), |
| 2329 | Factory::heap_number_map()); |
| 2330 | not_number.Branch(not_equal, left_side); |
| 2331 | __ movsd(xmm1, |
| 2332 | FieldOperand(left_reg, HeapNumber::kValueOffset)); |
| 2333 | int value = constant_smi->value(); |
| 2334 | if (value == 0) { |
| 2335 | __ xorpd(xmm0, xmm0); |
| 2336 | } else { |
| 2337 | Result temp = allocator()->Allocate(); |
| 2338 | __ movl(temp.reg(), Immediate(value)); |
| 2339 | __ cvtlsi2sd(xmm0, temp.reg()); |
| 2340 | temp.Unuse(); |
| 2341 | } |
| 2342 | __ ucomisd(xmm1, xmm0); |
| 2343 | // Jump to builtin for NaN. |
| 2344 | not_number.Branch(parity_even, left_side); |
| 2345 | left_side->Unuse(); |
| 2346 | dest->true_target()->Branch(DoubleCondition(cc)); |
| 2347 | dest->false_target()->Jump(); |
| 2348 | not_number.Bind(left_side); |
| 2349 | } |
| 2350 | |
| 2351 | // Set up and call the compare stub. |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 2352 | CompareFlags flags = |
| 2353 | static_cast<CompareFlags>(CANT_BOTH_BE_NAN | NO_SMI_CODE_IN_STUB); |
| 2354 | CompareStub stub(cc, strict, flags); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2355 | Result result = frame_->CallStub(&stub, left_side, right_side); |
| 2356 | result.ToRegister(); |
| 2357 | __ testq(result.reg(), result.reg()); |
| 2358 | result.Unuse(); |
| 2359 | if (cc == equal) { |
| 2360 | dest->Split(cc); |
| 2361 | } else { |
| 2362 | dest->true_target()->Branch(cc); |
| 2363 | dest->false_target()->Jump(); |
| 2364 | |
| 2365 | // It is important for performance for this case to be at the end. |
| 2366 | is_smi.Bind(left_side, right_side); |
| 2367 | __ SmiCompare(left_reg, constant_smi); |
| 2368 | left_side->Unuse(); |
| 2369 | right_side->Unuse(); |
| 2370 | dest->Split(cc); |
| 2371 | } |
| 2372 | } |
| 2373 | } |
| 2374 | } |
| 2375 | |
| 2376 | |
| 2377 | // Load a comparison operand into an XMM register. Jump to the not_numbers |
| 2378 | // jump target, passing the left and right results, if the operand is not a number. |
| 2379 | static void LoadComparisonOperand(MacroAssembler* masm_, |
| 2380 | Result* operand, |
| 2381 | XMMRegister xmm_reg, |
| 2382 | Result* left_side, |
| 2383 | Result* right_side, |
| 2384 | JumpTarget* not_numbers) { |
| 2385 | Label done; |
| 2386 | if (operand->type_info().IsDouble()) { |
| 2387 | // Operand is known to be a heap number, just load it. |
| 2388 | __ movsd(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset)); |
| 2389 | } else if (operand->type_info().IsSmi()) { |
| 2390 | // Operand is known to be a smi. Convert it to double and keep the original |
| 2391 | // smi. |
| 2392 | __ SmiToInteger32(kScratchRegister, operand->reg()); |
| 2393 | __ cvtlsi2sd(xmm_reg, kScratchRegister); |
| 2394 | } else { |
| 2395 | // Operand type not known, check for smi or heap number. |
| 2396 | Label smi; |
| 2397 | __ JumpIfSmi(operand->reg(), &smi); |
| 2398 | if (!operand->type_info().IsNumber()) { |
| 2399 | __ LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex); |
| 2400 | __ cmpq(FieldOperand(operand->reg(), HeapObject::kMapOffset), |
| 2401 | kScratchRegister); |
| 2402 | not_numbers->Branch(not_equal, left_side, right_side, taken); |
| 2403 | } |
| 2404 | __ movsd(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset)); |
| 2405 | __ jmp(&done); |
| 2406 | |
| 2407 | __ bind(&smi); |
| 2408 | // Convert the smi to double and keep the original smi. |
| 2409 | __ SmiToInteger32(kScratchRegister, operand->reg()); |
| 2410 | __ cvtlsi2sd(xmm_reg, kScratchRegister); |
| 2411 | __ jmp(&done); |
| 2412 | } |
| 2413 | __ bind(&done); |
| 2414 | } |
| 2415 | |
| 2416 | |
| 2417 | void CodeGenerator::GenerateInlineNumberComparison(Result* left_side, |
| 2418 | Result* right_side, |
| 2419 | Condition cc, |
| 2420 | ControlDestination* dest) { |
| 2421 | ASSERT(left_side->is_register()); |
| 2422 | ASSERT(right_side->is_register()); |
| 2423 | |
| 2424 | JumpTarget not_numbers; |
| 2425 | // Load left and right operand into registers xmm0 and xmm1 and compare. |
| 2426 | LoadComparisonOperand(masm_, left_side, xmm0, left_side, right_side, |
| 2427 | ¬_numbers); |
| 2428 | LoadComparisonOperand(masm_, right_side, xmm1, left_side, right_side, |
| 2429 | ¬_numbers); |
| 2430 | __ ucomisd(xmm0, xmm1); |
| 2431 | // Bail out if a NaN is involved. |
| 2432 | not_numbers.Branch(parity_even, left_side, right_side); |
| 2433 | |
| 2434 | // Split to destination targets based on comparison. |
| 2435 | left_side->Unuse(); |
| 2436 | right_side->Unuse(); |
| 2437 | dest->true_target()->Branch(DoubleCondition(cc)); |
| 2438 | dest->false_target()->Jump(); |
| 2439 | |
| 2440 | not_numbers.Bind(left_side, right_side); |
| 2441 | } |
| 2442 | |
| 2443 | |
| 2444 | // Call the function just below TOS on the stack with the given |
| 2445 | // arguments. The receiver is the TOS. |
| 2446 | void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args, |
| 2447 | CallFunctionFlags flags, |
| 2448 | int position) { |
| 2449 | // Push the arguments ("left-to-right") on the stack. |
| 2450 | int arg_count = args->length(); |
| 2451 | for (int i = 0; i < arg_count; i++) { |
| 2452 | Load(args->at(i)); |
| 2453 | frame_->SpillTop(); |
| 2454 | } |
| 2455 | |
| 2456 | // Record the position for debugging purposes. |
| 2457 | CodeForSourcePosition(position); |
| 2458 | |
| 2459 | // Use the shared code stub to call the function. |
| 2460 | InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP; |
| 2461 | CallFunctionStub call_function(arg_count, in_loop, flags); |
| 2462 | Result answer = frame_->CallStub(&call_function, arg_count + 1); |
| 2463 | // Restore context and replace function on the stack with the |
| 2464 | // result of the stub invocation. |
| 2465 | frame_->RestoreContextRegister(); |
| 2466 | frame_->SetElementAt(0, &answer); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2467 | } |
| 2468 | |
| 2469 | |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2470 | void CodeGenerator::CallApplyLazy(Expression* applicand, |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2471 | Expression* receiver, |
| 2472 | VariableProxy* arguments, |
| 2473 | int position) { |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2474 | // An optimized implementation of expressions of the form |
| 2475 | // x.apply(y, arguments). |
| 2476 | // If the arguments object of the scope has not been allocated, |
| 2477 | // and x.apply is Function.prototype.apply, this optimization |
| 2478 | // just copies y and the arguments of the current function on the |
| 2479 | // stack, as receiver and arguments, and calls x. |
| 2480 | // In the implementation comments, we call x the applicand |
| 2481 | // and y the receiver. |
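| | // For example (illustrative only): |
| | // function f() { return g.apply(this, arguments); } |
| | // can invoke g directly on the caller's stacked arguments. |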
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2482 | ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION); |
| 2483 | ASSERT(arguments->IsArguments()); |
| 2484 | |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2485 | // Load applicand.apply onto the stack. This will usually |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2486 | // give us a megamorphic load site. Not super, but it works. |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2487 | Load(applicand); |
Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 2488 | frame()->Dup(); |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2489 | Handle<String> name = Factory::LookupAsciiSymbol("apply"); |
| 2490 | frame()->Push(name); |
| 2491 | Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET); |
| 2492 | __ nop(); |
| 2493 | frame()->Push(&answer); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2494 | |
| 2495 | // Load the receiver and the existing arguments object onto the |
| 2496 | // expression stack. Avoid allocating the arguments object here. |
| 2497 | Load(receiver); |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 2498 | LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2499 | |
| 2500 | // Emit the source position information after having loaded the |
| 2501 | // receiver and the arguments. |
| 2502 | CodeForSourcePosition(position); |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2503 | // Contents of frame at this point: |
| 2504 | // Frame[0]: arguments object of the current function or the hole. |
| 2505 | // Frame[1]: receiver |
| 2506 | // Frame[2]: applicand.apply |
| 2507 | // Frame[3]: applicand. |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2508 | |
| 2509 | // Check if the arguments object has been lazily allocated |
| 2510 | // already. If so, just use that instead of copying the arguments |
| 2511 | // from the stack. This also deals with cases where a local variable |
| 2512 | // named 'arguments' has been introduced. |
| 2513 | frame_->Dup(); |
| 2514 | Result probe = frame_->Pop(); |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2515 | { VirtualFrame::SpilledScope spilled_scope; |
| 2516 | Label slow, done; |
| 2517 | bool try_lazy = true; |
| 2518 | if (probe.is_constant()) { |
| 2519 | try_lazy = probe.handle()->IsTheHole(); |
| 2520 | } else { |
| 2521 | __ CompareRoot(probe.reg(), Heap::kTheHoleValueRootIndex); |
| 2522 | probe.Unuse(); |
| 2523 | __ j(not_equal, &slow); |
| 2524 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2525 | |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2526 | if (try_lazy) { |
| 2527 | Label build_args; |
| 2528 | // Get rid of the arguments object probe. |
| 2529 | frame_->Drop(); // Can be called on a spilled frame. |
| 2530 | // Stack now has 3 elements on it. |
| 2531 | // Contents of stack at this point: |
| 2532 | // rsp[0]: receiver |
| 2533 | // rsp[1]: applicand.apply |
| 2534 | // rsp[2]: applicand. |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2535 | |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2536 | // Check that the receiver really is a JavaScript object. |
| 2537 | __ movq(rax, Operand(rsp, 0)); |
| 2538 | Condition is_smi = masm_->CheckSmi(rax); |
| 2539 | __ j(is_smi, &build_args); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2540 | // We allow all JSObjects including JSFunctions. As long as |
| 2541 | // JS_FUNCTION_TYPE is the last instance type and it is right |
| 2542 | // after LAST_JS_OBJECT_TYPE, we do not have to check the upper |
| 2543 | // bound. |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 2544 | STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); |
| 2545 | STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2546 | __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx); |
| 2547 | __ j(below, &build_args); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2548 | |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2549 | // Check that applicand.apply is Function.prototype.apply. |
| 2550 | __ movq(rax, Operand(rsp, kPointerSize)); |
| 2551 | is_smi = masm_->CheckSmi(rax); |
| 2552 | __ j(is_smi, &build_args); |
| 2553 | __ CmpObjectType(rax, JS_FUNCTION_TYPE, rcx); |
| 2554 | __ j(not_equal, &build_args); |
Steve Block | 791712a | 2010-08-27 10:21:07 +0100 | [diff] [blame] | 2555 | __ movq(rcx, FieldOperand(rax, JSFunction::kCodeEntryOffset)); |
| 2556 | __ subq(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag)); |
Leon Clarke | eab96aa | 2010-01-27 16:31:12 +0000 | [diff] [blame] | 2557 | Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply)); |
Kristian Monsen | 80d68ea | 2010-09-08 11:05:35 +0100 | [diff] [blame] | 2558 | __ Cmp(rcx, apply_code); |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2559 | __ j(not_equal, &build_args); |
| 2560 | |
| 2561 | // Check that applicand is a function. |
| 2562 | __ movq(rdi, Operand(rsp, 2 * kPointerSize)); |
| 2563 | is_smi = masm_->CheckSmi(rdi); |
| 2564 | __ j(is_smi, &build_args); |
| 2565 | __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
| 2566 | __ j(not_equal, &build_args); |
| 2567 | |
| 2568 | // Copy the arguments to this function possibly from the |
| 2569 | // adaptor frame below it. |
| 2570 | Label invoke, adapted; |
| 2571 | __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 2572 | __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset), |
| 2573 | Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 2574 | __ j(equal, &adapted); |
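| | // (Sketch of the convention relied on here: an arguments adaptor |
| | // frame stores the sentinel Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR) |
| | // where an ordinary frame keeps its context, so this single |
| | // comparison tells the two caller frame layouts apart.) |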
| 2575 | |
| 2576 | // No arguments adaptor frame. Copy fixed number of arguments. |
Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 2577 | __ Set(rax, scope()->num_parameters()); |
Andrei Popescu | 3100271 | 2010-02-23 13:46:05 +0000 | [diff] [blame] | 2578 | for (int i = 0; i < scope()->num_parameters(); i++) { |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2579 | __ push(frame_->ParameterAt(i)); |
| 2580 | } |
| 2581 | __ jmp(&invoke); |
| 2582 | |
| 2583 |       // Arguments adaptor frame present. Copy the arguments from there, |
| 2584 |       // but cap how many we copy to avoid overflowing the stack. |
| 2585 | __ bind(&adapted); |
| 2586 | static const uint32_t kArgumentsLimit = 1 * KB; |
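| | // Worked numbers (assuming KB == 1024 and 8-byte pointers on x64): |
| | // at most 1024 arguments are copied, i.e. at most 8KB of stack. |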
Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 2587 | __ SmiToInteger32(rax, |
| 2588 | Operand(rdx, |
| 2589 | ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 2590 | __ movl(rcx, rax); |
| 2591 | __ cmpl(rax, Immediate(kArgumentsLimit)); |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2592 | __ j(above, &build_args); |
| 2593 | |
| 2594 | // Loop through the arguments pushing them onto the execution |
| 2595 | // stack. We don't inform the virtual frame of the push, so we don't |
| 2596 | // have to worry about getting rid of the elements from the virtual |
| 2597 | // frame. |
| 2598 | Label loop; |
| 2599 | // rcx is a small non-negative integer, due to the test above. |
| 2600 | __ testl(rcx, rcx); |
| 2601 | __ j(zero, &invoke); |
| 2602 | __ bind(&loop); |
| 2603 | __ push(Operand(rdx, rcx, times_pointer_size, 1 * kPointerSize)); |
| 2604 | __ decl(rcx); |
| 2605 | __ j(not_zero, &loop); |
| 2606 | |
| 2607 | // Invoke the function. |
| 2608 | __ bind(&invoke); |
| 2609 | ParameterCount actual(rax); |
| 2610 | __ InvokeFunction(rdi, actual, CALL_FUNCTION); |
| 2611 | // Drop applicand.apply and applicand from the stack, and push |
| 2612 | // the result of the function call, but leave the spilled frame |
| 2613 | // unchanged, with 3 elements, so it is correct when we compile the |
| 2614 | // slow-case code. |
| 2615 | __ addq(rsp, Immediate(2 * kPointerSize)); |
| 2616 | __ push(rax); |
| 2617 | // Stack now has 1 element: |
| 2618 | // rsp[0]: result |
| 2619 | __ jmp(&done); |
| 2620 | |
| 2621 |       // Slow case: allocate the arguments object, since we know it isn't |
| 2622 |       // there, and fall through to the generic code that calls |
| 2623 |       // applicand.apply. |
| 2624 | __ bind(&build_args); |
| 2625 |       // Stack has 3 elements here, because we jumped from a point where: |
| 2626 | // rsp[0]: receiver |
| 2627 | // rsp[1]: applicand.apply |
| 2628 | // rsp[2]: applicand. |
| 2629 | |
| 2630 | // StoreArgumentsObject requires a correct frame, and may modify it. |
| 2631 | Result arguments_object = StoreArgumentsObject(false); |
| 2632 | frame_->SpillAll(); |
| 2633 | arguments_object.ToRegister(); |
| 2634 | frame_->EmitPush(arguments_object.reg()); |
| 2635 | arguments_object.Unuse(); |
| 2636 | // Stack and frame now have 4 elements. |
| 2637 | __ bind(&slow); |
Leon Clarke | eab96aa | 2010-01-27 16:31:12 +0000 | [diff] [blame] | 2638 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2639 | |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2640 | // Generic computation of x.apply(y, args) with no special optimization. |
| 2641 | // Flip applicand.apply and applicand on the stack, so |
| 2642 | // applicand looks like the receiver of the applicand.apply call. |
| 2643 | // Then process it as a normal function call. |
| 2644 | __ movq(rax, Operand(rsp, 3 * kPointerSize)); |
| 2645 | __ movq(rbx, Operand(rsp, 2 * kPointerSize)); |
| 2646 | __ movq(Operand(rsp, 2 * kPointerSize), rax); |
| 2647 | __ movq(Operand(rsp, 3 * kPointerSize), rbx); |
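| | // (Illustrative stack after the swap, top to bottom: arguments |
| | // object, receiver, applicand, applicand.apply. That is a plain |
| | // call of applicand.apply with applicand as its receiver and the |
| | // original receiver and arguments object as its two arguments.) |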
Leon Clarke | eab96aa | 2010-01-27 16:31:12 +0000 | [diff] [blame] | 2648 | |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 2649 | CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS); |
| 2650 | Result res = frame_->CallStub(&call_function, 3); |
| 2651 | // The function and its two arguments have been dropped. |
| 2652 | frame_->Drop(1); // Drop the receiver as well. |
| 2653 | res.ToRegister(); |
| 2654 | frame_->EmitPush(res.reg()); |
| 2655 | // Stack now has 1 element: |
| 2656 | // rsp[0]: result |
| 2657 | if (try_lazy) __ bind(&done); |
| 2658 | } // End of spilled scope. |
| 2659 | // Restore the context register after a call. |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2660 | frame_->RestoreContextRegister(); |
| 2661 | } |
| 2662 | |
| 2663 | |
| 2664 | class DeferredStackCheck: public DeferredCode { |
| 2665 | public: |
| 2666 | DeferredStackCheck() { |
| 2667 | set_comment("[ DeferredStackCheck"); |
| 2668 | } |
| 2669 | |
| 2670 | virtual void Generate(); |
| 2671 | }; |
| 2672 | |
| 2673 | |
| 2674 | void DeferredStackCheck::Generate() { |
| 2675 | StackCheckStub stub; |
| 2676 | __ CallStub(&stub); |
| 2677 | } |
| 2678 | |
| 2679 | |
| 2680 | void CodeGenerator::CheckStack() { |
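| | // (Sketch of the mechanism: rsp is compared against the stack |
| | // limit held in the root list; only the unlikely below-limit case |
| | // takes the out-of-line deferred code, which calls StackCheckStub.) |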
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 2681 | DeferredStackCheck* deferred = new DeferredStackCheck; |
| 2682 | __ CompareRoot(rsp, Heap::kStackLimitRootIndex); |
| 2683 | deferred->Branch(below); |
| 2684 | deferred->BindExit(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2685 | } |
| 2686 | |
| 2687 | |
| 2688 | void CodeGenerator::VisitAndSpill(Statement* statement) { |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2689 | ASSERT(in_spilled_code()); |
| 2690 | set_in_spilled_code(false); |
| 2691 | Visit(statement); |
| 2692 | if (frame_ != NULL) { |
| 2693 | frame_->SpillAll(); |
| 2694 | } |
| 2695 | set_in_spilled_code(true); |
| 2696 | } |
| 2697 | |
| 2698 | |
| 2699 | void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) { |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2700 | #ifdef DEBUG |
| 2701 | int original_height = frame_->height(); |
| 2702 | #endif |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2703 | ASSERT(in_spilled_code()); |
| 2704 | set_in_spilled_code(false); |
| 2705 | VisitStatements(statements); |
| 2706 | if (frame_ != NULL) { |
| 2707 | frame_->SpillAll(); |
| 2708 | } |
| 2709 | set_in_spilled_code(true); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2710 | |
| 2711 | ASSERT(!has_valid_frame() || frame_->height() == original_height); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2712 | } |
| 2713 | |
| 2714 | |
| 2715 | void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) { |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2716 | #ifdef DEBUG |
| 2717 | int original_height = frame_->height(); |
| 2718 | #endif |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2719 | ASSERT(!in_spilled_code()); |
| 2720 | for (int i = 0; has_valid_frame() && i < statements->length(); i++) { |
| 2721 | Visit(statements->at(i)); |
| 2722 | } |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2723 | ASSERT(!has_valid_frame() || frame_->height() == original_height); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2724 | } |
| 2725 | |
| 2726 | |
| 2727 | void CodeGenerator::VisitBlock(Block* node) { |
| 2728 | ASSERT(!in_spilled_code()); |
| 2729 | Comment cmnt(masm_, "[ Block"); |
| 2730 | CodeForStatementPosition(node); |
| 2731 | node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 2732 | VisitStatements(node->statements()); |
| 2733 | if (node->break_target()->is_linked()) { |
| 2734 | node->break_target()->Bind(); |
| 2735 | } |
| 2736 | node->break_target()->Unuse(); |
| 2737 | } |
| 2738 | |
| 2739 | |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2740 | void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { |
| 2741 | // Call the runtime to declare the globals. The inevitable call |
| 2742 | // will sync frame elements to memory anyway, so we do it eagerly to |
| 2743 | // allow us to push the arguments directly into place. |
| 2744 | frame_->SyncRange(0, frame_->element_count() - 1); |
| 2745 | |
| 2746 | __ movq(kScratchRegister, pairs, RelocInfo::EMBEDDED_OBJECT); |
| 2747 | frame_->EmitPush(rsi); // The context is the first argument. |
| 2748 | frame_->EmitPush(kScratchRegister); |
| 2749 | frame_->EmitPush(Smi::FromInt(is_eval() ? 1 : 0)); |
| 2750 | Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3); |
| 2751 | // Return value is ignored. |
| 2752 | } |
| 2753 | |
| 2754 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2755 | void CodeGenerator::VisitDeclaration(Declaration* node) { |
| 2756 | Comment cmnt(masm_, "[ Declaration"); |
| 2757 | Variable* var = node->proxy()->var(); |
| 2758 | ASSERT(var != NULL); // must have been resolved |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 2759 | Slot* slot = var->AsSlot(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2760 | |
| 2761 | // If it was not possible to allocate the variable at compile time, |
| 2762 | // we need to "declare" it at runtime to make sure it actually |
| 2763 | // exists in the local context. |
| 2764 | if (slot != NULL && slot->type() == Slot::LOOKUP) { |
| 2765 | // Variables with a "LOOKUP" slot were introduced as non-locals |
| 2766 | // during variable resolution and must have mode DYNAMIC. |
| 2767 | ASSERT(var->is_dynamic()); |
| 2768 | // For now, just do a runtime call. Sync the virtual frame eagerly |
| 2769 | // so we can simply push the arguments into place. |
| 2770 | frame_->SyncRange(0, frame_->element_count() - 1); |
| 2771 | frame_->EmitPush(rsi); |
| 2772 | __ movq(kScratchRegister, var->name(), RelocInfo::EMBEDDED_OBJECT); |
| 2773 | frame_->EmitPush(kScratchRegister); |
| 2774 | // Declaration nodes are always introduced in one of two modes. |
| 2775 | ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST); |
| 2776 | PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY; |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 2777 | frame_->EmitPush(Smi::FromInt(attr)); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2778 | // Push initial value, if any. |
| 2779 | // Note: For variables we must not push an initial value (such as |
| 2780 | // 'undefined') because we may have a (legal) redeclaration and we |
| 2781 | // must not destroy the current value. |
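| | // For example (illustrative): after |
| | //   var x = 1;  ...  var x; |
| | // the second, legal redeclaration must leave x == 1, which is why |
| | // no 'undefined' may be pushed here. |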
| 2782 | if (node->mode() == Variable::CONST) { |
| 2783 | frame_->EmitPush(Heap::kTheHoleValueRootIndex); |
| 2784 | } else if (node->fun() != NULL) { |
| 2785 | Load(node->fun()); |
| 2786 | } else { |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 2787 | frame_->EmitPush(Smi::FromInt(0)); // no initial value! |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2788 | } |
| 2789 | Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4); |
| 2790 | // Ignore the return value (declarations are statements). |
| 2791 | return; |
| 2792 | } |
| 2793 | |
| 2794 | ASSERT(!var->is_global()); |
| 2795 | |
| 2796 | // If we have a function or a constant, we need to initialize the variable. |
| 2797 | Expression* val = NULL; |
| 2798 | if (node->mode() == Variable::CONST) { |
| 2799 | val = new Literal(Factory::the_hole_value()); |
| 2800 | } else { |
| 2801 | val = node->fun(); // NULL if we don't have a function |
| 2802 | } |
| 2803 | |
| 2804 | if (val != NULL) { |
| 2805 | { |
| 2806 | // Set the initial value. |
| 2807 | Reference target(this, node->proxy()); |
| 2808 | Load(val); |
| 2809 | target.SetValue(NOT_CONST_INIT); |
| 2810 | // The reference is removed from the stack (preserving TOS) when |
| 2811 | // it goes out of scope. |
| 2812 | } |
| 2813 | // Get rid of the assigned value (declarations are statements). |
| 2814 | frame_->Drop(); |
| 2815 | } |
| 2816 | } |
| 2817 | |
| 2818 | |
| 2819 | void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) { |
| 2820 | ASSERT(!in_spilled_code()); |
| 2821 | Comment cmnt(masm_, "[ ExpressionStatement"); |
| 2822 | CodeForStatementPosition(node); |
| 2823 | Expression* expression = node->expression(); |
| 2824 | expression->MarkAsStatement(); |
| 2825 | Load(expression); |
| 2826 | // Remove the lingering expression result from the top of stack. |
| 2827 | frame_->Drop(); |
| 2828 | } |
| 2829 | |
| 2830 | |
| 2831 | void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) { |
| 2832 | ASSERT(!in_spilled_code()); |
| 2833 | Comment cmnt(masm_, "// EmptyStatement"); |
| 2834 | CodeForStatementPosition(node); |
| 2835 | // nothing to do |
| 2836 | } |
| 2837 | |
| 2838 | |
| 2839 | void CodeGenerator::VisitIfStatement(IfStatement* node) { |
| 2840 | ASSERT(!in_spilled_code()); |
| 2841 | Comment cmnt(masm_, "[ IfStatement"); |
| 2842 | // Generate different code depending on which parts of the if statement |
| 2843 | // are present or not. |
| 2844 | bool has_then_stm = node->HasThenStatement(); |
| 2845 | bool has_else_stm = node->HasElseStatement(); |
| 2846 | |
| 2847 | CodeForStatementPosition(node); |
| 2848 | JumpTarget exit; |
| 2849 | if (has_then_stm && has_else_stm) { |
| 2850 | JumpTarget then; |
| 2851 | JumpTarget else_; |
| 2852 | ControlDestination dest(&then, &else_, true); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 2853 | LoadCondition(node->condition(), &dest, true); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2854 | |
| 2855 | if (dest.false_was_fall_through()) { |
| 2856 | // The else target was bound, so we compile the else part first. |
| 2857 | Visit(node->else_statement()); |
| 2858 | |
| 2859 | // We may have dangling jumps to the then part. |
| 2860 | if (then.is_linked()) { |
| 2861 | if (has_valid_frame()) exit.Jump(); |
| 2862 | then.Bind(); |
| 2863 | Visit(node->then_statement()); |
| 2864 | } |
| 2865 | } else { |
| 2866 | // The then target was bound, so we compile the then part first. |
| 2867 | Visit(node->then_statement()); |
| 2868 | |
| 2869 | if (else_.is_linked()) { |
| 2870 | if (has_valid_frame()) exit.Jump(); |
| 2871 | else_.Bind(); |
| 2872 | Visit(node->else_statement()); |
| 2873 | } |
| 2874 | } |
| 2875 | |
| 2876 | } else if (has_then_stm) { |
| 2877 | ASSERT(!has_else_stm); |
| 2878 | JumpTarget then; |
| 2879 | ControlDestination dest(&then, &exit, true); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 2880 | LoadCondition(node->condition(), &dest, true); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2881 | |
| 2882 | if (dest.false_was_fall_through()) { |
| 2883 | // The exit label was bound. We may have dangling jumps to the |
| 2884 | // then part. |
| 2885 | if (then.is_linked()) { |
| 2886 | exit.Unuse(); |
| 2887 | exit.Jump(); |
| 2888 | then.Bind(); |
| 2889 | Visit(node->then_statement()); |
| 2890 | } |
| 2891 | } else { |
| 2892 | // The then label was bound. |
| 2893 | Visit(node->then_statement()); |
| 2894 | } |
| 2895 | |
| 2896 | } else if (has_else_stm) { |
| 2897 | ASSERT(!has_then_stm); |
| 2898 | JumpTarget else_; |
| 2899 | ControlDestination dest(&exit, &else_, false); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 2900 | LoadCondition(node->condition(), &dest, true); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2901 | |
| 2902 | if (dest.true_was_fall_through()) { |
| 2903 | // The exit label was bound. We may have dangling jumps to the |
| 2904 | // else part. |
| 2905 | if (else_.is_linked()) { |
| 2906 | exit.Unuse(); |
| 2907 | exit.Jump(); |
| 2908 | else_.Bind(); |
| 2909 | Visit(node->else_statement()); |
| 2910 | } |
| 2911 | } else { |
| 2912 | // The else label was bound. |
| 2913 | Visit(node->else_statement()); |
| 2914 | } |
| 2915 | |
| 2916 | } else { |
| 2917 | ASSERT(!has_then_stm && !has_else_stm); |
| 2918 | // We only care about the condition's side effects (not its value |
| 2919 | // or control flow effect). LoadCondition is called without |
| 2920 | // forcing control flow. |
| 2921 | ControlDestination dest(&exit, &exit, true); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 2922 | LoadCondition(node->condition(), &dest, false); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2923 | if (!dest.is_used()) { |
| 2924 | // We got a value on the frame rather than (or in addition to) |
| 2925 | // control flow. |
| 2926 | frame_->Drop(); |
| 2927 | } |
| 2928 | } |
| 2929 | |
| 2930 | if (exit.is_linked()) { |
| 2931 | exit.Bind(); |
| 2932 | } |
| 2933 | } |
| 2934 | |
| 2935 | |
| 2936 | void CodeGenerator::VisitContinueStatement(ContinueStatement* node) { |
| 2937 | ASSERT(!in_spilled_code()); |
| 2938 | Comment cmnt(masm_, "[ ContinueStatement"); |
| 2939 | CodeForStatementPosition(node); |
| 2940 | node->target()->continue_target()->Jump(); |
| 2941 | } |
| 2942 | |
| 2943 | |
| 2944 | void CodeGenerator::VisitBreakStatement(BreakStatement* node) { |
| 2945 | ASSERT(!in_spilled_code()); |
| 2946 | Comment cmnt(masm_, "[ BreakStatement"); |
| 2947 | CodeForStatementPosition(node); |
| 2948 | node->target()->break_target()->Jump(); |
| 2949 | } |
| 2950 | |
| 2951 | |
| 2952 | void CodeGenerator::VisitReturnStatement(ReturnStatement* node) { |
| 2953 | ASSERT(!in_spilled_code()); |
| 2954 | Comment cmnt(masm_, "[ ReturnStatement"); |
| 2955 | |
| 2956 | CodeForStatementPosition(node); |
| 2957 | Load(node->expression()); |
| 2958 | Result return_value = frame_->Pop(); |
Teng-Hui Zhu | 3e5fa29 | 2010-11-09 16:16:48 -0800 | [diff] [blame] | 2959 | masm()->positions_recorder()->WriteRecordedPositions(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2960 | if (function_return_is_shadowed_) { |
| 2961 | function_return_.Jump(&return_value); |
| 2962 | } else { |
| 2963 | frame_->PrepareForReturn(); |
| 2964 | if (function_return_.is_bound()) { |
| 2965 | // If the function return label is already bound we reuse the |
| 2966 | // code by jumping to the return site. |
| 2967 | function_return_.Jump(&return_value); |
| 2968 | } else { |
| 2969 | function_return_.Bind(&return_value); |
| 2970 | GenerateReturnSequence(&return_value); |
| 2971 | } |
| 2972 | } |
| 2973 | } |
| 2974 | |
| 2975 | |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 2976 | void CodeGenerator::GenerateReturnSequence(Result* return_value) { |
| 2977 | // The return value is a live (but not currently reference counted) |
| 2978 | // reference to rax. This is safe because the current frame does not |
| 2979 | // contain a reference to rax (it is prepared for the return by spilling |
| 2980 | // all registers). |
| 2981 | if (FLAG_trace) { |
| 2982 | frame_->Push(return_value); |
| 2983 | *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1); |
| 2984 | } |
| 2985 | return_value->ToRegister(rax); |
| 2986 | |
| 2987 | // Add a label for checking the size of the code used for returning. |
| 2988 | #ifdef DEBUG |
| 2989 | Label check_exit_codesize; |
| 2990 | masm_->bind(&check_exit_codesize); |
| 2991 | #endif |
| 2992 | |
| 2993 | // Leave the frame and return popping the arguments and the |
| 2994 | // receiver. |
| 2995 | frame_->Exit(); |
| 2996 | masm_->ret((scope()->num_parameters() + 1) * kPointerSize); |
| 2997 | DeleteFrame(); |
| 2998 | |
| 2999 | #ifdef ENABLE_DEBUGGER_SUPPORT |
| 3000 | // Add padding that will be overwritten by a debugger breakpoint. |
| 3001 | // frame_->Exit() generates "movq rsp, rbp; pop rbp; ret k" |
| 3002 | // with length 7 (3 + 1 + 3). |
| 3003 | const int kPadding = Assembler::kJSReturnSequenceLength - 7; |
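| | // (Illustrative arithmetic, assuming the patched sequence is 13 |
| | // bytes: a 10-byte movq of the debug call target into |
| | // kScratchRegister plus a 3-byte indirect call, making kPadding |
| | // 13 - 7 = 6 int3 bytes. The authoritative value is |
| | // Assembler::kJSReturnSequenceLength.) |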
| 3004 | for (int i = 0; i < kPadding; ++i) { |
| 3005 | masm_->int3(); |
| 3006 | } |
| 3007 | // Check that the size of the code used for returning matches what is |
| 3008 | // expected by the debugger. |
| 3009 | ASSERT_EQ(Assembler::kJSReturnSequenceLength, |
| 3010 | masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); |
| 3011 | #endif |
| 3012 | } |
| 3013 | |
| 3014 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3015 | void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) { |
| 3016 | ASSERT(!in_spilled_code()); |
| 3017 | Comment cmnt(masm_, "[ WithEnterStatement"); |
| 3018 | CodeForStatementPosition(node); |
| 3019 | Load(node->expression()); |
| 3020 | Result context; |
| 3021 | if (node->is_catch_block()) { |
| 3022 | context = frame_->CallRuntime(Runtime::kPushCatchContext, 1); |
| 3023 | } else { |
| 3024 | context = frame_->CallRuntime(Runtime::kPushContext, 1); |
| 3025 | } |
| 3026 | |
| 3027 | // Update context local. |
| 3028 | frame_->SaveContextRegister(); |
| 3029 | |
| 3030 | // Verify that the runtime call result and rsi agree. |
| 3031 | if (FLAG_debug_code) { |
| 3032 | __ cmpq(context.reg(), rsi); |
| 3033 | __ Assert(equal, "Runtime::NewContext should end up in rsi"); |
| 3034 | } |
| 3035 | } |
| 3036 | |
| 3037 | |
| 3038 | void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) { |
| 3039 | ASSERT(!in_spilled_code()); |
| 3040 | Comment cmnt(masm_, "[ WithExitStatement"); |
| 3041 | CodeForStatementPosition(node); |
| 3042 | // Pop context. |
| 3043 | __ movq(rsi, ContextOperand(rsi, Context::PREVIOUS_INDEX)); |
| 3044 | // Update context local. |
| 3045 | frame_->SaveContextRegister(); |
| 3046 | } |
| 3047 | |
| 3048 | |
| 3049 | void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) { |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3050 | ASSERT(!in_spilled_code()); |
| 3051 | Comment cmnt(masm_, "[ SwitchStatement"); |
| 3052 | CodeForStatementPosition(node); |
| 3053 | node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 3054 | |
| 3055 | // Compile the switch value. |
| 3056 | Load(node->tag()); |
| 3057 | |
| 3058 | ZoneList<CaseClause*>* cases = node->cases(); |
| 3059 | int length = cases->length(); |
| 3060 | CaseClause* default_clause = NULL; |
| 3061 | |
| 3062 | JumpTarget next_test; |
| 3063 | // Compile the case label expressions and comparisons. Exit early |
| 3064 | // if a comparison is unconditionally true. The target next_test is |
| 3065 | // bound before the loop in order to indicate control flow to the |
| 3066 | // first comparison. |
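| | // For example (illustrative): in |
| | //   switch (x) { case 1: a(); case g(): b(); default: c(); } |
| | // the comparisons against 1 and g() are compiled here in source |
| | // order, while the default clause gets no test of its own and is |
| | // only remembered for later. |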
| 3067 | next_test.Bind(); |
| 3068 | for (int i = 0; i < length && !next_test.is_unused(); i++) { |
| 3069 | CaseClause* clause = cases->at(i); |
| 3070 | // The default is not a test, but remember it for later. |
| 3071 | if (clause->is_default()) { |
| 3072 | default_clause = clause; |
| 3073 | continue; |
| 3074 | } |
| 3075 | |
| 3076 | Comment cmnt(masm_, "[ Case comparison"); |
| 3077 | // We recycle the same target next_test for each test. Bind it if |
| 3078 | // the previous test has not done so and then unuse it for the |
| 3079 | // loop. |
| 3080 | if (next_test.is_linked()) { |
| 3081 | next_test.Bind(); |
| 3082 | } |
| 3083 | next_test.Unuse(); |
| 3084 | |
| 3085 | // Duplicate the switch value. |
| 3086 | frame_->Dup(); |
| 3087 | |
| 3088 | // Compile the label expression. |
| 3089 | Load(clause->label()); |
| 3090 | |
| 3091 | // Compare and branch to the body if true or the next test if |
| 3092 | // false. Prefer the next test as a fall through. |
| 3093 | ControlDestination dest(clause->body_target(), &next_test, false); |
Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 3094 | Comparison(node, equal, true, &dest); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3095 | |
| 3096 | // If the comparison fell through to the true target, jump to the |
| 3097 | // actual body. |
| 3098 | if (dest.true_was_fall_through()) { |
| 3099 | clause->body_target()->Unuse(); |
| 3100 | clause->body_target()->Jump(); |
| 3101 | } |
| 3102 | } |
| 3103 | |
| 3104 | // If there was control flow to a next test from the last one |
| 3105 | // compiled, compile a jump to the default or break target. |
| 3106 | if (!next_test.is_unused()) { |
| 3107 | if (next_test.is_linked()) { |
| 3108 | next_test.Bind(); |
| 3109 | } |
| 3110 | // Drop the switch value. |
| 3111 | frame_->Drop(); |
| 3112 | if (default_clause != NULL) { |
| 3113 | default_clause->body_target()->Jump(); |
| 3114 | } else { |
| 3115 | node->break_target()->Jump(); |
| 3116 | } |
| 3117 | } |
| 3118 | |
| 3119 | // The last instruction emitted was a jump, either to the default |
| 3120 | // clause or the break target, or else to a case body from the loop |
| 3121 | // that compiles the tests. |
| 3122 | ASSERT(!has_valid_frame()); |
| 3123 | // Compile case bodies as needed. |
| 3124 | for (int i = 0; i < length; i++) { |
| 3125 | CaseClause* clause = cases->at(i); |
| 3126 | |
| 3127 | // There are two ways to reach the body: from the corresponding |
| 3128 | // test or as the fall through of the previous body. |
| 3129 | if (clause->body_target()->is_linked() || has_valid_frame()) { |
| 3130 | if (clause->body_target()->is_linked()) { |
| 3131 | if (has_valid_frame()) { |
| 3132 |           // If we have both a jump from the test and a fall through, emit |
| 3133 |           // a jump on the fall-through path around the drop of the switch |
| 3134 |           // value that the test path still needs. The exception is the |
| 3135 |           // default clause, whose switch value has already been dropped. |
| 3136 | if (clause->is_default()) { |
| 3137 | clause->body_target()->Bind(); |
| 3138 | } else { |
| 3139 | JumpTarget body; |
| 3140 | body.Jump(); |
| 3141 | clause->body_target()->Bind(); |
| 3142 | frame_->Drop(); |
| 3143 | body.Bind(); |
| 3144 | } |
| 3145 | } else { |
| 3146 | // No fall through to worry about. |
| 3147 | clause->body_target()->Bind(); |
| 3148 | if (!clause->is_default()) { |
| 3149 | frame_->Drop(); |
| 3150 | } |
| 3151 | } |
| 3152 | } else { |
| 3153 | // Otherwise, we have only fall through. |
| 3154 | ASSERT(has_valid_frame()); |
| 3155 | } |
| 3156 | |
| 3157 | // We are now prepared to compile the body. |
| 3158 | Comment cmnt(masm_, "[ Case body"); |
| 3159 | VisitStatements(clause->statements()); |
| 3160 | } |
| 3161 | clause->body_target()->Unuse(); |
| 3162 | } |
| 3163 | |
| 3164 | // We may not have a valid frame here so bind the break target only |
| 3165 | // if needed. |
| 3166 | if (node->break_target()->is_linked()) { |
| 3167 | node->break_target()->Bind(); |
| 3168 | } |
| 3169 | node->break_target()->Unuse(); |
| 3170 | } |
| 3171 | |
| 3172 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3173 | void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) { |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3174 | ASSERT(!in_spilled_code()); |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3175 | Comment cmnt(masm_, "[ DoWhileStatement"); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3176 | CodeForStatementPosition(node); |
| 3177 | node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3178 | JumpTarget body(JumpTarget::BIDIRECTIONAL); |
| 3179 | IncrementLoopNesting(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3180 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3181 | ConditionAnalysis info = AnalyzeCondition(node->cond()); |
| 3182 | // Label the top of the loop for the backward jump if necessary. |
| 3183 | switch (info) { |
| 3184 | case ALWAYS_TRUE: |
| 3185 | // Use the continue target. |
| 3186 | node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL); |
| 3187 | node->continue_target()->Bind(); |
| 3188 | break; |
| 3189 | case ALWAYS_FALSE: |
| 3190 | // No need to label it. |
| 3191 | node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 3192 | break; |
| 3193 | case DONT_KNOW: |
| 3194 |       // Continue jumps forward to the test at the bottom, so the body gets the backward target. |
| 3195 | node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 3196 | body.Bind(); |
| 3197 | break; |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3198 | } |
| 3199 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3200 | CheckStack(); // TODO(1222600): ignore if body contains calls. |
| 3201 | Visit(node->body()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3202 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3203 | // Compile the test. |
| 3204 | switch (info) { |
| 3205 | case ALWAYS_TRUE: |
| 3206 | // If control flow can fall off the end of the body, jump back |
| 3207 | // to the top and bind the break target at the exit. |
| 3208 | if (has_valid_frame()) { |
| 3209 | node->continue_target()->Jump(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3210 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3211 | if (node->break_target()->is_linked()) { |
| 3212 | node->break_target()->Bind(); |
| 3213 | } |
| 3214 | break; |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3215 | case ALWAYS_FALSE: |
| 3216 | // We may have had continues or breaks in the body. |
| 3217 | if (node->continue_target()->is_linked()) { |
| 3218 | node->continue_target()->Bind(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3219 | } |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3220 | if (node->break_target()->is_linked()) { |
| 3221 | node->break_target()->Bind(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3222 | } |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3223 | break; |
| 3224 | case DONT_KNOW: |
| 3225 | // We have to compile the test expression if it can be reached by |
| 3226 | // control flow falling out of the body or via continue. |
| 3227 | if (node->continue_target()->is_linked()) { |
| 3228 | node->continue_target()->Bind(); |
| 3229 | } |
| 3230 | if (has_valid_frame()) { |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 3231 | Comment cmnt(masm_, "[ DoWhileCondition"); |
| 3232 | CodeForDoWhileConditionPosition(node); |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3233 | ControlDestination dest(&body, node->break_target(), false); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 3234 | LoadCondition(node->cond(), &dest, true); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3235 | } |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3236 | if (node->break_target()->is_linked()) { |
| 3237 | node->break_target()->Bind(); |
| 3238 | } |
| 3239 | break; |
| 3240 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3241 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3242 | DecrementLoopNesting(); |
| 3243 | node->continue_target()->Unuse(); |
| 3244 | node->break_target()->Unuse(); |
| 3245 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3246 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3247 | |
| 3248 | void CodeGenerator::VisitWhileStatement(WhileStatement* node) { |
| 3249 | ASSERT(!in_spilled_code()); |
| 3250 | Comment cmnt(masm_, "[ WhileStatement"); |
| 3251 | CodeForStatementPosition(node); |
| 3252 | |
| 3253 | // If the condition is always false and has no side effects, we do not |
| 3254 | // need to compile anything. |
| 3255 | ConditionAnalysis info = AnalyzeCondition(node->cond()); |
| 3256 | if (info == ALWAYS_FALSE) return; |
| 3257 | |
| 3258 | // Do not duplicate conditions that may have function literal |
| 3259 | // subexpressions. This can cause us to compile the function literal |
| 3260 | // twice. |
| 3261 | bool test_at_bottom = !node->may_have_function_literal(); |
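| | // For example (illustrative): in |
| | //   while (f = function() { return done(); }) { ... } |
| | // recompiling the condition at the bottom would emit code for the |
| | // function literal a second time, so such loops keep a single test |
| | // at the top and jump back to it. |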
| 3262 | node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 3263 | IncrementLoopNesting(); |
| 3264 | JumpTarget body; |
| 3265 | if (test_at_bottom) { |
| 3266 | body.set_direction(JumpTarget::BIDIRECTIONAL); |
| 3267 | } |
| 3268 | |
| 3269 | // Based on the condition analysis, compile the test as necessary. |
| 3270 | switch (info) { |
| 3271 | case ALWAYS_TRUE: |
| 3272 | // We will not compile the test expression. Label the top of the |
| 3273 | // loop with the continue target. |
| 3274 | node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL); |
| 3275 | node->continue_target()->Bind(); |
| 3276 | break; |
| 3277 | case DONT_KNOW: { |
| 3278 | if (test_at_bottom) { |
| 3279 | // Continue is the test at the bottom, no need to label the test |
| 3280 | // at the top. The body is a backward target. |
| 3281 | node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 3282 | } else { |
| 3283 | // Label the test at the top as the continue target. The body |
| 3284 | // is a forward-only target. |
| 3285 | node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL); |
| 3286 | node->continue_target()->Bind(); |
| 3287 | } |
| 3288 | // Compile the test with the body as the true target and preferred |
| 3289 | // fall-through and with the break target as the false target. |
| 3290 | ControlDestination dest(&body, node->break_target(), true); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 3291 | LoadCondition(node->cond(), &dest, true); |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3292 | |
| 3293 | if (dest.false_was_fall_through()) { |
| 3294 | // If we got the break target as fall-through, the test may have |
| 3295 | // been unconditionally false (if there are no jumps to the |
| 3296 | // body). |
| 3297 | if (!body.is_linked()) { |
| 3298 | DecrementLoopNesting(); |
| 3299 | return; |
| 3300 | } |
| 3301 | |
| 3302 | // Otherwise, jump around the body on the fall through and then |
| 3303 | // bind the body target. |
| 3304 | node->break_target()->Unuse(); |
| 3305 | node->break_target()->Jump(); |
| 3306 | body.Bind(); |
| 3307 | } |
| 3308 | break; |
| 3309 | } |
| 3310 | case ALWAYS_FALSE: |
| 3311 | UNREACHABLE(); |
| 3312 | break; |
| 3313 | } |
| 3314 | |
| 3315 | CheckStack(); // TODO(1222600): ignore if body contains calls. |
| 3316 | Visit(node->body()); |
| 3317 | |
| 3318 | // Based on the condition analysis, compile the backward jump as |
| 3319 | // necessary. |
| 3320 | switch (info) { |
| 3321 | case ALWAYS_TRUE: |
| 3322 | // The loop body has been labeled with the continue target. |
| 3323 | if (has_valid_frame()) { |
| 3324 | node->continue_target()->Jump(); |
| 3325 | } |
| 3326 | break; |
| 3327 | case DONT_KNOW: |
| 3328 | if (test_at_bottom) { |
| 3329 | // If we have chosen to recompile the test at the bottom, |
| 3330 | // then it is the continue target. |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3331 | if (node->continue_target()->is_linked()) { |
| 3332 | node->continue_target()->Bind(); |
| 3333 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3334 | if (has_valid_frame()) { |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3335 | // The break target is the fall-through (body is a backward |
| 3336 | // jump from here and thus an invalid fall-through). |
| 3337 | ControlDestination dest(&body, node->break_target(), false); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 3338 | LoadCondition(node->cond(), &dest, true); |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3339 | } |
| 3340 | } else { |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 3341 | // If we have chosen not to recompile the test at the bottom, |
| 3342 | // jump back to the one at the top. |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3343 | if (has_valid_frame()) { |
| 3344 | node->continue_target()->Jump(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3345 | } |
| 3346 | } |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3347 | break; |
| 3348 | case ALWAYS_FALSE: |
| 3349 | UNREACHABLE(); |
| 3350 | break; |
| 3351 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3352 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3353 | // The break target may be already bound (by the condition), or there |
| 3354 | // may not be a valid frame. Bind it only if needed. |
| 3355 | if (node->break_target()->is_linked()) { |
| 3356 | node->break_target()->Bind(); |
| 3357 | } |
| 3358 | DecrementLoopNesting(); |
| 3359 | } |
| 3360 | |
| 3361 | |
Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3362 | void CodeGenerator::SetTypeForStackSlot(Slot* slot, TypeInfo info) { |
| 3363 | ASSERT(slot->type() == Slot::LOCAL || slot->type() == Slot::PARAMETER); |
| 3364 | if (slot->type() == Slot::LOCAL) { |
| 3365 | frame_->SetTypeForLocalAt(slot->index(), info); |
| 3366 | } else { |
| 3367 | frame_->SetTypeForParamAt(slot->index(), info); |
| 3368 | } |
| 3369 | if (FLAG_debug_code && info.IsSmi()) { |
| 3370 | if (slot->type() == Slot::LOCAL) { |
| 3371 | frame_->PushLocalAt(slot->index()); |
| 3372 | } else { |
| 3373 | frame_->PushParameterAt(slot->index()); |
| 3374 | } |
| 3375 | Result var = frame_->Pop(); |
| 3376 | var.ToRegister(); |
Leon Clarke | f7060e2 | 2010-06-03 12:02:55 +0100 | [diff] [blame] | 3377 | __ AbortIfNotSmi(var.reg()); |
Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3378 | } |
| 3379 | } |
| 3380 | |
| 3381 | |
Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 3382 | void CodeGenerator::GenerateFastSmiLoop(ForStatement* node) { |
| 3383 |   // A fast smi loop is a for loop with an initializer that is a |
| 3384 |   // simple assignment of a smi to a stack variable, a test that is |
| 3385 |   // a simple comparison of that variable against a smi constant, a |
| 3386 |   // step that is an increment or decrement of the variable, and a |
| 3387 |   // body that never modifies the variable. |
| 3388 |   // This guarantees that the variable is always a smi. |
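| | // For example (illustrative): a loop like |
| | //   for (var i = 0; i < 100; i++) { sum += i; } |
| | // qualifies, provided the body never assigns to i. |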
| 3389 | |
| 3390 | Variable* loop_var = node->loop_variable(); |
| 3391 | Smi* initial_value = *Handle<Smi>::cast(node->init() |
| 3392 | ->StatementAsSimpleAssignment()->value()->AsLiteral()->handle()); |
| 3393 | Smi* limit_value = *Handle<Smi>::cast( |
| 3394 | node->cond()->AsCompareOperation()->right()->AsLiteral()->handle()); |
| 3395 | Token::Value compare_op = |
| 3396 | node->cond()->AsCompareOperation()->op(); |
| 3397 | bool increments = |
| 3398 | node->next()->StatementAsCountOperation()->op() == Token::INC; |
| 3399 | |
| 3400 | // Check that the condition isn't initially false. |
| 3401 | bool initially_false = false; |
| 3402 | int initial_int_value = initial_value->value(); |
| 3403 | int limit_int_value = limit_value->value(); |
| 3404 | switch (compare_op) { |
| 3405 | case Token::LT: |
| 3406 | initially_false = initial_int_value >= limit_int_value; |
| 3407 | break; |
| 3408 | case Token::LTE: |
| 3409 | initially_false = initial_int_value > limit_int_value; |
| 3410 | break; |
| 3411 | case Token::GT: |
| 3412 | initially_false = initial_int_value <= limit_int_value; |
| 3413 | break; |
| 3414 | case Token::GTE: |
| 3415 | initially_false = initial_int_value < limit_int_value; |
| 3416 | break; |
| 3417 | default: |
| 3418 | UNREACHABLE(); |
| 3419 | } |
| 3420 | if (initially_false) return; |
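| | // Worked example (illustrative): for |
| | //   for (var i = 10; i < 5; i++) { ... } |
| | // compare_op is LT and 10 >= 5, so initially_false is true and we |
| | // return without emitting any code for the loop. |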
| 3421 | |
| 3422 | // Only check loop condition at the end. |
| 3423 | |
| 3424 | Visit(node->init()); |
| 3425 | |
| 3426 | JumpTarget loop(JumpTarget::BIDIRECTIONAL); |
| 3427 | // Set type and stack height of BreakTargets. |
| 3428 | node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 3429 | node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 3430 | |
| 3431 | IncrementLoopNesting(); |
| 3432 | loop.Bind(); |
| 3433 | |
| 3434 |   CheckStack();  // TODO(1222600): ignore if body contains calls. |
| 3435 | |
| 3436 |   // Set the number type of the loop variable to smi. |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 3437 |   SetTypeForStackSlot(loop_var->AsSlot(), TypeInfo::Smi()); |
Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 3438 | Visit(node->body()); |
| 3439 | |
| 3440 | if (node->continue_target()->is_linked()) { |
| 3441 | node->continue_target()->Bind(); |
| 3442 | } |
| 3443 | |
| 3444 | if (has_valid_frame()) { |
| 3445 | CodeForStatementPosition(node); |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 3446 | Slot* loop_var_slot = loop_var->AsSlot(); |
Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 3447 | if (loop_var_slot->type() == Slot::LOCAL) { |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 3448 | frame_->TakeLocalAt(loop_var_slot->index()); |
Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 3449 | } else { |
| 3450 | ASSERT(loop_var_slot->type() == Slot::PARAMETER); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 3451 | frame_->TakeParameterAt(loop_var_slot->index()); |
Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 3452 | } |
| 3453 | Result loop_var_result = frame_->Pop(); |
| 3454 | if (!loop_var_result.is_register()) { |
| 3455 | loop_var_result.ToRegister(); |
| 3456 | } |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 3457 | Register loop_var_reg = loop_var_result.reg(); |
| 3458 | frame_->Spill(loop_var_reg); |
Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 3459 | if (increments) { |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 3460 | __ SmiAddConstant(loop_var_reg, |
| 3461 | loop_var_reg, |
Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 3462 | Smi::FromInt(1)); |
| 3463 | } else { |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 3464 | __ SmiSubConstant(loop_var_reg, |
| 3465 | loop_var_reg, |
Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 3466 | Smi::FromInt(1)); |
| 3467 | } |
| 3468 | |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 3469 | frame_->Push(&loop_var_result); |
| 3470 | if (loop_var_slot->type() == Slot::LOCAL) { |
| 3471 | frame_->StoreToLocalAt(loop_var_slot->index()); |
| 3472 | } else { |
| 3473 | ASSERT(loop_var_slot->type() == Slot::PARAMETER); |
| 3474 | frame_->StoreToParameterAt(loop_var_slot->index()); |
Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 3475 | } |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 3476 | frame_->Drop(); |
| 3477 | |
| 3478 | __ SmiCompare(loop_var_reg, limit_value); |
| 3479 | Condition condition; |
| 3480 | switch (compare_op) { |
| 3481 | case Token::LT: |
| 3482 | condition = less; |
| 3483 | break; |
| 3484 | case Token::LTE: |
| 3485 | condition = less_equal; |
| 3486 | break; |
| 3487 | case Token::GT: |
| 3488 | condition = greater; |
| 3489 | break; |
| 3490 | case Token::GTE: |
| 3491 | condition = greater_equal; |
| 3492 | break; |
| 3493 | default: |
| 3494 | condition = never; |
| 3495 | UNREACHABLE(); |
| 3496 | } |
| 3497 | loop.Branch(condition); |
Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 3498 | } |
| 3499 | if (node->break_target()->is_linked()) { |
| 3500 | node->break_target()->Bind(); |
| 3501 | } |
| 3502 | DecrementLoopNesting(); |
| 3503 | } |
| 3504 | |
| 3505 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3506 | void CodeGenerator::VisitForStatement(ForStatement* node) { |
| 3507 | ASSERT(!in_spilled_code()); |
| 3508 | Comment cmnt(masm_, "[ ForStatement"); |
| 3509 | CodeForStatementPosition(node); |
| 3510 | |
Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 3511 | if (node->is_fast_smi_loop()) { |
| 3512 | GenerateFastSmiLoop(node); |
| 3513 | return; |
| 3514 | } |
| 3515 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3516 | // Compile the init expression if present. |
| 3517 | if (node->init() != NULL) { |
| 3518 | Visit(node->init()); |
| 3519 | } |
| 3520 | |
| 3521 | // If the condition is always false and has no side effects, we do not |
| 3522 | // need to compile anything else. |
| 3523 | ConditionAnalysis info = AnalyzeCondition(node->cond()); |
| 3524 | if (info == ALWAYS_FALSE) return; |
| 3525 | |
| 3526 | // Do not duplicate conditions that may have function literal |
| 3527 | // subexpressions. This can cause us to compile the function literal |
| 3528 | // twice. |
| 3529 | bool test_at_bottom = !node->may_have_function_literal(); |
| 3530 | node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 3531 | IncrementLoopNesting(); |
| 3532 | |
| 3533 | // Target for backward edge if no test at the bottom, otherwise |
| 3534 | // unused. |
| 3535 | JumpTarget loop(JumpTarget::BIDIRECTIONAL); |
| 3536 | |
| 3537 | // Target for backward edge if there is a test at the bottom, |
| 3538 | // otherwise used as target for test at the top. |
| 3539 | JumpTarget body; |
| 3540 | if (test_at_bottom) { |
| 3541 | body.set_direction(JumpTarget::BIDIRECTIONAL); |
| 3542 | } |
| 3543 | |
| 3544 | // Based on the condition analysis, compile the test as necessary. |
| 3545 | switch (info) { |
| 3546 | case ALWAYS_TRUE: |
| 3547 | // We will not compile the test expression. Label the top of the |
| 3548 | // loop. |
| 3549 | if (node->next() == NULL) { |
| 3550 | // Use the continue target if there is no update expression. |
| 3551 | node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL); |
| 3552 | node->continue_target()->Bind(); |
| 3553 | } else { |
| 3554 | // Otherwise use the backward loop target. |
| 3555 | node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 3556 | loop.Bind(); |
| 3557 | } |
| 3558 | break; |
| 3559 | case DONT_KNOW: { |
| 3560 | if (test_at_bottom) { |
| 3561 | // Continue is either the update expression or the test at the |
| 3562 | // bottom, no need to label the test at the top. |
| 3563 | node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 3564 | } else if (node->next() == NULL) { |
| 3565 | // We are not recompiling the test at the bottom and there is no |
| 3566 | // update expression. |
| 3567 | node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL); |
| 3568 | node->continue_target()->Bind(); |
| 3569 | } else { |
| 3570 | // We are not recompiling the test at the bottom and there is an |
| 3571 | // update expression. |
| 3572 | node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 3573 | loop.Bind(); |
| 3574 | } |
| 3575 | |
| 3576 | // Compile the test with the body as the true target and preferred |
| 3577 | // fall-through and with the break target as the false target. |
| 3578 | ControlDestination dest(&body, node->break_target(), true); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 3579 | LoadCondition(node->cond(), &dest, true); |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3580 | |
| 3581 | if (dest.false_was_fall_through()) { |
| 3582 | // If we got the break target as fall-through, the test may have |
| 3583 | // been unconditionally false (if there are no jumps to the |
| 3584 | // body). |
| 3585 | if (!body.is_linked()) { |
| 3586 | DecrementLoopNesting(); |
| 3587 | return; |
| 3588 | } |
| 3589 | |
| 3590 | // Otherwise, jump around the body on the fall through and then |
| 3591 | // bind the body target. |
| 3592 | node->break_target()->Unuse(); |
| 3593 | node->break_target()->Jump(); |
| 3594 | body.Bind(); |
| 3595 | } |
| 3596 | break; |
| 3597 | } |
| 3598 | case ALWAYS_FALSE: |
| 3599 | UNREACHABLE(); |
| 3600 | break; |
| 3601 | } |
| 3602 | |
| 3603 | CheckStack(); // TODO(1222600): ignore if body contains calls. |
Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 3604 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3605 | Visit(node->body()); |
| 3606 | |
| 3607 | // If there is an update expression, compile it if necessary. |
| 3608 | if (node->next() != NULL) { |
| 3609 | if (node->continue_target()->is_linked()) { |
| 3610 | node->continue_target()->Bind(); |
| 3611 | } |
| 3612 | |
| 3613 | // Control can reach the update by falling out of the body or by a |
| 3614 | // continue. |
| 3615 | if (has_valid_frame()) { |
| 3616 |       // Record the source position of the statement: although this code |
| 3617 |       // comes after the code for the body, it belongs to the loop |
| 3618 |       // statement and not to the body. |
| 3619 | CodeForStatementPosition(node); |
| 3620 | Visit(node->next()); |
| 3621 | } |
| 3622 | } |
| 3623 | |
| 3624 | // Based on the condition analysis, compile the backward jump as |
| 3625 | // necessary. |
| 3626 | switch (info) { |
| 3627 | case ALWAYS_TRUE: |
| 3628 | if (has_valid_frame()) { |
| 3629 | if (node->next() == NULL) { |
| 3630 | node->continue_target()->Jump(); |
| 3631 | } else { |
| 3632 | loop.Jump(); |
| 3633 | } |
| 3634 | } |
| 3635 | break; |
| 3636 | case DONT_KNOW: |
| 3637 | if (test_at_bottom) { |
| 3638 | if (node->continue_target()->is_linked()) { |
| 3639 | // We can have dangling jumps to the continue target if there |
| 3640 | // was no update expression. |
| 3641 | node->continue_target()->Bind(); |
| 3642 | } |
| 3643 | // Control can reach the test at the bottom by falling out of |
| 3644 | // the body, by a continue in the body, or from the update |
| 3645 | // expression. |
| 3646 | if (has_valid_frame()) { |
| 3647 | // The break target is the fall-through (body is a backward |
| 3648 | // jump from here). |
| 3649 | ControlDestination dest(&body, node->break_target(), false); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 3650 | LoadCondition(node->cond(), &dest, true); |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3651 | } |
| 3652 | } else { |
| 3653 | // Otherwise, jump back to the test at the top. |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3654 | if (has_valid_frame()) { |
| 3655 | if (node->next() == NULL) { |
| 3656 | node->continue_target()->Jump(); |
| 3657 | } else { |
| 3658 | loop.Jump(); |
| 3659 | } |
| 3660 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3661 | } |
| 3662 | break; |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3663 | case ALWAYS_FALSE: |
| 3664 | UNREACHABLE(); |
| 3665 | break; |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3666 | } |
| 3667 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3668 | // The break target may be already bound (by the condition), or there |
| 3669 | // may not be a valid frame. Bind it only if needed. |
| 3670 | if (node->break_target()->is_linked()) { |
| 3671 | node->break_target()->Bind(); |
| 3672 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3673 | DecrementLoopNesting(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3674 | } |
| 3675 | |
| 3676 | |
| 3677 | void CodeGenerator::VisitForInStatement(ForInStatement* node) { |
| 3678 | ASSERT(!in_spilled_code()); |
| 3679 | VirtualFrame::SpilledScope spilled_scope; |
| 3680 | Comment cmnt(masm_, "[ ForInStatement"); |
| 3681 | CodeForStatementPosition(node); |
| 3682 | |
| 3683 | JumpTarget primitive; |
| 3684 | JumpTarget jsobject; |
| 3685 | JumpTarget fixed_array; |
| 3686 | JumpTarget entry(JumpTarget::BIDIRECTIONAL); |
| 3687 | JumpTarget end_del_check; |
| 3688 | JumpTarget exit; |
| 3689 | |
| 3690 | // Get the object to enumerate over (converted to JSObject). |
| 3691 | LoadAndSpill(node->enumerable()); |
| 3692 | |
| 3693 |   // Both SpiderMonkey and kjs ignore null and undefined, in contrast |
| 3694 |   // to the specification: section 12.6.4 mandates a call to ToObject. |
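| | // For example (illustrative): both |
| | //   for (var p in null) {} |
| | //   for (var p in undefined) {} |
| | // branch straight to the exit below without running the body. |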
| 3695 | frame_->EmitPop(rax); |
| 3696 | |
| 3697 | // rax: value to be iterated over |
| 3698 | __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); |
| 3699 | exit.Branch(equal); |
| 3700 | __ CompareRoot(rax, Heap::kNullValueRootIndex); |
| 3701 | exit.Branch(equal); |
| 3702 | |
| 3703 | // Stack layout in body: |
| 3704 | // [iteration counter (smi)] <- slot 0 |
| 3705 | // [length of array] <- slot 1 |
| 3706 | // [FixedArray] <- slot 2 |
| 3707 | // [Map or 0] <- slot 3 |
| 3708 | // [Object] <- slot 4 |
| 3709 | |
| 3710 | // Check if enumerable is already a JSObject |
| 3711 | // rax: value to be iterated over |
| 3712 | Condition is_smi = masm_->CheckSmi(rax); |
| 3713 | primitive.Branch(is_smi); |
| 3714 | __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx); |
| 3715 | jsobject.Branch(above_equal); |
| 3716 | |
| 3717 | primitive.Bind(); |
| 3718 | frame_->EmitPush(rax); |
| 3719 | frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION, 1); |
| 3720 |   // The builtin call returns the value in rax, which is where we want it below. |
| 3721 | |
| 3722 | jsobject.Bind(); |
| 3723 | // Get the set of properties (as a FixedArray or Map). |
| 3724 | // rax: value to be iterated over |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 3725 | frame_->EmitPush(rax); // Push the object being iterated over. |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3726 | |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 3727 | |
| 3728 | // Check cache validity in generated code. This is a fast case for |
| 3729 | // the JSObject::IsSimpleEnum cache validity checks. If we cannot |
| 3730 | // guarantee cache validity, call the runtime system to check cache |
| 3731 | // validity or get the property names in a fixed array. |
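  // Roughly, the emitted checks correspond to this sketch (a
  // simplification for illustration; not the actual C++ helper):
  //
  //   for (Object* o = receiver; o != null; o = o->map()->prototype()) {
  //     if (o->elements() != empty_fixed_array) goto call_runtime;
  //     if (o->map()->instance_descriptors()->IsEmpty()) goto call_runtime;
  //     if (!HasEnumCache(o)) goto call_runtime;  // hypothetical predicate
  //     if (o != receiver && EnumCache(o) != empty_fixed_array)
  //       goto call_runtime;
  //   }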
| 3732 | JumpTarget call_runtime; |
| 3733 | JumpTarget loop(JumpTarget::BIDIRECTIONAL); |
| 3734 | JumpTarget check_prototype; |
| 3735 | JumpTarget use_cache; |
| 3736 | __ movq(rcx, rax); |
| 3737 | loop.Bind(); |
| 3738 | // Check that there are no elements. |
| 3739 | __ movq(rdx, FieldOperand(rcx, JSObject::kElementsOffset)); |
| 3740 | __ CompareRoot(rdx, Heap::kEmptyFixedArrayRootIndex); |
| 3741 | call_runtime.Branch(not_equal); |
| 3742 | // Check that instance descriptors are not empty so that we can |
| 3743 | // check for an enum cache. Leave the map in rbx for the subsequent
| 3744 | // prototype load. |
| 3745 | __ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset)); |
| 3746 | __ movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOffset)); |
| 3747 | __ CompareRoot(rdx, Heap::kEmptyDescriptorArrayRootIndex); |
| 3748 | call_runtime.Branch(equal); |
| 3749 | // Check that there is an enum cache in the non-empty instance
| 3750 | // descriptors. This is the case if the next enumeration index |
| 3751 | // field does not contain a smi. |
| 3752 | __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset)); |
| 3753 | is_smi = masm_->CheckSmi(rdx); |
| 3754 | call_runtime.Branch(is_smi); |
| 3755 | // For all objects but the receiver, check that the cache is empty. |
| 3756 | __ cmpq(rcx, rax); |
| 3757 | check_prototype.Branch(equal); |
| 3758 | __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
| 3759 | __ CompareRoot(rdx, Heap::kEmptyFixedArrayRootIndex); |
| 3760 | call_runtime.Branch(not_equal); |
| 3761 | check_prototype.Bind(); |
| 3762 | // Load the prototype from the map and loop if non-null. |
| 3763 | __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset)); |
| 3764 | __ CompareRoot(rcx, Heap::kNullValueRootIndex); |
| 3765 | loop.Branch(not_equal); |
| 3766 | // The enum cache is valid. Load the map of the object being |
| 3767 | // iterated over and use the cache for the iteration. |
| 3768 | __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset)); |
| 3769 | use_cache.Jump(); |
| 3770 | |
| 3771 | call_runtime.Bind(); |
| 3772 | // Call the runtime to get the property names for the object. |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3773 | frame_->EmitPush(rax); // push the Object (slot 4) for the runtime call |
| 3774 | frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1); |
| 3775 | |
| 3776 | // If we got a Map, we can do a fast modification check. |
| 3777 | // Otherwise, we got a FixedArray, and we have to do a slow check. |
| 3778 | // rax: map or fixed array (result from call to |
| 3779 | // Runtime::kGetPropertyNamesFast) |
| 3780 | __ movq(rdx, rax); |
| 3781 | __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); |
| 3782 | __ CompareRoot(rcx, Heap::kMetaMapRootIndex); |
| 3783 | fixed_array.Branch(not_equal); |
| 3784 | |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 3785 | use_cache.Bind(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3786 | // Get enum cache |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 3787 | // rax: map (either the result from a call to |
| 3788 | // Runtime::kGetPropertyNamesFast or has been fetched directly from |
| 3789 | // the object) |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3790 | __ movq(rcx, rax); |
| 3791 | __ movq(rcx, FieldOperand(rcx, Map::kInstanceDescriptorsOffset)); |
| 3792 | // Get the bridge array held in the enumeration index field. |
| 3793 | __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset)); |
| 3794 | // Get the cache from the bridge array. |
| 3795 | __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
| 3796 | |
| 3797 | frame_->EmitPush(rax); // <- slot 3 |
| 3798 | frame_->EmitPush(rdx); // <- slot 2 |
Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 3799 | __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset)); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3800 | frame_->EmitPush(rax); // <- slot 1 |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3801 | frame_->EmitPush(Smi::FromInt(0)); // <- slot 0 |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3802 | entry.Jump(); |
| 3803 | |
| 3804 | fixed_array.Bind(); |
| 3805 | // rax: fixed array (result from call to Runtime::kGetPropertyNamesFast) |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3806 | frame_->EmitPush(Smi::FromInt(0)); // <- slot 3 |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3807 | frame_->EmitPush(rax); // <- slot 2 |
| 3808 | |
| 3809 | // Push the length of the array and the initial index onto the stack. |
Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 3810 | __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset)); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3811 | frame_->EmitPush(rax); // <- slot 1 |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3812 | frame_->EmitPush(Smi::FromInt(0)); // <- slot 0 |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3813 | |
| 3814 | // Condition. |
| 3815 | entry.Bind(); |
| 3816 | // Grab the current frame's height for the break and continue |
| 3817 | // targets only after all the state is pushed on the frame. |
| 3818 | node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 3819 | node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); |
| 3820 | |
| 3821 | __ movq(rax, frame_->ElementAt(0)); // load the current count |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3822 | __ SmiCompare(frame_->ElementAt(1), rax); // compare to the array length |
| 3823 | node->break_target()->Branch(below_equal); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3824 | |
| 3825 | // Get the i'th entry of the array. |
| 3826 | __ movq(rdx, frame_->ElementAt(2)); |
| 3827 | SmiIndex index = masm_->SmiToIndex(rbx, rax, kPointerSizeLog2); |
| 3828 | __ movq(rbx, |
| 3829 | FieldOperand(rdx, index.reg, index.scale, FixedArray::kHeaderSize)); |
| 3830 | |
| 3831 | // Get the expected map from the stack, or a zero map in the
| 3832 | // permanent slow case. rax: current iteration count,
| 3833 | // rbx: i'th entry of the enum cache.
| 3834 | __ movq(rdx, frame_->ElementAt(3)); |
| 3835 | // Check if the expected map still matches that of the enumerable. |
| 3836 | // If not, we have to filter the key. |
| 3837 | // rax: current iteration count |
| 3838 | // rbx: i'th entry of the enum cache |
| 3839 | // rdx: expected map value |
| 3840 | __ movq(rcx, frame_->ElementAt(4)); |
| 3841 | __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset)); |
| 3842 | __ cmpq(rcx, rdx); |
| 3843 | end_del_check.Branch(equal); |
| 3844 | |
| 3845 | // Convert the entry to a string (or null if it isn't a property anymore). |
| 3846 | frame_->EmitPush(frame_->ElementAt(4)); // push enumerable |
| 3847 | frame_->EmitPush(rbx); // push entry |
| 3848 | frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2); |
| 3849 | __ movq(rbx, rax); |
| 3850 | |
| 3851 | // If the property has been removed while iterating, we just skip it. |
Iain Merrick | 7568138 | 2010-08-19 15:07:18 +0100 | [diff] [blame] | 3852 | __ SmiCompare(rbx, Smi::FromInt(0)); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3853 | node->continue_target()->Branch(equal); |
| 3854 | |
| 3855 | end_del_check.Bind(); |
| 3856 | // Store the entry in the 'each' expression and take another spin in the |
| 3857 | // loop. rbx: i'th entry of the enum cache (or string thereof).
| 3858 | frame_->EmitPush(rbx); |
| 3859 | { Reference each(this, node->each()); |
| 3860 | // Loading a reference may leave the frame in an unspilled state. |
| 3861 | frame_->SpillAll(); |
| 3862 | if (!each.is_illegal()) { |
| 3863 | if (each.size() > 0) { |
| 3864 | frame_->EmitPush(frame_->ElementAt(each.size())); |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 3865 | each.SetValue(NOT_CONST_INIT); |
| 3866 | frame_->Drop(2); // Drop the original and the copy of the element. |
| 3867 | } else { |
| 3868 | // If the reference has size zero then we can use the value below |
| 3869 | // the reference as if it were above the reference, instead of pushing |
| 3870 | // a new copy of it above the reference. |
| 3871 | each.SetValue(NOT_CONST_INIT); |
| 3872 | frame_->Drop(); // Drop the original of the element. |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3873 | } |
| 3874 | } |
| 3875 | } |
| 3876 | // Unloading a reference may leave the frame in an unspilled state. |
| 3877 | frame_->SpillAll(); |
| 3878 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3879 | // Body. |
| 3880 | CheckStack(); // TODO(1222600): ignore if body contains calls. |
| 3881 | VisitAndSpill(node->body()); |
| 3882 | |
| 3883 | // Next. Reestablish a spilled frame in case we are coming here via |
| 3884 | // a continue in the body. |
| 3885 | node->continue_target()->Bind(); |
| 3886 | frame_->SpillAll(); |
| 3887 | frame_->EmitPop(rax); |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3888 | __ SmiAddConstant(rax, rax, Smi::FromInt(1)); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3889 | frame_->EmitPush(rax); |
| 3890 | entry.Jump(); |
| 3891 | |
| 3892 | // Cleanup. No need to spill because VirtualFrame::Drop is safe for |
| 3893 | // any frame. |
| 3894 | node->break_target()->Bind(); |
| 3895 | frame_->Drop(5); |
| 3896 | |
| 3897 | // Exit. |
| 3898 | exit.Bind(); |
| 3899 | |
| 3900 | node->continue_target()->Unuse(); |
| 3901 | node->break_target()->Unuse(); |
| 3902 | } |
| 3903 | |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 3904 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3905 | void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) { |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3906 | ASSERT(!in_spilled_code()); |
| 3907 | VirtualFrame::SpilledScope spilled_scope; |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 3908 | Comment cmnt(masm_, "[ TryCatchStatement"); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3909 | CodeForStatementPosition(node); |
| 3910 | |
| 3911 | JumpTarget try_block; |
| 3912 | JumpTarget exit; |
| 3913 | |
| 3914 | try_block.Call(); |
| 3915 | // --- Catch block --- |
| 3916 | frame_->EmitPush(rax); |
| 3917 | |
| 3918 | // Store the caught exception in the catch variable. |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 3919 | Variable* catch_var = node->catch_var()->var(); |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 3920 | ASSERT(catch_var != NULL && catch_var->AsSlot() != NULL); |
| 3921 | StoreToSlot(catch_var->AsSlot(), NOT_CONST_INIT); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3922 | |
| 3923 | // Remove the exception from the stack. |
| 3924 | frame_->Drop(); |
| 3925 | |
| 3926 | VisitStatementsAndSpill(node->catch_block()->statements()); |
| 3927 | if (has_valid_frame()) { |
| 3928 | exit.Jump(); |
| 3929 | } |
| 3930 | |
| 3931 | |
| 3932 | // --- Try block --- |
| 3933 | try_block.Bind(); |
| 3934 | |
| 3935 | frame_->PushTryHandler(TRY_CATCH_HANDLER); |
| 3936 | int handler_height = frame_->height(); |
| 3937 | |
| 3938 | // Shadow the jump targets for all escapes from the try block, including |
| 3939 | // returns. During shadowing, the original target is hidden as the |
| 3940 | // ShadowTarget and operations on the original actually affect the |
| 3941 | // shadowing target. |
| 3942 | // |
| 3943 | // We should probably try to unify the escaping targets and the return |
| 3944 | // target. |
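  // As a hypothetical example, in
  //
  //   try { if (c) return x; } catch (e) { handle(e); }
  //
  // the 'return' is routed through a ShadowTarget so that the try
  // handler can be unlinked from the handler chain before the actual
  // return is performed.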
| 3945 | int nof_escapes = node->escaping_targets()->length(); |
| 3946 | List<ShadowTarget*> shadows(1 + nof_escapes); |
| 3947 | |
| 3948 | // Add the shadow target for the function return. |
| 3949 | static const int kReturnShadowIndex = 0; |
| 3950 | shadows.Add(new ShadowTarget(&function_return_)); |
| 3951 | bool function_return_was_shadowed = function_return_is_shadowed_; |
| 3952 | function_return_is_shadowed_ = true; |
| 3953 | ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_); |
| 3954 | |
| 3955 | // Add the remaining shadow targets. |
| 3956 | for (int i = 0; i < nof_escapes; i++) { |
| 3957 | shadows.Add(new ShadowTarget(node->escaping_targets()->at(i))); |
| 3958 | } |
| 3959 | |
| 3960 | // Generate code for the statements in the try block. |
| 3961 | VisitStatementsAndSpill(node->try_block()->statements()); |
| 3962 | |
| 3963 | // Stop the introduced shadowing and count the number of required unlinks. |
| 3964 | // After shadowing stops, the original targets are unshadowed and the |
| 3965 | // ShadowTargets represent the formerly shadowing targets. |
| 3966 | bool has_unlinks = false; |
| 3967 | for (int i = 0; i < shadows.length(); i++) { |
| 3968 | shadows[i]->StopShadowing(); |
| 3969 | has_unlinks = has_unlinks || shadows[i]->is_linked(); |
| 3970 | } |
| 3971 | function_return_is_shadowed_ = function_return_was_shadowed; |
| 3972 | |
| 3973 | // Get an external reference to the handler address. |
| 3974 | ExternalReference handler_address(Top::k_handler_address); |
| 3975 | |
| 3976 | // Make sure that there's nothing left on the stack above the |
| 3977 | // handler structure. |
| 3978 | if (FLAG_debug_code) { |
| 3979 | __ movq(kScratchRegister, handler_address); |
| 3980 | __ cmpq(rsp, Operand(kScratchRegister, 0)); |
| 3981 | __ Assert(equal, "stack pointer should point to top handler"); |
| 3982 | } |
| 3983 | |
| 3984 | // If we can fall off the end of the try block, unlink from try chain. |
| 3985 | if (has_valid_frame()) { |
| 3986 | // The next handler address is on top of the frame. Unlink from |
| 3987 | // the handler list and drop the rest of this handler from the |
| 3988 | // frame. |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 3989 | STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 3990 | __ movq(kScratchRegister, handler_address); |
| 3991 | frame_->EmitPop(Operand(kScratchRegister, 0)); |
| 3992 | frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
| 3993 | if (has_unlinks) { |
| 3994 | exit.Jump(); |
| 3995 | } |
| 3996 | } |
| 3997 | |
| 3998 | // Generate unlink code for the (formerly) shadowing targets that |
| 3999 | // have been jumped to. Deallocate each shadow target. |
| 4000 | Result return_value; |
| 4001 | for (int i = 0; i < shadows.length(); i++) { |
| 4002 | if (shadows[i]->is_linked()) { |
| 4003 | // Unlink from try chain; be careful not to destroy the TOS if |
| 4004 | // there is one. |
| 4005 | if (i == kReturnShadowIndex) { |
| 4006 | shadows[i]->Bind(&return_value); |
| 4007 | return_value.ToRegister(rax); |
| 4008 | } else { |
| 4009 | shadows[i]->Bind(); |
| 4010 | } |
| 4011 | // Because we can be jumping here (to spilled code) from |
| 4012 | // unspilled code, we need to reestablish a spilled frame at |
| 4013 | // this block. |
| 4014 | frame_->SpillAll(); |
| 4015 | |
| 4016 | // Reload sp from the top handler, because some statements that we |
| 4017 | // break from (e.g., for...in) may have left stuff on the stack.
| 4018 | __ movq(kScratchRegister, handler_address); |
| 4019 | __ movq(rsp, Operand(kScratchRegister, 0)); |
| 4020 | frame_->Forget(frame_->height() - handler_height); |
| 4021 | |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 4022 | STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4023 | __ movq(kScratchRegister, handler_address); |
| 4024 | frame_->EmitPop(Operand(kScratchRegister, 0)); |
| 4025 | frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
| 4026 | |
| 4027 | if (i == kReturnShadowIndex) { |
| 4028 | if (!function_return_is_shadowed_) frame_->PrepareForReturn(); |
| 4029 | shadows[i]->other_target()->Jump(&return_value); |
| 4030 | } else { |
| 4031 | shadows[i]->other_target()->Jump(); |
| 4032 | } |
| 4033 | } |
| 4034 | } |
| 4035 | |
| 4036 | exit.Bind(); |
| 4037 | } |
| 4038 | |
| 4039 | |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 4040 | void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) { |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4041 | ASSERT(!in_spilled_code()); |
| 4042 | VirtualFrame::SpilledScope spilled_scope; |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 4043 | Comment cmnt(masm_, "[ TryFinallyStatement"); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4044 | CodeForStatementPosition(node); |
| 4045 | |
| 4046 | // State: Used to keep track of reason for entering the finally |
| 4047 | // block. Should probably be extended to hold information for |
| 4048 | // break/continue from within the try block. |
| 4049 | enum { FALLING, THROWING, JUMPING }; |
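  // As an illustration (hypothetical source), in
  //
  //   try { return f(); } finally { cleanup(); }
  //
  // the return enters the finally block with state JUMPING + 0 (the
  // return shadow index); once 'cleanup()' has run, the dispatch code
  // below redirects control to the function return with f()'s result
  // preserved in rax.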
| 4050 | |
| 4051 | JumpTarget try_block; |
| 4052 | JumpTarget finally_block; |
| 4053 | |
| 4054 | try_block.Call(); |
| 4055 | |
| 4056 | frame_->EmitPush(rax); |
| 4057 | // In case of thrown exceptions, this is where we continue. |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 4058 | __ Move(rcx, Smi::FromInt(THROWING)); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4059 | finally_block.Jump(); |
| 4060 | |
| 4061 | // --- Try block --- |
| 4062 | try_block.Bind(); |
| 4063 | |
| 4064 | frame_->PushTryHandler(TRY_FINALLY_HANDLER); |
| 4065 | int handler_height = frame_->height(); |
| 4066 | |
| 4067 | // Shadow the jump targets for all escapes from the try block, including |
| 4068 | // returns. During shadowing, the original target is hidden as the |
| 4069 | // ShadowTarget and operations on the original actually affect the |
| 4070 | // shadowing target. |
| 4071 | // |
| 4072 | // We should probably try to unify the escaping targets and the return |
| 4073 | // target. |
| 4074 | int nof_escapes = node->escaping_targets()->length(); |
| 4075 | List<ShadowTarget*> shadows(1 + nof_escapes); |
| 4076 | |
| 4077 | // Add the shadow target for the function return. |
| 4078 | static const int kReturnShadowIndex = 0; |
| 4079 | shadows.Add(new ShadowTarget(&function_return_)); |
| 4080 | bool function_return_was_shadowed = function_return_is_shadowed_; |
| 4081 | function_return_is_shadowed_ = true; |
| 4082 | ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_); |
| 4083 | |
| 4084 | // Add the remaining shadow targets. |
| 4085 | for (int i = 0; i < nof_escapes; i++) { |
| 4086 | shadows.Add(new ShadowTarget(node->escaping_targets()->at(i))); |
| 4087 | } |
| 4088 | |
| 4089 | // Generate code for the statements in the try block. |
| 4090 | VisitStatementsAndSpill(node->try_block()->statements()); |
| 4091 | |
| 4092 | // Stop the introduced shadowing and count the number of required unlinks. |
| 4093 | // After shadowing stops, the original targets are unshadowed and the |
| 4094 | // ShadowTargets represent the formerly shadowing targets. |
| 4095 | int nof_unlinks = 0; |
| 4096 | for (int i = 0; i < shadows.length(); i++) { |
| 4097 | shadows[i]->StopShadowing(); |
| 4098 | if (shadows[i]->is_linked()) nof_unlinks++; |
| 4099 | } |
| 4100 | function_return_is_shadowed_ = function_return_was_shadowed; |
| 4101 | |
| 4102 | // Get an external reference to the handler address. |
| 4103 | ExternalReference handler_address(Top::k_handler_address); |
| 4104 | |
| 4105 | // If we can fall off the end of the try block, unlink from the try |
| 4106 | // chain and set the state on the frame to FALLING. |
| 4107 | if (has_valid_frame()) { |
| 4108 | // The next handler address is on top of the frame. |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 4109 | STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4110 | __ movq(kScratchRegister, handler_address); |
| 4111 | frame_->EmitPop(Operand(kScratchRegister, 0)); |
| 4112 | frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
| 4113 | |
| 4114 | // Fake a top of stack value (unneeded when FALLING) and set the |
| 4115 | // state in rcx, then jump around the unlink blocks if any.
| 4116 | frame_->EmitPush(Heap::kUndefinedValueRootIndex); |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 4117 | __ Move(rcx, Smi::FromInt(FALLING)); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4118 | if (nof_unlinks > 0) { |
| 4119 | finally_block.Jump(); |
| 4120 | } |
| 4121 | } |
| 4122 | |
| 4123 | // Generate code to unlink and set the state for the (formerly) |
| 4124 | // shadowing targets that have been jumped to. |
| 4125 | for (int i = 0; i < shadows.length(); i++) { |
| 4126 | if (shadows[i]->is_linked()) { |
| 4127 | // If we have come from the shadowed return, the return value is |
| 4128 | // on the virtual frame. We must preserve it until it is |
| 4129 | // pushed. |
| 4130 | if (i == kReturnShadowIndex) { |
| 4131 | Result return_value; |
| 4132 | shadows[i]->Bind(&return_value); |
| 4133 | return_value.ToRegister(rax); |
| 4134 | } else { |
| 4135 | shadows[i]->Bind(); |
| 4136 | } |
| 4137 | // Because we can be jumping here (to spilled code) from |
| 4138 | // unspilled code, we need to reestablish a spilled frame at |
| 4139 | // this block. |
| 4140 | frame_->SpillAll(); |
| 4141 | |
| 4142 | // Reload sp from the top handler, because some statements that |
| 4143 | // we break from (e.g., for...in) may have left stuff on the
| 4144 | // stack. |
| 4145 | __ movq(kScratchRegister, handler_address); |
| 4146 | __ movq(rsp, Operand(kScratchRegister, 0)); |
| 4147 | frame_->Forget(frame_->height() - handler_height); |
| 4148 | |
| 4149 | // Unlink this handler and drop it from the frame. |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 4150 | STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4151 | __ movq(kScratchRegister, handler_address); |
| 4152 | frame_->EmitPop(Operand(kScratchRegister, 0)); |
| 4153 | frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
| 4154 | |
| 4155 | if (i == kReturnShadowIndex) { |
| 4156 | // If this target shadowed the function return, materialize |
| 4157 | // the return value on the stack. |
| 4158 | frame_->EmitPush(rax); |
| 4159 | } else { |
| 4160 | // Fake TOS for targets that shadowed breaks and continues. |
| 4161 | frame_->EmitPush(Heap::kUndefinedValueRootIndex); |
| 4162 | } |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 4163 | __ Move(rcx, Smi::FromInt(JUMPING + i)); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4164 | if (--nof_unlinks > 0) { |
| 4165 | // If this is not the last unlink block, jump around the next. |
| 4166 | finally_block.Jump(); |
| 4167 | } |
| 4168 | } |
| 4169 | } |
| 4170 | |
| 4171 | // --- Finally block --- |
| 4172 | finally_block.Bind(); |
| 4173 | |
| 4174 | // Push the state on the stack. |
| 4175 | frame_->EmitPush(rcx); |
| 4176 | |
| 4177 | // We keep two elements on the stack - the (possibly faked) result |
| 4178 | // and the state - while evaluating the finally block. |
| 4179 | // |
| 4180 | // Generate code for the statements in the finally block. |
| 4181 | VisitStatementsAndSpill(node->finally_block()->statements()); |
| 4182 | |
| 4183 | if (has_valid_frame()) { |
| 4184 | // Restore state and return value or faked TOS. |
| 4185 | frame_->EmitPop(rcx); |
| 4186 | frame_->EmitPop(rax); |
| 4187 | } |
| 4188 | |
| 4189 | // Generate code to jump to the right destination for all used |
| 4190 | // formerly shadowing targets. Deallocate each shadow target. |
| 4191 | for (int i = 0; i < shadows.length(); i++) { |
| 4192 | if (has_valid_frame() && shadows[i]->is_bound()) { |
| 4193 | BreakTarget* original = shadows[i]->other_target(); |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 4194 | __ SmiCompare(rcx, Smi::FromInt(JUMPING + i)); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4195 | if (i == kReturnShadowIndex) { |
| 4196 | // The return value is (already) in rax. |
| 4197 | Result return_value = allocator_->Allocate(rax); |
| 4198 | ASSERT(return_value.is_valid()); |
| 4199 | if (function_return_is_shadowed_) { |
| 4200 | original->Branch(equal, &return_value); |
| 4201 | } else { |
| 4202 | // Branch around the preparation for return which may emit |
| 4203 | // code. |
| 4204 | JumpTarget skip; |
| 4205 | skip.Branch(not_equal); |
| 4206 | frame_->PrepareForReturn(); |
| 4207 | original->Jump(&return_value); |
| 4208 | skip.Bind(); |
| 4209 | } |
| 4210 | } else { |
| 4211 | original->Branch(equal); |
| 4212 | } |
| 4213 | } |
| 4214 | } |
| 4215 | |
| 4216 | if (has_valid_frame()) { |
| 4217 | // Check if we need to rethrow the exception. |
| 4218 | JumpTarget exit; |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 4219 | __ SmiCompare(rcx, Smi::FromInt(THROWING)); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4220 | exit.Branch(not_equal); |
| 4221 | |
| 4222 | // Rethrow exception. |
| 4223 | frame_->EmitPush(rax); // undo pop from above |
| 4224 | frame_->CallRuntime(Runtime::kReThrow, 1); |
| 4225 | |
| 4226 | // Done. |
| 4227 | exit.Bind(); |
| 4228 | } |
| 4229 | } |
| 4230 | |
| 4231 | |
| 4232 | void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) { |
| 4233 | ASSERT(!in_spilled_code()); |
| 4234 | Comment cmnt(masm_, "[ DebuggerStatement"); |
| 4235 | CodeForStatementPosition(node); |
| 4236 | #ifdef ENABLE_DEBUGGER_SUPPORT |
| 4237 | // Spill everything, even constants, to the frame. |
| 4238 | frame_->SpillAll(); |
Leon Clarke | 4515c47 | 2010-02-03 11:58:03 +0000 | [diff] [blame] | 4239 | |
Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 4240 | frame_->DebugBreak(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4241 | // Ignore the return value. |
| 4242 | #endif |
| 4243 | } |
| 4244 | |
| 4245 | |
Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 4246 | void CodeGenerator::InstantiateFunction( |
Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 4247 | Handle<SharedFunctionInfo> function_info, |
| 4248 | bool pretenure) { |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4249 | // The inevitable call will sync frame elements to memory anyway, so |
| 4250 | // we do it eagerly to allow us to push the arguments directly into |
| 4251 | // place. |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4252 | frame_->SyncRange(0, frame_->element_count() - 1); |
| 4253 | |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4254 | // Use the fast case closure allocation code that allocates in new |
| 4255 | // space for nested functions that don't need literals cloning. |
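  // E.g. (hypothetically) the inner closure in
  //
  //   function outer() { return function () { return 0; }; }
  //
  // has no literals and is not pretenured, so it is allocated by the
  // stub below rather than by the Runtime::kNewClosure call.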
Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 4256 | if (scope()->is_function_scope() && |
| 4257 | function_info->num_literals() == 0 && |
| 4258 | !pretenure) { |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4259 | FastNewClosureStub stub; |
Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 4260 | frame_->Push(function_info); |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4261 | Result answer = frame_->CallStub(&stub, 1); |
| 4262 | frame_->Push(&answer); |
| 4263 | } else { |
Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 4264 | // Call the runtime to instantiate the function based on the |
| 4265 | // shared function info. |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4266 | frame_->EmitPush(rsi); |
Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 4267 | frame_->EmitPush(function_info); |
Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 4268 | frame_->EmitPush(pretenure |
| 4269 | ? Factory::true_value() |
| 4270 | : Factory::false_value()); |
| 4271 | Result result = frame_->CallRuntime(Runtime::kNewClosure, 3); |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4272 | frame_->Push(&result); |
| 4273 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4274 | } |
| 4275 | |
| 4276 | |
| 4277 | void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) { |
| 4278 | Comment cmnt(masm_, "[ FunctionLiteral"); |
| 4279 | |
Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 4280 | // Build the function info and instantiate it. |
| 4281 | Handle<SharedFunctionInfo> function_info = |
Ben Murdoch | f87a203 | 2010-10-22 12:50:53 +0100 | [diff] [blame] | 4282 | Compiler::BuildFunctionInfo(node, script()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4283 | // Check for stack-overflow exception. |
Ben Murdoch | f87a203 | 2010-10-22 12:50:53 +0100 | [diff] [blame] | 4284 | if (function_info.is_null()) { |
| 4285 | SetStackOverflow(); |
| 4286 | return; |
| 4287 | } |
Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 4288 | InstantiateFunction(function_info, node->pretenure()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4289 | } |
| 4290 | |
| 4291 | |
Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 4292 | void CodeGenerator::VisitSharedFunctionInfoLiteral( |
| 4293 | SharedFunctionInfoLiteral* node) { |
| 4294 | Comment cmnt(masm_, "[ SharedFunctionInfoLiteral"); |
Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 4295 | InstantiateFunction(node->shared_function_info(), false); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4296 | } |
| 4297 | |
| 4298 | |
| 4299 | void CodeGenerator::VisitConditional(Conditional* node) { |
| 4300 | Comment cmnt(masm_, "[ Conditional"); |
| 4301 | JumpTarget then; |
| 4302 | JumpTarget else_; |
| 4303 | JumpTarget exit; |
| 4304 | ControlDestination dest(&then, &else_, true); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 4305 | LoadCondition(node->condition(), &dest, true); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4306 | |
| 4307 | if (dest.false_was_fall_through()) { |
| 4308 | // The else target was bound, so we compile the else part first. |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 4309 | Load(node->else_expression()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4310 | |
| 4311 | if (then.is_linked()) { |
| 4312 | exit.Jump(); |
| 4313 | then.Bind(); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 4314 | Load(node->then_expression()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4315 | } |
| 4316 | } else { |
| 4317 | // The then target was bound, so we compile the then part first. |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 4318 | Load(node->then_expression()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4319 | |
| 4320 | if (else_.is_linked()) { |
| 4321 | exit.Jump(); |
| 4322 | else_.Bind(); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 4323 | Load(node->else_expression()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4324 | } |
| 4325 | } |
| 4326 | |
| 4327 | exit.Bind(); |
| 4328 | } |
| 4329 | |
| 4330 | |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 4331 | void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) { |
| 4332 | if (slot->type() == Slot::LOOKUP) { |
| 4333 | ASSERT(slot->var()->is_dynamic()); |
| 4334 | |
| 4335 | JumpTarget slow; |
| 4336 | JumpTarget done; |
| 4337 | Result value; |
| 4338 | |
| 4339 | // Generate fast case for loading from slots that correspond to |
| 4340 | // local/global variables or arguments unless they are shadowed by |
| 4341 | // eval-introduced bindings. |
| 4342 | EmitDynamicLoadFromSlotFastCase(slot, |
| 4343 | typeof_state, |
| 4344 | &value, |
| 4345 | &slow, |
| 4346 | &done); |
| 4347 | |
| 4348 | slow.Bind(); |
| 4349 | // A runtime call is inevitable. We eagerly sync frame elements |
| 4350 | // to memory so that we can push the arguments directly into place |
| 4351 | // on top of the frame. |
| 4352 | frame_->SyncRange(0, frame_->element_count() - 1); |
| 4353 | frame_->EmitPush(rsi); |
| 4354 | __ movq(kScratchRegister, slot->var()->name(), RelocInfo::EMBEDDED_OBJECT); |
| 4355 | frame_->EmitPush(kScratchRegister); |
| 4356 | if (typeof_state == INSIDE_TYPEOF) { |
| 4357 | value = |
| 4358 | frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); |
| 4359 | } else { |
| 4360 | value = frame_->CallRuntime(Runtime::kLoadContextSlot, 2); |
| 4361 | } |
| 4362 | |
| 4363 | done.Bind(&value); |
| 4364 | frame_->Push(&value); |
| 4365 | |
| 4366 | } else if (slot->var()->mode() == Variable::CONST) { |
| 4367 | // Const slots may contain 'the hole' value (the constant hasn't been |
| 4368 | // initialized yet) which needs to be converted into the 'undefined' |
| 4369 | // value. |
| 4370 | // |
| 4371 | // We currently spill the virtual frame because constants use the |
| 4372 | // potentially unsafe direct-frame access of SlotOperand. |
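  // Hypothetical example: in 'use(c); const c = 1;' the read of 'c'
  // happens before the initializer has run, so the slot still holds
  // the hole and the load below must produce 'undefined' instead.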
| 4373 | VirtualFrame::SpilledScope spilled_scope; |
| 4374 | Comment cmnt(masm_, "[ Load const"); |
| 4375 | JumpTarget exit; |
| 4376 | __ movq(rcx, SlotOperand(slot, rcx)); |
| 4377 | __ CompareRoot(rcx, Heap::kTheHoleValueRootIndex); |
| 4378 | exit.Branch(not_equal); |
| 4379 | __ LoadRoot(rcx, Heap::kUndefinedValueRootIndex); |
| 4380 | exit.Bind(); |
| 4381 | frame_->EmitPush(rcx); |
| 4382 | |
| 4383 | } else if (slot->type() == Slot::PARAMETER) { |
| 4384 | frame_->PushParameterAt(slot->index()); |
| 4385 | |
| 4386 | } else if (slot->type() == Slot::LOCAL) { |
| 4387 | frame_->PushLocalAt(slot->index()); |
| 4388 | |
| 4389 | } else { |
| 4390 | // The other remaining slot types (LOOKUP and GLOBAL) cannot reach |
| 4391 | // here. |
| 4392 | // |
| 4393 | // The use of SlotOperand below is safe for an unspilled frame |
| 4394 | // because it will always be a context slot. |
| 4395 | ASSERT(slot->type() == Slot::CONTEXT); |
| 4396 | Result temp = allocator_->Allocate(); |
| 4397 | ASSERT(temp.is_valid()); |
| 4398 | __ movq(temp.reg(), SlotOperand(slot, temp.reg())); |
| 4399 | frame_->Push(&temp); |
| 4400 | } |
| 4401 | } |
| 4402 | |
| 4403 | |
| 4404 | void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot, |
| 4405 | TypeofState state) { |
| 4406 | LoadFromSlot(slot, state); |
| 4407 | |
| 4408 | // Bail out quickly if we're not using lazy arguments allocation. |
| 4409 | if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return; |
| 4410 | |
| 4411 | // ... or if the slot isn't a non-parameter arguments slot. |
| 4412 | if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return; |
| 4413 | |
| 4414 | // Pop the loaded value from the stack. |
| 4415 | Result value = frame_->Pop(); |
| 4416 | |
| 4417 | // If the loaded value is a constant, we know if the arguments |
| 4418 | // object has been lazily loaded yet. |
| 4419 | if (value.is_constant()) { |
| 4420 | if (value.handle()->IsTheHole()) { |
| 4421 | Result arguments = StoreArgumentsObject(false); |
| 4422 | frame_->Push(&arguments); |
| 4423 | } else { |
| 4424 | frame_->Push(&value); |
| 4425 | } |
| 4426 | return; |
| 4427 | } |
| 4428 | |
| 4429 | // The loaded value is in a register. If it is the sentinel that |
| 4430 | // indicates that we haven't loaded the arguments object yet, we |
| 4431 | // need to do it now. |
| 4432 | JumpTarget exit; |
| 4433 | __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex); |
| 4434 | frame_->Push(&value); |
| 4435 | exit.Branch(not_equal); |
| 4436 | Result arguments = StoreArgumentsObject(false); |
| 4437 | frame_->SetElementAt(0, &arguments); |
| 4438 | exit.Bind(); |
| 4439 | } |
| 4440 | |
| 4441 | |
| 4442 | Result CodeGenerator::LoadFromGlobalSlotCheckExtensions( |
| 4443 | Slot* slot, |
| 4444 | TypeofState typeof_state, |
| 4445 | JumpTarget* slow) { |
| 4446 | // Check that no extension objects have been created by calls to |
| 4447 | // eval from the current scope to the global scope. |
| 4448 | Register context = rsi; |
| 4449 | Result tmp = allocator_->Allocate(); |
| 4450 | ASSERT(tmp.is_valid()); // All non-reserved registers were available. |
| 4451 | |
| 4452 | Scope* s = scope(); |
| 4453 | while (s != NULL) { |
| 4454 | if (s->num_heap_slots() > 0) { |
| 4455 | if (s->calls_eval()) { |
| 4456 | // Check that extension is NULL. |
| 4457 | __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), |
| 4458 | Immediate(0)); |
| 4459 | slow->Branch(not_equal, not_taken); |
| 4460 | } |
| 4461 | // Load next context in chain. |
| 4462 | __ movq(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX)); |
| 4463 | __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset)); |
| 4464 | context = tmp.reg(); |
| 4465 | } |
| 4466 | // If no outer scope calls eval, we do not need to check more |
| 4467 | // context extensions. If we have reached an eval scope, we check |
| 4468 | // all extensions from this point. |
| 4469 | if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; |
| 4470 | s = s->outer_scope(); |
| 4471 | } |
| 4472 | |
| 4473 | if (s->is_eval_scope()) { |
| 4474 | // Loop up the context chain. There is no frame effect so it is |
| 4475 | // safe to use raw labels here. |
| 4476 | Label next, fast; |
| 4477 | if (!context.is(tmp.reg())) { |
| 4478 | __ movq(tmp.reg(), context); |
| 4479 | } |
| 4480 | // Load map for comparison into register, outside loop. |
| 4481 | __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex); |
| 4482 | __ bind(&next); |
| 4483 | // Terminate at global context. |
| 4484 | __ cmpq(kScratchRegister, FieldOperand(tmp.reg(), HeapObject::kMapOffset)); |
| 4485 | __ j(equal, &fast); |
| 4486 | // Check that extension is NULL. |
| 4487 | __ cmpq(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0)); |
| 4488 | slow->Branch(not_equal); |
| 4489 | // Load next context in chain. |
| 4490 | __ movq(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX)); |
| 4491 | __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset)); |
| 4492 | __ jmp(&next); |
| 4493 | __ bind(&fast); |
| 4494 | } |
| 4495 | tmp.Unuse(); |
| 4496 | |
| 4497 | // All extension objects were empty and it is safe to use a global |
| 4498 | // load IC call. |
| 4499 | LoadGlobal(); |
| 4500 | frame_->Push(slot->var()->name()); |
| 4501 | RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) |
| 4502 | ? RelocInfo::CODE_TARGET |
| 4503 | : RelocInfo::CODE_TARGET_CONTEXT; |
| 4504 | Result answer = frame_->CallLoadIC(mode); |
| 4505 | // A test rax instruction following the call signals that the inobject |
| 4506 | // property case was inlined. Ensure that there is not a test rax |
| 4507 | // instruction here. |
| 4508 | masm_->nop(); |
| 4509 | return answer; |
| 4510 | } |
| 4511 | |
| 4512 | |
| 4513 | void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot, |
| 4514 | TypeofState typeof_state, |
| 4515 | Result* result, |
| 4516 | JumpTarget* slow, |
| 4517 | JumpTarget* done) { |
| 4518 | // Generate fast-case code for variables that might be shadowed by |
| 4519 | // eval-introduced variables. Eval is used a lot without |
| 4520 | // introducing variables. In those cases, we do not want to |
| 4521 | // perform a runtime call for all variables in the scope |
| 4522 | // containing the eval. |
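  // For example (hypothetical source), in
  //
  //   function f(x) { eval("g()"); return x; }
  //
  // 'x' is potentially shadowed by the eval: if no binding for 'x' is
  // introduced at runtime, the context-extension checks pass and the
  // fast case loads the parameter slot without a runtime call.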
| 4523 | if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) { |
| 4524 | *result = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow); |
| 4525 | done->Jump(result); |
| 4526 | |
| 4527 | } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) { |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 4528 | Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot(); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 4529 | Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite(); |
| 4530 | if (potential_slot != NULL) { |
| 4531 | // Generate fast case for locals that rewrite to slots. |
| 4532 | // Allocate a fresh register to use as a temp in |
| 4533 | // ContextSlotOperandCheckExtensions and to hold the result |
| 4534 | // value. |
| 4535 | *result = allocator_->Allocate(); |
| 4536 | ASSERT(result->is_valid()); |
| 4537 | __ movq(result->reg(), |
| 4538 | ContextSlotOperandCheckExtensions(potential_slot, |
| 4539 | *result, |
| 4540 | slow)); |
| 4541 | if (potential_slot->var()->mode() == Variable::CONST) { |
| 4542 | __ CompareRoot(result->reg(), Heap::kTheHoleValueRootIndex); |
| 4543 | done->Branch(not_equal, result); |
| 4544 | __ LoadRoot(result->reg(), Heap::kUndefinedValueRootIndex); |
| 4545 | } |
| 4546 | done->Jump(result); |
| 4547 | } else if (rewrite != NULL) { |
| 4548 | // Generate fast case for argument loads. |
| 4549 | Property* property = rewrite->AsProperty(); |
| 4550 | if (property != NULL) { |
| 4551 | VariableProxy* obj_proxy = property->obj()->AsVariableProxy(); |
| 4552 | Literal* key_literal = property->key()->AsLiteral(); |
| 4553 | if (obj_proxy != NULL && |
| 4554 | key_literal != NULL && |
| 4555 | obj_proxy->IsArguments() && |
| 4556 | key_literal->handle()->IsSmi()) { |
| 4557 | // Load arguments object if there are no eval-introduced |
| 4558 | // variables. Then load the argument from the arguments |
| 4559 | // object using keyed load. |
| 4560 | Result arguments = allocator()->Allocate(); |
| 4561 | ASSERT(arguments.is_valid()); |
| 4562 | __ movq(arguments.reg(), |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 4563 | ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(), |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 4564 | arguments, |
| 4565 | slow)); |
| 4566 | frame_->Push(&arguments); |
| 4567 | frame_->Push(key_literal->handle()); |
| 4568 | *result = EmitKeyedLoad(); |
| 4569 | done->Jump(result); |
| 4570 | } |
| 4571 | } |
| 4572 | } |
| 4573 | } |
| 4574 | } |
| 4575 | |
| 4576 | |
| 4577 | void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) { |
| 4578 | if (slot->type() == Slot::LOOKUP) { |
| 4579 | ASSERT(slot->var()->is_dynamic()); |
| 4580 | |
| 4581 | // For now, just do a runtime call. Since the call is inevitable, |
| 4582 | // we eagerly sync the virtual frame so we can directly push the |
| 4583 | // arguments into place. |
| 4584 | frame_->SyncRange(0, frame_->element_count() - 1); |
| 4585 | |
| 4586 | frame_->EmitPush(rsi); |
| 4587 | frame_->EmitPush(slot->var()->name()); |
| 4588 | |
| 4589 | Result value; |
| 4590 | if (init_state == CONST_INIT) { |
| 4591 | // Same as the case for a normal store, but ignores attribute |
| 4592 | // (e.g. READ_ONLY) of context slot so that we can initialize const |
| 4593 | // properties (introduced via eval("const foo = (some expr);")). Also, |
| 4594 | // uses the current function context instead of the top context. |
| 4595 | // |
| 4596 | // Note that we must declare the foo upon entry of eval(), via a |
| 4597 | // context slot declaration, but we cannot initialize it at the same |
| 4598 | // time, because the const declaration may be at the end of the eval |
| 4599 | // code (sigh...) and the const variable may have been used before |
| 4600 | // (where its value is 'undefined'). Thus, we can only do the |
| 4601 | // initialization when we actually encounter the expression and when |
| 4602 | // the expression operands are defined and valid, and thus we need the |
| 4603 | // split into 2 operations: declaration of the context slot followed |
| 4604 | // by initialization. |
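  // Illustrative (hypothetical) case:
  //
  //   eval("use(foo); const foo = (some expr);");
  //
  // 'foo' is declared on entry to the eval code, but its slot keeps
  // the hole value until this initialization expression is reached.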
| 4605 | value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3); |
| 4606 | } else { |
| 4607 | value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3); |
| 4608 | } |
| 4609 | // Storing a variable must keep the (new) value on the expression |
| 4610 | // stack. This is necessary for compiling chained assignment |
| 4611 | // expressions. |
| 4612 | frame_->Push(&value); |
| 4613 | } else { |
| 4614 | ASSERT(!slot->var()->is_dynamic()); |
| 4615 | |
| 4616 | JumpTarget exit; |
| 4617 | if (init_state == CONST_INIT) { |
| 4618 | ASSERT(slot->var()->mode() == Variable::CONST); |
| 4619 | // Only the first const initialization must be executed (the slot |
| 4620 | // still contains 'the hole' value). When the assignment is executed, |
| 4621 | // the code is identical to a normal store (see below). |
| 4622 | // |
| 4623 | // We spill the frame in the code below because the direct-frame |
| 4624 | // access of SlotOperand is potentially unsafe with an unspilled |
| 4625 | // frame. |
| 4626 | VirtualFrame::SpilledScope spilled_scope; |
| 4627 | Comment cmnt(masm_, "[ Init const"); |
| 4628 | __ movq(rcx, SlotOperand(slot, rcx)); |
| 4629 | __ CompareRoot(rcx, Heap::kTheHoleValueRootIndex); |
| 4630 | exit.Branch(not_equal); |
| 4631 | } |
| 4632 | |
| 4633 | // We must execute the store. Storing a variable must keep the (new) |
| 4634 | // value on the stack. This is necessary for compiling assignment |
| 4635 | // expressions. |
| 4636 | // |
| 4637 | // Note: We will reach here even with slot->var()->mode() == |
| 4638 | // Variable::CONST because of const declarations which will initialize |
| 4639 | // consts to 'the hole' value and by doing so, end up calling this code. |
| 4640 | if (slot->type() == Slot::PARAMETER) { |
| 4641 | frame_->StoreToParameterAt(slot->index()); |
| 4642 | } else if (slot->type() == Slot::LOCAL) { |
| 4643 | frame_->StoreToLocalAt(slot->index()); |
| 4644 | } else { |
| 4645 | // The other slot types (LOOKUP and GLOBAL) cannot reach here. |
| 4646 | // |
| 4647 | // The use of SlotOperand below is safe for an unspilled frame |
| 4648 | // because the slot is a context slot. |
| 4649 | ASSERT(slot->type() == Slot::CONTEXT); |
| 4650 | frame_->Dup(); |
| 4651 | Result value = frame_->Pop(); |
| 4652 | value.ToRegister(); |
| 4653 | Result start = allocator_->Allocate(); |
| 4654 | ASSERT(start.is_valid()); |
| 4655 | __ movq(SlotOperand(slot, start.reg()), value.reg()); |
| 4656 | // RecordWrite may destroy the value registers. |
| 4657 | // |
| 4658 | // TODO(204): Avoid actually spilling when the value is not |
| 4659 | // needed (probably the common case). |
| 4660 | frame_->Spill(value.reg()); |
| 4661 | int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; |
| 4662 | Result temp = allocator_->Allocate(); |
| 4663 | ASSERT(temp.is_valid()); |
| 4664 | __ RecordWrite(start.reg(), offset, value.reg(), temp.reg()); |
| 4665 | // The results start, value, and temp are unused by going out of |
| 4666 | // scope. |
| 4667 | } |
| 4668 | |
| 4669 | exit.Bind(); |
| 4670 | } |
| 4671 | } |
| 4672 | |
| 4673 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4674 | void CodeGenerator::VisitSlot(Slot* node) { |
| 4675 | Comment cmnt(masm_, "[ Slot"); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 4676 | LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4677 | } |
| 4678 | |
| 4679 | |
| 4680 | void CodeGenerator::VisitVariableProxy(VariableProxy* node) { |
| 4681 | Comment cmnt(masm_, "[ VariableProxy"); |
| 4682 | Variable* var = node->var(); |
| 4683 | Expression* expr = var->rewrite(); |
| 4684 | if (expr != NULL) { |
| 4685 | Visit(expr); |
| 4686 | } else { |
| 4687 | ASSERT(var->is_global()); |
| 4688 | Reference ref(this, node); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 4689 | ref.GetValue(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4690 | } |
| 4691 | } |
| 4692 | |
| 4693 | |
| 4694 | void CodeGenerator::VisitLiteral(Literal* node) { |
| 4695 | Comment cmnt(masm_, "[ Literal"); |
| 4696 | frame_->Push(node->handle()); |
| 4697 | } |
| 4698 | |
| 4699 | |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 4700 | void CodeGenerator::LoadUnsafeSmi(Register target, Handle<Object> value) { |
| 4701 | UNIMPLEMENTED(); |
| 4702 | // TODO(X64): Implement security policy for loads of smis. |
| 4703 | } |
| 4704 | |
| 4705 | |
| 4706 | bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) { |
| 4707 | return false; |
| 4708 | } |
| 4709 | |
| 4710 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4711 | // Materialize the regexp literal 'node' in the literals array |
| 4712 | // 'literals' of the function. Leave the regexp boilerplate in |
| 4713 | // 'boilerplate'. |
| 4714 | class DeferredRegExpLiteral: public DeferredCode { |
| 4715 | public: |
| 4716 | DeferredRegExpLiteral(Register boilerplate, |
| 4717 | Register literals, |
| 4718 | RegExpLiteral* node) |
| 4719 | : boilerplate_(boilerplate), literals_(literals), node_(node) { |
| 4720 | set_comment("[ DeferredRegExpLiteral"); |
| 4721 | } |
| 4722 | |
| 4723 | void Generate(); |
| 4724 | |
| 4725 | private: |
| 4726 | Register boilerplate_; |
| 4727 | Register literals_; |
| 4728 | RegExpLiteral* node_; |
| 4729 | }; |
| 4730 | |
| 4731 | |
| 4732 | void DeferredRegExpLiteral::Generate() { |
| 4733 | // Since the entry is undefined we call the runtime system to |
| 4734 | // compute the literal. |
| 4735 | // Literal array (0). |
| 4736 | __ push(literals_); |
| 4737 | // Literal index (1). |
Steve Block | 3ce2e20 | 2009-11-05 08:53:23 +0000 | [diff] [blame] | 4738 | __ Push(Smi::FromInt(node_->literal_index())); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4739 | // RegExp pattern (2). |
| 4740 | __ Push(node_->pattern()); |
| 4741 | // RegExp flags (3). |
| 4742 | __ Push(node_->flags()); |
| 4743 | __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); |
| 4744 | if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax); |
| 4745 | } |
| 4746 | |
| 4747 | |
Ben Murdoch | bb769b2 | 2010-08-11 14:56:33 +0100 | [diff] [blame] | 4748 | class DeferredAllocateInNewSpace: public DeferredCode { |
| 4749 | public: |
Steve Block | 791712a | 2010-08-27 10:21:07 +0100 | [diff] [blame] | 4750 | DeferredAllocateInNewSpace(int size, |
| 4751 | Register target, |
| 4752 | int registers_to_save = 0) |
| 4753 | : size_(size), target_(target), registers_to_save_(registers_to_save) { |
Ben Murdoch | bb769b2 | 2010-08-11 14:56:33 +0100 | [diff] [blame] | 4754 | ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace()); |
| 4755 | set_comment("[ DeferredAllocateInNewSpace"); |
| 4756 | } |
| 4757 | void Generate(); |
| 4758 | |
| 4759 | private: |
| 4760 | int size_; |
| 4761 | Register target_; |
Steve Block | 791712a | 2010-08-27 10:21:07 +0100 | [diff] [blame] | 4762 | int registers_to_save_; |
Ben Murdoch | bb769b2 | 2010-08-11 14:56:33 +0100 | [diff] [blame] | 4763 | }; |
| 4764 | |
| 4765 | |
| 4766 | void DeferredAllocateInNewSpace::Generate() { |
Steve Block | 791712a | 2010-08-27 10:21:07 +0100 | [diff] [blame] | 4767 | for (int i = 0; i < kNumRegs; i++) { |
| 4768 | if (registers_to_save_ & (1 << i)) { |
| 4769 | Register save_register = { i }; |
| 4770 | __ push(save_register); |
| 4771 | } |
| 4772 | } |
Ben Murdoch | bb769b2 | 2010-08-11 14:56:33 +0100 | [diff] [blame] | 4773 | __ Push(Smi::FromInt(size_)); |
| 4774 | __ CallRuntime(Runtime::kAllocateInNewSpace, 1); |
| 4775 | if (!target_.is(rax)) { |
| 4776 | __ movq(target_, rax); |
| 4777 | } |
Steve Block | 791712a | 2010-08-27 10:21:07 +0100 | [diff] [blame] | 4778 | for (int i = kNumRegs - 1; i >= 0; i--) { |
| 4779 | if (registers_to_save_ & (1 << i)) { |
| 4780 | Register save_register = { i }; |
Kristian Monsen | 80d68ea | 2010-09-08 11:05:35 +0100 | [diff] [blame] | 4781 | __ pop(save_register); |
Steve Block | 791712a | 2010-08-27 10:21:07 +0100 | [diff] [blame] | 4782 | } |
| 4783 | } |
Ben Murdoch | bb769b2 | 2010-08-11 14:56:33 +0100 | [diff] [blame] | 4784 | } |
| 4785 | |
| 4786 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4787 | void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) { |
| 4788 | Comment cmnt(masm_, "[ RegExp Literal"); |
| 4789 | |
| 4790 | // Retrieve the literals array and check the allocated entry. Begin |
| 4791 | // with a writable copy of the function of this activation in a |
| 4792 | // register. |
| 4793 | frame_->PushFunction(); |
| 4794 | Result literals = frame_->Pop(); |
| 4795 | literals.ToRegister(); |
| 4796 | frame_->Spill(literals.reg()); |
| 4797 | |
| 4798 | // Load the literals array of the function. |
| 4799 | __ movq(literals.reg(), |
| 4800 | FieldOperand(literals.reg(), JSFunction::kLiteralsOffset)); |
| 4801 | |
| 4802 | // Load the literal at the ast saved index. |
| 4803 | Result boilerplate = allocator_->Allocate(); |
| 4804 | ASSERT(boilerplate.is_valid()); |
| 4805 | int literal_offset = |
| 4806 | FixedArray::kHeaderSize + node->literal_index() * kPointerSize; |
| 4807 | __ movq(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset)); |
| 4808 | |
| 4809 | // Check whether we need to materialize the RegExp object. If so, |
| 4810 | // jump to the deferred code passing the literals array. |
| 4811 | DeferredRegExpLiteral* deferred = |
| 4812 | new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node); |
| 4813 | __ CompareRoot(boilerplate.reg(), Heap::kUndefinedValueRootIndex); |
| 4814 | deferred->Branch(equal); |
| 4815 | deferred->BindExit(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4816 | |
Ben Murdoch | bb769b2 | 2010-08-11 14:56:33 +0100 | [diff] [blame] | 4817 | // The boilerplate register now contains the RegExp object.
| 4818 | |
| 4819 | Result tmp = allocator()->Allocate(); |
| 4820 | ASSERT(tmp.is_valid()); |
| 4821 | |
| 4822 | int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
| 4823 | |
| 4824 | DeferredAllocateInNewSpace* allocate_fallback = |
| 4825 | new DeferredAllocateInNewSpace(size, literals.reg()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4826 | frame_->Push(&boilerplate); |
Ben Murdoch | bb769b2 | 2010-08-11 14:56:33 +0100 | [diff] [blame] | 4827 | frame_->SpillTop(); |
| 4828 | __ AllocateInNewSpace(size, |
| 4829 | literals.reg(), |
| 4830 | tmp.reg(), |
| 4831 | no_reg, |
| 4832 | allocate_fallback->entry_label(), |
| 4833 | TAG_OBJECT); |
| 4834 | allocate_fallback->BindExit(); |
| 4835 | boilerplate = frame_->Pop(); |
| 4836 | // Copy from boilerplate to clone and return clone. |
| 4837 | |
| 4838 | for (int i = 0; i < size; i += kPointerSize) { |
| 4839 | __ movq(tmp.reg(), FieldOperand(boilerplate.reg(), i)); |
| 4840 | __ movq(FieldOperand(literals.reg(), i), tmp.reg()); |
| 4841 | } |
| 4842 | frame_->Push(&literals); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4843 | } |
| 4844 | |
| 4845 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4846 | void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) { |
| 4847 | Comment cmnt(masm_, "[ ObjectLiteral"); |
| 4848 | |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4849 | // Load a writable copy of the function of this activation in a |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4850 | // register. |
| 4851 | frame_->PushFunction(); |
| 4852 | Result literals = frame_->Pop(); |
| 4853 | literals.ToRegister(); |
| 4854 | frame_->Spill(literals.reg()); |
| 4855 | |
| 4856 | // Load the literals array of the function. |
| 4857 | __ movq(literals.reg(), |
| 4858 | FieldOperand(literals.reg(), JSFunction::kLiteralsOffset)); |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4859 | // Literal array. |
| 4860 | frame_->Push(&literals); |
| 4861 | // Literal index. |
| 4862 | frame_->Push(Smi::FromInt(node->literal_index())); |
| 4863 | // Constant properties. |
| 4864 | frame_->Push(node->constant_properties()); |
Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 4865 | // Should the object literal have fast elements? |
| 4866 | frame_->Push(Smi::FromInt(node->fast_elements() ? 1 : 0)); |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4867 | Result clone; |
| 4868 | if (node->depth() > 1) { |
Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 4869 | clone = frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4); |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4870 | } else { |
Steve Block | 6ded16b | 2010-05-10 14:33:55 +0100 | [diff] [blame] | 4871 | clone = frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4872 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4873 | frame_->Push(&clone); |
| 4874 | |
Teng-Hui Zhu | 3e5fa29 | 2010-11-09 16:16:48 -0800 | [diff] [blame] | 4875 | // Mark all computed expressions that are bound to a key that |
| 4876 | // is shadowed by a later occurrence of the same key. For the |
| 4877 | // marked expressions, no store code is emitted. |
| 4878 | node->CalculateEmitStore(); |
| 4879 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4880 | for (int i = 0; i < node->properties()->length(); i++) { |
| 4881 | ObjectLiteral::Property* property = node->properties()->at(i); |
| 4882 | switch (property->kind()) { |
| 4883 | case ObjectLiteral::Property::CONSTANT: |
| 4884 | break; |
| 4885 | case ObjectLiteral::Property::MATERIALIZED_LITERAL: |
| 4886 | if (CompileTimeValue::IsCompileTimeValue(property->value())) break; |
| 4887 | // else fall through. |
| 4888 | case ObjectLiteral::Property::COMPUTED: { |
| 4889 | Handle<Object> key(property->key()->handle()); |
| 4890 | if (key->IsSymbol()) { |
| 4891 | // Duplicate the object as the IC receiver. |
| 4892 | frame_->Dup(); |
| 4893 | Load(property->value()); |
Teng-Hui Zhu | 3e5fa29 | 2010-11-09 16:16:48 -0800 | [diff] [blame] | 4894 | if (property->emit_store()) { |
| 4895 | Result ignored = |
| 4896 | frame_->CallStoreIC(Handle<String>::cast(key), false); |
| 4897 | // A test rax instruction following the store IC call would |
| 4898 | // indicate the presence of an inlined version of the |
| 4899 | // store. Add a nop to indicate that there is no such |
| 4900 | // inlined version. |
| 4901 | __ nop(); |
| 4902 | } else { |
| 4903 | frame_->Drop(2); |
| 4904 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4905 | break; |
| 4906 | } |
| 4907 | // Fall through |
| 4908 | } |
| 4909 | case ObjectLiteral::Property::PROTOTYPE: { |
| 4910 | // Duplicate the object as an argument to the runtime call. |
| 4911 | frame_->Dup(); |
| 4912 | Load(property->key()); |
| 4913 | Load(property->value()); |
Teng-Hui Zhu | 3e5fa29 | 2010-11-09 16:16:48 -0800 | [diff] [blame] | 4914 | if (property->emit_store()) { |
| 4915 | // Ignore the result. |
| 4916 | Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3); |
| 4917 | } else { |
| 4918 | frame_->Drop(3); |
| 4919 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4920 | break; |
| 4921 | } |
| 4922 | case ObjectLiteral::Property::SETTER: { |
| 4923 | // Duplicate the object as an argument to the runtime call. |
| 4924 | frame_->Dup(); |
| 4925 | Load(property->key()); |
| 4926 | frame_->Push(Smi::FromInt(1)); |
| 4927 | Load(property->value()); |
| 4928 | Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4); |
| 4929 | // Ignore the result. |
| 4930 | break; |
| 4931 | } |
| 4932 | case ObjectLiteral::Property::GETTER: { |
| 4933 | // Duplicate the object as an argument to the runtime call. |
| 4934 | frame_->Dup(); |
| 4935 | Load(property->key()); |
| 4936 | frame_->Push(Smi::FromInt(0)); |
| 4937 | Load(property->value()); |
| 4938 | Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4); |
| 4939 | // Ignore the result. |
| 4940 | break; |
| 4941 | } |
| 4942 | default: UNREACHABLE(); |
| 4943 | } |
| 4944 | } |
| 4945 | } |
| 4946 | |
| 4947 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4948 | void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) { |
| 4949 | Comment cmnt(masm_, "[ ArrayLiteral"); |
| 4950 | |
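| | // JavaScript example (illustrative): 'var a = [1, 2, g(), 4]'
| | // The constant elements come from the boilerplate; the result of g()
| | // must be stored by the generated code below.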
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4951 | // Load a writable copy of the function of this activation in a |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4952 | // register. |
| 4953 | frame_->PushFunction(); |
| 4954 | Result literals = frame_->Pop(); |
| 4955 | literals.ToRegister(); |
| 4956 | frame_->Spill(literals.reg()); |
| 4957 | |
| 4958 | // Load the literals array of the function. |
| 4959 | __ movq(literals.reg(), |
| 4960 | FieldOperand(literals.reg(), JSFunction::kLiteralsOffset)); |
Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 4961 | |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4962 | frame_->Push(&literals); |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4963 | frame_->Push(Smi::FromInt(node->literal_index())); |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4964 | frame_->Push(node->constant_elements()); |
Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 4965 | int length = node->values()->length(); |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4966 | Result clone; |
Iain Merrick | 7568138 | 2010-08-19 15:07:18 +0100 | [diff] [blame] | 4967 | if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) { |
| 4968 | FastCloneShallowArrayStub stub( |
| 4969 | FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length); |
| 4970 | clone = frame_->CallStub(&stub, 3); |
| 4971 | __ IncrementCounter(&Counters::cow_arrays_created_stub, 1); |
| 4972 | } else if (node->depth() > 1) { |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4973 | clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3); |
Iain Merrick | 7568138 | 2010-08-19 15:07:18 +0100 | [diff] [blame] | 4974 | } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 4975 | clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); |
Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 4976 | } else { |
Iain Merrick | 7568138 | 2010-08-19 15:07:18 +0100 | [diff] [blame] | 4977 | FastCloneShallowArrayStub stub( |
| 4978 | FastCloneShallowArrayStub::CLONE_ELEMENTS, length); |
Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 4979 | clone = frame_->CallStub(&stub, 3); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4980 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4981 | frame_->Push(&clone); |
| 4982 | |
| 4983 | // Generate code to set the elements in the array that are not |
| 4984 | // literals. |
Steve Block | 8defd9f | 2010-07-08 12:39:36 +0100 | [diff] [blame] | 4985 | for (int i = 0; i < length; i++) { |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4986 | Expression* value = node->values()->at(i); |
| 4987 | |
Iain Merrick | 7568138 | 2010-08-19 15:07:18 +0100 | [diff] [blame] | 4988 | if (!CompileTimeValue::ArrayLiteralElementNeedsInitialization(value)) { |
| 4989 | continue; |
| 4990 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 4991 | |
| 4992 | // The property must be set by generated code. |
| 4993 | Load(value); |
| 4994 | |
| 4995 | // Get the property value off the stack. |
| 4996 | Result prop_value = frame_->Pop(); |
| 4997 | prop_value.ToRegister(); |
| 4998 | |
| 4999 | // Fetch the array literal while leaving a copy on the stack and |
| 5000 | // use it to get the elements array. |
| 5001 | frame_->Dup(); |
| 5002 | Result elements = frame_->Pop(); |
| 5003 | elements.ToRegister(); |
| 5004 | frame_->Spill(elements.reg()); |
| 5005 | // Get the elements FixedArray. |
| 5006 | __ movq(elements.reg(), |
| 5007 | FieldOperand(elements.reg(), JSObject::kElementsOffset)); |
| 5008 | |
| 5009 | // Write to the indexed properties array. |
| 5010 | int offset = i * kPointerSize + FixedArray::kHeaderSize; |
| 5011 | __ movq(FieldOperand(elements.reg(), offset), prop_value.reg()); |
| 5012 | |
| 5013 | // Update the write barrier for the array address. |
| 5014 | frame_->Spill(prop_value.reg()); // Overwritten by the write barrier. |
| 5015 | Result scratch = allocator_->Allocate(); |
| 5016 | ASSERT(scratch.is_valid()); |
| 5017 | __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg()); |
| 5018 | } |
| 5019 | } |
| 5020 | |
| 5021 | |
| 5022 | void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) { |
| 5023 | ASSERT(!in_spilled_code()); |
| 5024 | // Call runtime routine to allocate the catch extension object and |
| 5025 | // assign the exception value to the catch variable. |
| 5026 | Comment cmnt(masm_, "[ CatchExtensionObject"); |
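| | // JavaScript example (illustrative): in 'try { ... } catch (e) { ... }'
| | // the catch body runs in a scope extended by an object mapping the key
| | // 'e' to the thrown value; this code allocates that object.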
| 5027 | Load(node->key()); |
| 5028 | Load(node->value()); |
| 5029 | Result result = |
| 5030 | frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2); |
| 5031 | frame_->Push(&result); |
| 5032 | } |
| 5033 | |
| 5034 | |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 5035 | void CodeGenerator::EmitSlotAssignment(Assignment* node) { |
| 5036 | #ifdef DEBUG |
| 5037 | int original_height = frame()->height(); |
| 5038 | #endif |
| 5039 | Comment cmnt(masm(), "[ Variable Assignment"); |
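| | // JavaScript examples (illustrative): 'x = v' and the compound
| | // 'x += v', where x lives in a stack or context slot.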
| 5040 | Variable* var = node->target()->AsVariableProxy()->AsVariable(); |
| 5041 | ASSERT(var != NULL); |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 5042 | Slot* slot = var->AsSlot(); |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 5043 | ASSERT(slot != NULL); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5044 | |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 5045 | // Evaluate the right-hand side. |
| 5046 | if (node->is_compound()) { |
| 5047 | // For a compound assignment the right-hand side is a binary operation |
| 5048 | // between the current slot value and the actual right-hand side.
| 5049 | LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF); |
| 5050 | Load(node->value()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5051 | |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 5052 | // Perform the binary operation. |
Kristian Monsen | 80d68ea | 2010-09-08 11:05:35 +0100 | [diff] [blame] | 5053 | bool overwrite_value = node->value()->ResultOverwriteAllowed(); |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 5054 | // Construct the implicit binary operation. |
Kristian Monsen | 80d68ea | 2010-09-08 11:05:35 +0100 | [diff] [blame] | 5055 | BinaryOperation expr(node); |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 5056 | GenericBinaryOperation(&expr, |
| 5057 | overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE); |
| 5058 | } else { |
| 5059 | // For non-compound assignment just load the right-hand side. |
| 5060 | Load(node->value()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5061 | } |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 5062 | |
| 5063 | // Perform the assignment. |
| 5064 | if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) { |
| 5065 | CodeForSourcePosition(node->position()); |
| 5066 | StoreToSlot(slot, |
| 5067 | node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT); |
| 5068 | } |
| 5069 | ASSERT(frame()->height() == original_height + 1); |
| 5070 | } |
| 5071 | |
| 5072 | |
| 5073 | void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) { |
| 5074 | #ifdef DEBUG |
| 5075 | int original_height = frame()->height(); |
| 5076 | #endif |
| 5077 | Comment cmnt(masm(), "[ Named Property Assignment"); |
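| | // JavaScript examples (illustrative): 'o.x = v', 'o.x += v', and the
| | // global 'x = v', which is treated as a named store on the global
| | // object.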
| 5078 | Variable* var = node->target()->AsVariableProxy()->AsVariable(); |
| 5079 | Property* prop = node->target()->AsProperty(); |
| 5080 | ASSERT(var == NULL || (prop == NULL && var->is_global())); |
| 5081 | |
| 5082 | // Initialize name and evaluate the receiver sub-expression if necessary. If |
| 5083 | // the receiver is trivial it is not placed on the stack at this point, but |
| 5084 | // loaded whenever actually needed. |
| 5085 | Handle<String> name; |
| 5086 | bool is_trivial_receiver = false; |
| 5087 | if (var != NULL) { |
| 5088 | name = var->name(); |
| 5089 | } else { |
| 5090 | Literal* lit = prop->key()->AsLiteral(); |
| 5091 | ASSERT_NOT_NULL(lit); |
| 5092 | name = Handle<String>::cast(lit->handle()); |
| 5093 | // Do not materialize the receiver on the frame if it is trivial. |
| 5094 | is_trivial_receiver = prop->obj()->IsTrivial(); |
| 5095 | if (!is_trivial_receiver) Load(prop->obj()); |
| 5096 | } |
| 5097 | |
| 5098 | // Change to slow case in the beginning of an initialization block to |
| 5099 | // avoid the quadratic behavior of repeatedly adding fast properties. |
| 5100 | if (node->starts_initialization_block()) { |
| 5101 | // An initialization block consists of assignments of the form expr.x = ...,
| 5102 | // so this will never be an assignment to a variable and there must be a
| 5103 | // receiver object.
| 5104 | ASSERT_EQ(NULL, var); |
| 5105 | if (is_trivial_receiver) { |
| 5106 | frame()->Push(prop->obj()); |
| 5107 | } else { |
| 5108 | frame()->Dup(); |
| 5109 | } |
| 5110 | Result ignored = frame()->CallRuntime(Runtime::kToSlowProperties, 1); |
| 5111 | } |
| 5112 | |
| 5113 | // Change to fast case at the end of an initialization block. To prepare for |
| 5114 | // that add an extra copy of the receiver to the frame, so that it can be |
| 5115 | // converted back to fast case after the assignment. |
| 5116 | if (node->ends_initialization_block() && !is_trivial_receiver) { |
| 5117 | frame()->Dup(); |
| 5118 | } |
| 5119 | |
| 5120 | // Stack layout: |
| 5121 | // [tos] : receiver (only materialized if non-trivial) |
| 5122 | // [tos+1] : receiver if at the end of an initialization block |
| 5123 | |
| 5124 | // Evaluate the right-hand side. |
| 5125 | if (node->is_compound()) { |
| 5126 | // For a compound assignment the right-hand side is a binary operation |
| 5127 | // between the current property value and the actual right-hand side. |
| 5128 | if (is_trivial_receiver) { |
| 5129 | frame()->Push(prop->obj()); |
| 5130 | } else if (var != NULL) { |
| 5131 | // The LoadIC stub expects the object in rax. |
| 5132 | // Freeing rax causes the code generator to load the global into it. |
| 5133 | frame_->Spill(rax); |
| 5134 | LoadGlobal(); |
| 5135 | } else { |
| 5136 | frame()->Dup(); |
| 5137 | } |
| 5138 | Result value = EmitNamedLoad(name, var != NULL); |
| 5139 | frame()->Push(&value); |
| 5140 | Load(node->value()); |
| 5141 | |
Kristian Monsen | 80d68ea | 2010-09-08 11:05:35 +0100 | [diff] [blame] | 5142 | bool overwrite_value = node->value()->ResultOverwriteAllowed(); |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 5143 | // Construct the implicit binary operation. |
Kristian Monsen | 80d68ea | 2010-09-08 11:05:35 +0100 | [diff] [blame] | 5144 | BinaryOperation expr(node); |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 5145 | GenericBinaryOperation(&expr, |
| 5146 | overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE); |
| 5147 | } else { |
| 5148 | // For non-compound assignment just load the right-hand side. |
| 5149 | Load(node->value()); |
| 5150 | } |
| 5151 | |
| 5152 | // Stack layout: |
| 5153 | // [tos] : value |
| 5154 | // [tos+1] : receiver (only materialized if non-trivial) |
| 5155 | // [tos+2] : receiver if at the end of an initialization block |
| 5156 | |
| 5157 | // Perform the assignment. It is safe to ignore constants here. |
| 5158 | ASSERT(var == NULL || var->mode() != Variable::CONST); |
| 5159 | ASSERT_NE(Token::INIT_CONST, node->op()); |
| 5160 | if (is_trivial_receiver) { |
| 5161 | Result value = frame()->Pop(); |
| 5162 | frame()->Push(prop->obj()); |
| 5163 | frame()->Push(&value); |
| 5164 | } |
| 5165 | CodeForSourcePosition(node->position()); |
| 5166 | bool is_contextual = (var != NULL); |
| 5167 | Result answer = EmitNamedStore(name, is_contextual); |
| 5168 | frame()->Push(&answer); |
| 5169 | |
| 5170 | // Stack layout: |
| 5171 | // [tos] : result |
| 5172 | // [tos+1] : receiver if at the end of an initialization block |
| 5173 | |
| 5174 | if (node->ends_initialization_block()) { |
| 5175 | ASSERT_EQ(NULL, var); |
| 5176 | // The argument to the runtime call is the receiver. |
| 5177 | if (is_trivial_receiver) { |
| 5178 | frame()->Push(prop->obj()); |
| 5179 | } else { |
| 5180 | // A copy of the receiver is below the value of the assignment. Swap |
| 5181 | // the receiver and the value of the assignment expression. |
| 5182 | Result result = frame()->Pop(); |
| 5183 | Result receiver = frame()->Pop(); |
| 5184 | frame()->Push(&result); |
| 5185 | frame()->Push(&receiver); |
| 5186 | } |
| 5187 | Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1); |
| 5188 | } |
| 5189 | |
| 5190 | // Stack layout: |
| 5191 | // [tos] : result |
| 5192 | |
| 5193 | ASSERT_EQ(frame()->height(), original_height + 1); |
| 5194 | } |
| 5195 | |
| 5196 | |
| 5197 | void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) { |
| 5198 | #ifdef DEBUG |
| 5199 | int original_height = frame()->height(); |
| 5200 | #endif |
| 5201 | Comment cmnt(masm_, "[ Keyed Property Assignment"); |
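| | // JavaScript examples (illustrative): 'o[i] = v' and the compound
| | // 'o[i] += v'.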
| 5202 | Property* prop = node->target()->AsProperty(); |
| 5203 | ASSERT_NOT_NULL(prop); |
| 5204 | |
| 5205 | // Evaluate the receiver subexpression. |
| 5206 | Load(prop->obj()); |
| 5207 | |
| 5208 | // Change to slow case in the beginning of an initialization block to |
| 5209 | // avoid the quadratic behavior of repeatedly adding fast properties. |
| 5210 | if (node->starts_initialization_block()) { |
| 5211 | frame_->Dup(); |
| 5212 | Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1); |
| 5213 | } |
| 5214 | |
| 5215 | // Change to fast case at the end of an initialization block. To prepare for |
| 5216 | // that add an extra copy of the receiver to the frame, so that it can be |
| 5217 | // converted back to fast case after the assignment. |
| 5218 | if (node->ends_initialization_block()) { |
| 5219 | frame_->Dup(); |
| 5220 | } |
| 5221 | |
| 5222 | // Evaluate the key subexpression. |
| 5223 | Load(prop->key()); |
| 5224 | |
| 5225 | // Stack layout: |
| 5226 | // [tos] : key |
| 5227 | // [tos+1] : receiver |
| 5228 | // [tos+2] : receiver if at the end of an initialization block |
| 5229 | |
| 5230 | // Evaluate the right-hand side. |
| 5231 | if (node->is_compound()) { |
| 5232 | // For a compound assignment the right-hand side is a binary operation |
| 5233 | // between the current property value and the actual right-hand side. |
| 5234 | // Duplicate receiver and key for loading the current property value. |
| 5235 | frame()->PushElementAt(1); |
| 5236 | frame()->PushElementAt(1); |
| 5237 | Result value = EmitKeyedLoad(); |
| 5238 | frame()->Push(&value); |
| 5239 | Load(node->value()); |
| 5240 | |
| 5241 | // Perform the binary operation. |
Kristian Monsen | 80d68ea | 2010-09-08 11:05:35 +0100 | [diff] [blame] | 5242 | bool overwrite_value = node->value()->ResultOverwriteAllowed(); |
| 5243 | BinaryOperation expr(node); |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 5244 | GenericBinaryOperation(&expr, |
| 5245 | overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE); |
| 5246 | } else { |
| 5247 | // For non-compound assignment just load the right-hand side. |
| 5248 | Load(node->value()); |
| 5249 | } |
| 5250 | |
| 5251 | // Stack layout: |
| 5252 | // [tos] : value |
| 5253 | // [tos+1] : key |
| 5254 | // [tos+2] : receiver |
| 5255 | // [tos+3] : receiver if at the end of an initialization block |
| 5256 | |
| 5257 | // Perform the assignment. It is safe to ignore constants here. |
| 5258 | ASSERT(node->op() != Token::INIT_CONST); |
| 5259 | CodeForSourcePosition(node->position()); |
| 5260 | Result answer = EmitKeyedStore(prop->key()->type()); |
| 5261 | frame()->Push(&answer); |
| 5262 | |
| 5263 | // Stack layout: |
| 5264 | // [tos] : result |
| 5265 | // [tos+1] : receiver if at the end of an initialization block |
| 5266 | |
| 5267 | // Change to fast case at the end of an initialization block. |
| 5268 | if (node->ends_initialization_block()) { |
| 5269 | // The argument to the runtime call is the extra copy of the receiver, |
| 5270 | // which is below the value of the assignment. Swap the receiver and |
| 5271 | // the value of the assignment expression. |
| 5272 | Result result = frame()->Pop(); |
| 5273 | Result receiver = frame()->Pop(); |
| 5274 | frame()->Push(&result); |
| 5275 | frame()->Push(&receiver); |
| 5276 | Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1); |
| 5277 | } |
| 5278 | |
| 5279 | // Stack layout: |
| 5280 | // [tos] : result |
| 5281 | |
| 5282 | ASSERT(frame()->height() == original_height + 1); |
| 5283 | } |
| 5284 | |
| 5285 | |
| 5286 | void CodeGenerator::VisitAssignment(Assignment* node) { |
| 5287 | #ifdef DEBUG |
| 5288 | int original_height = frame()->height(); |
| 5289 | #endif |
| 5290 | Variable* var = node->target()->AsVariableProxy()->AsVariable(); |
| 5291 | Property* prop = node->target()->AsProperty(); |
| 5292 | |
| 5293 | if (var != NULL && !var->is_global()) { |
| 5294 | EmitSlotAssignment(node); |
| 5295 | |
| 5296 | } else if ((prop != NULL && prop->key()->IsPropertyName()) || |
| 5297 | (var != NULL && var->is_global())) { |
| 5298 | // Properties whose keys are property names and global variables are |
| 5299 | // treated as named property references. We do not need to consider |
| 5300 | // global 'this' because it is not a valid left-hand side. |
| 5301 | EmitNamedPropertyAssignment(node); |
| 5302 | |
| 5303 | } else if (prop != NULL) { |
| 5304 | // Other properties (including rewritten parameters for a function that |
| 5305 | // uses arguments) are keyed property assignments. |
| 5306 | EmitKeyedPropertyAssignment(node); |
| 5307 | |
| 5308 | } else { |
| 5309 | // Invalid left-hand side. |
| 5310 | Load(node->target()); |
| 5311 | Result result = frame()->CallRuntime(Runtime::kThrowReferenceError, 1); |
| 5312 | // The runtime call doesn't actually return but the code generator will |
| 5313 | // still generate code and expects a certain frame height. |
| 5314 | frame()->Push(&result); |
| 5315 | } |
| 5316 | |
| 5317 | ASSERT(frame()->height() == original_height + 1); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5318 | } |
| 5319 | |
| 5320 | |
| 5321 | void CodeGenerator::VisitThrow(Throw* node) { |
| 5322 | Comment cmnt(masm_, "[ Throw"); |
| 5323 | Load(node->exception()); |
| 5324 | Result result = frame_->CallRuntime(Runtime::kThrow, 1); |
| 5325 | frame_->Push(&result); |
| 5326 | } |
| 5327 | |
| 5328 | |
| 5329 | void CodeGenerator::VisitProperty(Property* node) { |
| 5330 | Comment cmnt(masm_, "[ Property"); |
| 5331 | Reference property(this, node); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 5332 | property.GetValue(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5333 | } |
| 5334 | |
| 5335 | |
| 5336 | void CodeGenerator::VisitCall(Call* node) { |
| 5337 | Comment cmnt(masm_, "[ Call"); |
| 5338 | |
| 5339 | ZoneList<Expression*>* args = node->arguments(); |
| 5340 | |
| 5341 | // Check if the function is a variable or a property. |
| 5342 | Expression* function = node->expression(); |
| 5343 | Variable* var = function->AsVariableProxy()->AsVariable(); |
| 5344 | Property* property = function->AsProperty(); |
| 5345 | |
| 5346 | // ------------------------------------------------------------------------ |
| 5347 | // Fast-case: Use inline caching. |
| 5348 | // --- |
| 5349 | // According to ECMA-262, section 11.2.3, page 44, the function to call |
| 5350 | // must be resolved after the arguments have been evaluated. The IC code |
| 5351 | // automatically handles this by loading the arguments before the function |
| 5352 | // is resolved in cache misses (this also holds for megamorphic calls). |
| 5353 | // ------------------------------------------------------------------------ |
| 5354 | |
| 5355 | if (var != NULL && var->is_possibly_eval()) { |
| 5356 | // ---------------------------------- |
| 5357 | // JavaScript example: 'eval(arg)' // eval is not known to be shadowed |
| 5358 | // ---------------------------------- |
| 5359 | |
| 5360 | // In a call to eval, we first call %ResolvePossiblyDirectEval to |
| 5361 | // resolve the function we need to call and the receiver of the |
| 5362 | // call. Then we call the resolved function using the given |
| 5363 | // arguments. |
| 5364 | |
| 5365 | // Prepare the stack for the call to the resolved function. |
| 5366 | Load(function); |
| 5367 | |
| 5368 | // Allocate a frame slot for the receiver. |
| 5369 | frame_->Push(Factory::undefined_value()); |
Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 5370 | |
| 5371 | // Load the arguments. |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5372 | int arg_count = args->length(); |
| 5373 | for (int i = 0; i < arg_count; i++) { |
| 5374 | Load(args->at(i)); |
Leon Clarke | f7060e2 | 2010-06-03 12:02:55 +0100 | [diff] [blame] | 5375 | frame_->SpillTop(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5376 | } |
| 5377 | |
Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 5378 | // Result to hold the result of the function resolution and the |
| 5379 | // final result of the eval call. |
| 5380 | Result result; |
| 5381 | |
| 5382 | // If we know that eval can only be shadowed by eval-introduced |
| 5383 | // variables we attempt to load the global eval function directly |
| 5384 | // in generated code. If we succeed, there is no need to perform a |
| 5385 | // context lookup in the runtime system. |
| 5386 | JumpTarget done; |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 5387 | if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) { |
| 5388 | ASSERT(var->AsSlot()->type() == Slot::LOOKUP); |
Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 5389 | JumpTarget slow; |
| 5390 | // Prepare the stack for the call to |
| 5391 | // ResolvePossiblyDirectEvalNoLookup by pushing the loaded |
| 5392 | // function, the first argument to the eval call and the |
| 5393 | // receiver. |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 5394 | Result fun = LoadFromGlobalSlotCheckExtensions(var->AsSlot(), |
Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 5395 | NOT_INSIDE_TYPEOF, |
| 5396 | &slow); |
| 5397 | frame_->Push(&fun); |
| 5398 | if (arg_count > 0) { |
| 5399 | frame_->PushElementAt(arg_count); |
| 5400 | } else { |
| 5401 | frame_->Push(Factory::undefined_value()); |
| 5402 | } |
| 5403 | frame_->PushParameterAt(-1); |
| 5404 | |
| 5405 | // Resolve the call. |
| 5406 | result = |
| 5407 | frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 3); |
| 5408 | |
| 5409 | done.Jump(&result); |
| 5410 | slow.Bind(); |
| 5411 | } |
| 5412 | |
| 5413 | // Prepare the stack for the call to ResolvePossiblyDirectEval by |
| 5414 | // pushing the loaded function, the first argument to the eval |
| 5415 | // call and the receiver. |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5416 | frame_->PushElementAt(arg_count + 1); |
| 5417 | if (arg_count > 0) { |
| 5418 | frame_->PushElementAt(arg_count); |
| 5419 | } else { |
| 5420 | frame_->Push(Factory::undefined_value()); |
| 5421 | } |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 5422 | frame_->PushParameterAt(-1); |
| 5423 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5424 | // Resolve the call. |
Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 5425 | result = frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3); |
| 5426 | |
| 5427 | // If we generated fast-case code bind the jump-target where fast |
| 5428 | // and slow case merge. |
| 5429 | if (done.is_linked()) done.Bind(&result); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5430 | |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 5431 | // The runtime call returns a pair of values in rax (function) and |
| 5432 | // rdx (receiver). Touch up the stack with the right values. |
| 5433 | Result receiver = allocator_->Allocate(rdx); |
| 5434 | frame_->SetElementAt(arg_count + 1, &result); |
| 5435 | frame_->SetElementAt(arg_count, &receiver); |
| 5436 | receiver.Unuse(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5437 | |
| 5438 | // Call the function. |
| 5439 | CodeForSourcePosition(node->position()); |
| 5440 | InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP; |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 5441 | CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5442 | result = frame_->CallStub(&call_function, arg_count + 1); |
| 5443 | |
| 5444 | // Restore the context and overwrite the function on the stack with |
| 5445 | // the result. |
| 5446 | frame_->RestoreContextRegister(); |
| 5447 | frame_->SetElementAt(0, &result); |
| 5448 | |
| 5449 | } else if (var != NULL && !var->is_this() && var->is_global()) { |
| 5450 | // ---------------------------------- |
| 5451 | // JavaScript example: 'foo(1, 2, 3)' // foo is global |
| 5452 | // ---------------------------------- |
| 5453 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5454 | // Pass the global object as the receiver and let the IC stub |
| 5455 | // patch the stack to use the global proxy as 'this' in the |
| 5456 | // invoked function. |
| 5457 | LoadGlobal(); |
| 5458 | |
| 5459 | // Load the arguments. |
| 5460 | int arg_count = args->length(); |
| 5461 | for (int i = 0; i < arg_count; i++) { |
| 5462 | Load(args->at(i)); |
Leon Clarke | f7060e2 | 2010-06-03 12:02:55 +0100 | [diff] [blame] | 5463 | frame_->SpillTop(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5464 | } |
| 5465 | |
Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 5466 | // Push the name of the function on the frame. |
| 5467 | frame_->Push(var->name()); |
| 5468 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5469 | // Call the IC initialization code. |
| 5470 | CodeForSourcePosition(node->position()); |
| 5471 | Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET_CONTEXT, |
| 5472 | arg_count, |
| 5473 | loop_nesting()); |
| 5474 | frame_->RestoreContextRegister(); |
| 5475 | // Replace the function on the stack with the result. |
Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 5476 | frame_->Push(&result); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5477 | |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 5478 | } else if (var != NULL && var->AsSlot() != NULL && |
| 5479 | var->AsSlot()->type() == Slot::LOOKUP) { |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5480 | // ---------------------------------- |
Kristian Monsen | 25f6136 | 2010-05-21 11:50:48 +0100 | [diff] [blame] | 5481 | // JavaScript examples: |
| 5482 | // |
| 5483 | // with (obj) foo(1, 2, 3) // foo may be in obj. |
| 5484 | // |
| 5485 | // function f() {}; |
| 5486 | // function g() { |
| 5487 | // eval(...); |
| 5488 | // f(); // f could be in extension object. |
| 5489 | // } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5490 | // ---------------------------------- |
| 5491 | |
Kristian Monsen | 25f6136 | 2010-05-21 11:50:48 +0100 | [diff] [blame] | 5492 | JumpTarget slow, done; |
| 5493 | Result function; |
| 5494 | |
| 5495 | // Generate fast case for loading functions from slots that |
| 5496 | // correspond to local/global variables or arguments unless they |
| 5497 | // are shadowed by eval-introduced bindings. |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 5498 | EmitDynamicLoadFromSlotFastCase(var->AsSlot(), |
Kristian Monsen | 25f6136 | 2010-05-21 11:50:48 +0100 | [diff] [blame] | 5499 | NOT_INSIDE_TYPEOF, |
| 5500 | &function, |
| 5501 | &slow, |
| 5502 | &done); |
| 5503 | |
| 5504 | slow.Bind(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5505 | // Load the function from the context. Sync the frame so we can |
| 5506 | // push the arguments directly into place. |
| 5507 | frame_->SyncRange(0, frame_->element_count() - 1); |
| 5508 | frame_->EmitPush(rsi); |
| 5509 | frame_->EmitPush(var->name()); |
| 5510 | frame_->CallRuntime(Runtime::kLoadContextSlot, 2); |
| 5511 | // The runtime call returns a pair of values in rax and rdx. The |
| 5512 | // looked-up function is in rax and the receiver is in rdx. These |
| 5513 | // register references are not ref counted here. We spill them |
| 5514 | // eagerly since they are arguments to an inevitable call (and are |
| 5515 | // not sharable by the arguments). |
| 5516 | ASSERT(!allocator()->is_used(rax)); |
| 5517 | frame_->EmitPush(rax); |
| 5518 | |
| 5519 | // Load the receiver. |
| 5520 | ASSERT(!allocator()->is_used(rdx)); |
| 5521 | frame_->EmitPush(rdx); |
| 5522 | |
Kristian Monsen | 25f6136 | 2010-05-21 11:50:48 +0100 | [diff] [blame] | 5523 | // If fast case code has been generated, emit code to push the |
| 5524 | // function and receiver and have the slow path jump around this |
| 5525 | // code. |
| 5526 | if (done.is_linked()) { |
| 5527 | JumpTarget call; |
| 5528 | call.Jump(); |
| 5529 | done.Bind(&function); |
| 5530 | frame_->Push(&function); |
| 5531 | LoadGlobalReceiver(); |
| 5532 | call.Bind(); |
| 5533 | } |
| 5534 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5535 | // Call the function. |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 5536 | CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5537 | |
| 5538 | } else if (property != NULL) { |
| 5539 | // Check if the key is a literal string. |
| 5540 | Literal* literal = property->key()->AsLiteral(); |
| 5541 | |
| 5542 | if (literal != NULL && literal->handle()->IsSymbol()) { |
| 5543 | // ------------------------------------------------------------------ |
| 5544 | // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)' |
| 5545 | // ------------------------------------------------------------------ |
| 5546 | |
| 5547 | Handle<String> name = Handle<String>::cast(literal->handle()); |
| 5548 | |
| 5549 | if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION && |
| 5550 | name->IsEqualTo(CStrVector("apply")) && |
| 5551 | args->length() == 2 && |
| 5552 | args->at(1)->AsVariableProxy() != NULL && |
| 5553 | args->at(1)->AsVariableProxy()->IsArguments()) { |
| 5554 | // Use the optimized Function.prototype.apply that avoids |
| 5555 | // allocating lazily allocated arguments objects. |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 5556 | CallApplyLazy(property->obj(), |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5557 | args->at(0), |
| 5558 | args->at(1)->AsVariableProxy(), |
| 5559 | node->position()); |
| 5560 | |
| 5561 | } else { |
Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 5562 | // Push the receiver onto the frame. |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5563 | Load(property->obj()); |
| 5564 | |
| 5565 | // Load the arguments. |
| 5566 | int arg_count = args->length(); |
| 5567 | for (int i = 0; i < arg_count; i++) { |
| 5568 | Load(args->at(i)); |
Leon Clarke | f7060e2 | 2010-06-03 12:02:55 +0100 | [diff] [blame] | 5569 | frame_->SpillTop(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5570 | } |
| 5571 | |
Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 5572 | // Push the name of the function onto the frame. |
| 5573 | frame_->Push(name); |
| 5574 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5575 | // Call the IC initialization code. |
| 5576 | CodeForSourcePosition(node->position()); |
| 5577 | Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET, |
| 5578 | arg_count, |
| 5579 | loop_nesting()); |
| 5580 | frame_->RestoreContextRegister(); |
Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 5581 | frame_->Push(&result); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5582 | } |
| 5583 | |
| 5584 | } else { |
| 5585 | // ------------------------------------------- |
| 5586 | // JavaScript example: 'array[index](1, 2, 3)' |
| 5587 | // ------------------------------------------- |
| 5588 | |
| 5589 | // Load the function to call from the property through a reference. |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5590 | if (property->is_synthetic()) { |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 5591 | Reference ref(this, property, false); |
| 5592 | ref.GetValue(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5593 | // Use global object as receiver. |
| 5594 | LoadGlobalReceiver(); |
Kristian Monsen | 9dcf7e2 | 2010-06-28 14:14:28 +0100 | [diff] [blame] | 5595 | // Call the function. |
| 5596 | CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5597 | } else { |
Kristian Monsen | 9dcf7e2 | 2010-06-28 14:14:28 +0100 | [diff] [blame] | 5598 | // Push the receiver onto the frame. |
| 5599 | Load(property->obj()); |
| 5600 | |
Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 5601 | // Load the name of the function. |
| 5602 | Load(property->key()); |
| 5603 | |
| 5604 | // Swap the name of the function and the receiver on the stack to follow |
| 5605 | // the calling convention for call ICs. |
| 5606 | Result key = frame_->Pop(); |
| 5607 | Result receiver = frame_->Pop(); |
| 5608 | frame_->Push(&key); |
| 5609 | frame_->Push(&receiver); |
| 5610 | key.Unuse(); |
| 5611 | receiver.Unuse(); |
| 5612 | |
Kristian Monsen | 9dcf7e2 | 2010-06-28 14:14:28 +0100 | [diff] [blame] | 5613 | // Load the arguments. |
| 5614 | int arg_count = args->length(); |
| 5615 | for (int i = 0; i < arg_count; i++) { |
| 5616 | Load(args->at(i)); |
| 5617 | frame_->SpillTop(); |
| 5618 | } |
| 5619 | |
Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 5620 | // Place the key on top of stack and call the IC initialization code. |
| 5621 | frame_->PushElementAt(arg_count + 1); |
Kristian Monsen | 9dcf7e2 | 2010-06-28 14:14:28 +0100 | [diff] [blame] | 5622 | CodeForSourcePosition(node->position()); |
| 5623 | Result result = frame_->CallKeyedCallIC(RelocInfo::CODE_TARGET, |
| 5624 | arg_count, |
| 5625 | loop_nesting()); |
Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 5626 | frame_->Drop(); // Drop the key still on the stack. |
Kristian Monsen | 9dcf7e2 | 2010-06-28 14:14:28 +0100 | [diff] [blame] | 5627 | frame_->RestoreContextRegister(); |
| 5628 | frame_->Push(&result); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5629 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5630 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5631 | } else { |
| 5632 | // ---------------------------------- |
| 5633 | // JavaScript example: 'foo(1, 2, 3)' // foo is not global |
| 5634 | // ---------------------------------- |
| 5635 | |
| 5636 | // Load the function. |
| 5637 | Load(function); |
| 5638 | |
| 5639 | // Pass the global proxy as the receiver. |
| 5640 | LoadGlobalReceiver(); |
| 5641 | |
| 5642 | // Call the function. |
Leon Clarke | e46be81 | 2010-01-19 14:06:41 +0000 | [diff] [blame] | 5643 | CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5644 | } |
| 5645 | } |
| 5646 | |
| 5647 | |
| 5648 | void CodeGenerator::VisitCallNew(CallNew* node) { |
| 5649 | Comment cmnt(masm_, "[ CallNew"); |
| 5650 | |
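| | // JavaScript example (illustrative): 'new Foo(1, 2)'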
| 5651 | // According to ECMA-262, section 11.2.2, page 44, the function |
| 5652 | // expression in new calls must be evaluated before the |
| 5653 | // arguments. This is different from ordinary calls, where the |
| 5654 | // actual function to call is resolved after the arguments have been |
| 5655 | // evaluated. |
| 5656 | |
Kristian Monsen | 80d68ea | 2010-09-08 11:05:35 +0100 | [diff] [blame] | 5657 | // Push the constructor on the stack. If it's not a function it's used as
| 5658 | // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is |
| 5659 | // ignored. |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5660 | Load(node->expression()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5661 | |
| 5662 | // Push the arguments ("left-to-right") on the stack. |
| 5663 | ZoneList<Expression*>* args = node->arguments(); |
| 5664 | int arg_count = args->length(); |
| 5665 | for (int i = 0; i < arg_count; i++) { |
| 5666 | Load(args->at(i)); |
| 5667 | } |
| 5668 | |
| 5669 | // Call the construct call builtin that handles allocation and |
| 5670 | // constructor invocation. |
| 5671 | CodeForSourcePosition(node->position()); |
| 5672 | Result result = frame_->CallConstructor(arg_count); |
Kristian Monsen | 80d68ea | 2010-09-08 11:05:35 +0100 | [diff] [blame] | 5673 | frame_->Push(&result); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 5674 | } |
| 5675 | |
| 5676 | |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 5677 | void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) { |
| 5678 | ASSERT(args->length() == 1); |
| 5679 | Load(args->at(0)); |
| 5680 | Result value = frame_->Pop(); |
| 5681 | value.ToRegister(); |
| 5682 | ASSERT(value.is_valid()); |
| 5683 | Condition is_smi = masm_->CheckSmi(value.reg()); |
| 5684 | value.Unuse(); |
| 5685 | destination()->Split(is_smi); |
| 5686 | } |
| 5687 | |
| 5688 | |
| 5689 | void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) { |
| 5690 | // Conditionally generate a log call. |
| 5691 | // Args: |
| 5692 | // 0 (literal string): The type of logging (corresponds to the flags). |
| 5693 | // This is used to determine whether or not to generate the log call. |
| 5694 | // 1 (string): Format string. Access the string at argument index 2 |
| 5695 | // with '%2s' (see Logger::LogRuntime for all the formats). |
| 5696 | // 2 (array): Arguments to the format string. |
| 5697 | ASSERT_EQ(args->length(), 3); |
| 5698 | #ifdef ENABLE_LOGGING_AND_PROFILING |
| 5699 | if (ShouldGenerateLog(args->at(0))) { |
| 5700 | Load(args->at(1)); |
| 5701 | Load(args->at(2)); |
| 5702 | frame_->CallRuntime(Runtime::kLog, 2); |
| 5703 | } |
| 5704 | #endif |
| 5705 | // Finally, we're expected to leave a value on the top of the stack. |
| 5706 | frame_->Push(Factory::undefined_value()); |
| 5707 | } |
| 5708 | |
| 5709 | |
| 5710 | void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) { |
| 5711 | ASSERT(args->length() == 1); |
| 5712 | Load(args->at(0)); |
| 5713 | Result value = frame_->Pop(); |
| 5714 | value.ToRegister(); |
| 5715 | ASSERT(value.is_valid()); |
Ben Murdoch | f87a203 | 2010-10-22 12:50:53 +0100 | [diff] [blame] | 5716 | Condition non_negative_smi = masm_->CheckNonNegativeSmi(value.reg()); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 5717 | value.Unuse(); |
Ben Murdoch | f87a203 | 2010-10-22 12:50:53 +0100 | [diff] [blame] | 5718 | destination()->Split(non_negative_smi); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 5719 | } |
| 5720 | |
| 5721 | |
| 5722 | class DeferredStringCharCodeAt : public DeferredCode { |
| 5723 | public: |
| 5724 | DeferredStringCharCodeAt(Register object, |
| 5725 | Register index, |
| 5726 | Register scratch, |
| 5727 | Register result) |
| 5728 | : result_(result), |
| 5729 | char_code_at_generator_(object, |
| 5730 | index, |
| 5731 | scratch, |
| 5732 | result, |
| 5733 | &need_conversion_, |
| 5734 | &need_conversion_, |
| 5735 | &index_out_of_range_, |
| 5736 | STRING_INDEX_IS_NUMBER) {} |
| 5737 | |
| 5738 | StringCharCodeAtGenerator* fast_case_generator() { |
| 5739 | return &char_code_at_generator_; |
| 5740 | } |
| 5741 | |
| 5742 | virtual void Generate() { |
| 5743 | VirtualFrameRuntimeCallHelper call_helper(frame_state()); |
| 5744 | char_code_at_generator_.GenerateSlow(masm(), call_helper); |
| 5745 | |
| 5746 | __ bind(&need_conversion_); |
| 5747 | // Move the undefined value into the result register, which will |
| 5748 | // trigger conversion. |
| 5749 | __ LoadRoot(result_, Heap::kUndefinedValueRootIndex); |
| 5750 | __ jmp(exit_label()); |
| 5751 | |
| 5752 | __ bind(&index_out_of_range_); |
| 5753 | // When the index is out of range, the spec requires us to return |
| 5754 | // NaN. |
| 5755 | __ LoadRoot(result_, Heap::kNanValueRootIndex); |
| 5756 | __ jmp(exit_label()); |
| 5757 | } |
| 5758 | |
| 5759 | private: |
| 5760 | Register result_; |
| 5761 | |
| 5762 | Label need_conversion_; |
| 5763 | Label index_out_of_range_; |
| 5764 | |
| 5765 | StringCharCodeAtGenerator char_code_at_generator_; |
| 5766 | }; |
| 5767 | |
| 5768 | |
| 5769 | // This generates code that performs a String.prototype.charCodeAt() call |
| 5770 | // or returns undefined in order to trigger conversion.
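| | // JavaScript example (illustrative): '"abc".charCodeAt(1)' yields 98;
| | // an out-of-range index yields NaN via the deferred code above.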
| 5771 | void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) { |
| 5772 | Comment(masm_, "[ GenerateStringCharCodeAt"); |
| 5773 | ASSERT(args->length() == 2); |
| 5774 | |
| 5775 | Load(args->at(0)); |
| 5776 | Load(args->at(1)); |
| 5777 | Result index = frame_->Pop(); |
| 5778 | Result object = frame_->Pop(); |
| 5779 | object.ToRegister(); |
| 5780 | index.ToRegister(); |
| 5781 | // We might mutate the object register. |
| 5782 | frame_->Spill(object.reg()); |
| 5783 | |
| 5784 | // We need two extra registers. |
| 5785 | Result result = allocator()->Allocate(); |
| 5786 | ASSERT(result.is_valid()); |
| 5787 | Result scratch = allocator()->Allocate(); |
| 5788 | ASSERT(scratch.is_valid()); |
| 5789 | |
| 5790 | DeferredStringCharCodeAt* deferred = |
| 5791 | new DeferredStringCharCodeAt(object.reg(), |
| 5792 | index.reg(), |
| 5793 | scratch.reg(), |
| 5794 | result.reg()); |
| 5795 | deferred->fast_case_generator()->GenerateFast(masm_); |
| 5796 | deferred->BindExit(); |
| 5797 | frame_->Push(&result); |
| 5798 | } |
| 5799 | |
| 5800 | |
| 5801 | class DeferredStringCharFromCode : public DeferredCode { |
| 5802 | public: |
| 5803 | DeferredStringCharFromCode(Register code, |
| 5804 | Register result) |
| 5805 | : char_from_code_generator_(code, result) {} |
| 5806 | |
| 5807 | StringCharFromCodeGenerator* fast_case_generator() { |
| 5808 | return &char_from_code_generator_; |
| 5809 | } |
| 5810 | |
| 5811 | virtual void Generate() { |
| 5812 | VirtualFrameRuntimeCallHelper call_helper(frame_state()); |
| 5813 | char_from_code_generator_.GenerateSlow(masm(), call_helper); |
| 5814 | } |
| 5815 | |
| 5816 | private: |
| 5817 | StringCharFromCodeGenerator char_from_code_generator_; |
| 5818 | }; |
| 5819 | |
| 5820 | |
| 5821 | // Generates code for creating a one-char string from a char code. |
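| | // JavaScript example (illustrative): 'String.fromCharCode(65)' yields "A".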
| 5822 | void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) { |
| 5823 | Comment(masm_, "[ GenerateStringCharFromCode"); |
| 5824 | ASSERT(args->length() == 1); |
| 5825 | |
| 5826 | Load(args->at(0)); |
| 5827 | |
| 5828 | Result code = frame_->Pop(); |
| 5829 | code.ToRegister(); |
| 5830 | ASSERT(code.is_valid()); |
| 5831 | |
| 5832 | Result result = allocator()->Allocate(); |
| 5833 | ASSERT(result.is_valid()); |
| 5834 | |
| 5835 | DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode( |
| 5836 | code.reg(), result.reg()); |
| 5837 | deferred->fast_case_generator()->GenerateFast(masm_); |
| 5838 | deferred->BindExit(); |
| 5839 | frame_->Push(&result); |
| 5840 | } |
| 5841 | |
| 5842 | |
| 5843 | class DeferredStringCharAt : public DeferredCode { |
| 5844 | public: |
| 5845 | DeferredStringCharAt(Register object, |
| 5846 | Register index, |
| 5847 | Register scratch1, |
| 5848 | Register scratch2, |
| 5849 | Register result) |
| 5850 | : result_(result), |
| 5851 | char_at_generator_(object, |
| 5852 | index, |
| 5853 | scratch1, |
| 5854 | scratch2, |
| 5855 | result, |
| 5856 | &need_conversion_, |
| 5857 | &need_conversion_, |
| 5858 | &index_out_of_range_, |
| 5859 | STRING_INDEX_IS_NUMBER) {} |
| 5860 | |
| 5861 | StringCharAtGenerator* fast_case_generator() { |
| 5862 | return &char_at_generator_; |
| 5863 | } |
| 5864 | |
| 5865 | virtual void Generate() { |
| 5866 | VirtualFrameRuntimeCallHelper call_helper(frame_state()); |
| 5867 | char_at_generator_.GenerateSlow(masm(), call_helper); |
| 5868 | |
| 5869 | __ bind(&need_conversion_); |
| 5870 | // Move smi zero into the result register, which will trigger |
| 5871 | // conversion. |
| 5872 | __ Move(result_, Smi::FromInt(0)); |
| 5873 | __ jmp(exit_label()); |
| 5874 | |
| 5875 | __ bind(&index_out_of_range_); |
| 5876 | // When the index is out of range, the spec requires us to return |
| 5877 | // the empty string. |
| 5878 | __ LoadRoot(result_, Heap::kEmptyStringRootIndex); |
| 5879 | __ jmp(exit_label()); |
| 5880 | } |
| 5881 | |
| 5882 | private: |
| 5883 | Register result_; |
| 5884 | |
| 5885 | Label need_conversion_; |
| 5886 | Label index_out_of_range_; |
| 5887 | |
| 5888 | StringCharAtGenerator char_at_generator_; |
| 5889 | }; |
| 5890 | |
| 5891 | |
| 5892 | // This generates code that performs a String.prototype.charAt() call |
| 5893 | // or returns a smi in order to trigger conversion. |
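| | // JavaScript example (illustrative): '"abc".charAt(1)' yields "b"; an
| | // out-of-range index yields the empty string via the deferred code above.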
| 5894 | void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) { |
| 5895 | Comment(masm_, "[ GenerateStringCharAt"); |
| 5896 | ASSERT(args->length() == 2); |
| 5897 | |
| 5898 | Load(args->at(0)); |
| 5899 | Load(args->at(1)); |
| 5900 | Result index = frame_->Pop(); |
| 5901 | Result object = frame_->Pop(); |
| 5902 | object.ToRegister(); |
| 5903 | index.ToRegister(); |
| 5904 | // We might mutate the object register. |
| 5905 | frame_->Spill(object.reg()); |
| 5906 | |
| 5907 | // We need three extra registers. |
| 5908 | Result result = allocator()->Allocate(); |
| 5909 | ASSERT(result.is_valid()); |
| 5910 | Result scratch1 = allocator()->Allocate(); |
| 5911 | ASSERT(scratch1.is_valid()); |
| 5912 | Result scratch2 = allocator()->Allocate(); |
| 5913 | ASSERT(scratch2.is_valid()); |
| 5914 | |
| 5915 | DeferredStringCharAt* deferred = |
| 5916 | new DeferredStringCharAt(object.reg(), |
| 5917 | index.reg(), |
| 5918 | scratch1.reg(), |
| 5919 | scratch2.reg(), |
| 5920 | result.reg()); |
| 5921 | deferred->fast_case_generator()->GenerateFast(masm_); |
| 5922 | deferred->BindExit(); |
| 5923 | frame_->Push(&result); |
| 5924 | } |
| 5925 | |
| 5926 | |
| 5927 | void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) { |
| 5928 | ASSERT(args->length() == 1); |
| 5929 | Load(args->at(0)); |
| 5930 | Result value = frame_->Pop(); |
| 5931 | value.ToRegister(); |
| 5932 | ASSERT(value.is_valid()); |
| 5933 | Condition is_smi = masm_->CheckSmi(value.reg()); |
| 5934 | destination()->false_target()->Branch(is_smi); |
| 5935 | // It is a heap object - get map. |
| 5936 | // Check if the object is a JS array or not. |
| 5937 | __ CmpObjectType(value.reg(), JS_ARRAY_TYPE, kScratchRegister); |
| 5938 | value.Unuse(); |
| 5939 | destination()->Split(equal); |
| 5940 | } |
| 5941 | |
| 5942 | |
| 5943 | void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) { |
| 5944 | ASSERT(args->length() == 1); |
| 5945 | Load(args->at(0)); |
| 5946 | Result value = frame_->Pop(); |
| 5947 | value.ToRegister(); |
| 5948 | ASSERT(value.is_valid()); |
| 5949 | Condition is_smi = masm_->CheckSmi(value.reg()); |
| 5950 | destination()->false_target()->Branch(is_smi); |
| 5951 | // It is a heap object - get map. |
| 5952 | // Check if the object is a regexp. |
| 5953 | __ CmpObjectType(value.reg(), JS_REGEXP_TYPE, kScratchRegister); |
| 5954 | value.Unuse(); |
| 5955 | destination()->Split(equal); |
| 5956 | } |
| 5957 | |
| 5958 | |
| 5959 | void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) { |
| 5960 | // This generates a fast version of: |
| 5961 | // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp') |
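| | // Illustrative results: 'null' and '{}' count as objects here; functions
| | // and undetectable objects do not.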
| 5962 | ASSERT(args->length() == 1); |
| 5963 | Load(args->at(0)); |
| 5964 | Result obj = frame_->Pop(); |
| 5965 | obj.ToRegister(); |
| 5966 | Condition is_smi = masm_->CheckSmi(obj.reg()); |
| 5967 | destination()->false_target()->Branch(is_smi); |
| 5968 | |
| 5969 | __ Move(kScratchRegister, Factory::null_value()); |
| 5970 | __ cmpq(obj.reg(), kScratchRegister); |
| 5971 | destination()->true_target()->Branch(equal); |
| 5972 | |
| 5973 | __ movq(kScratchRegister, FieldOperand(obj.reg(), HeapObject::kMapOffset)); |
| 5974 | // Undetectable objects behave like undefined when tested with typeof. |
| 5975 | __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset), |
| 5976 | Immediate(1 << Map::kIsUndetectable)); |
| 5977 | destination()->false_target()->Branch(not_zero); |
| 5978 | __ movzxbq(kScratchRegister, |
| 5979 | FieldOperand(kScratchRegister, Map::kInstanceTypeOffset)); |
| 5980 | __ cmpq(kScratchRegister, Immediate(FIRST_JS_OBJECT_TYPE)); |
| 5981 | destination()->false_target()->Branch(below); |
| 5982 | __ cmpq(kScratchRegister, Immediate(LAST_JS_OBJECT_TYPE)); |
| 5983 | obj.Unuse(); |
| 5984 | destination()->Split(below_equal); |
| 5985 | } |
| 5986 | |
| 5987 | |
| 5988 | void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) { |
| 5989 | // This generates a fast version of: |
| 5990 | // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp' || |
| 5991 | // typeof(arg) == 'function').
| 5992 | // It includes undetectable objects (as opposed to IsObject). |
| 5993 | ASSERT(args->length() == 1); |
| 5994 | Load(args->at(0)); |
| 5995 | Result value = frame_->Pop(); |
| 5996 | value.ToRegister(); |
| 5997 | ASSERT(value.is_valid()); |
| 5998 | Condition is_smi = masm_->CheckSmi(value.reg()); |
| 5999 | destination()->false_target()->Branch(is_smi); |
| 6000 | // Check that this is an object. |
| 6001 | __ CmpObjectType(value.reg(), FIRST_JS_OBJECT_TYPE, kScratchRegister); |
| 6002 | value.Unuse(); |
| 6003 | destination()->Split(above_equal); |
| 6004 | } |
| 6005 | |
| 6006 | |
Iain Merrick | 7568138 | 2010-08-19 15:07:18 +0100 | [diff] [blame] | 6007 | // Deferred code to check whether a String wrapper object is safe to use the
| 6008 | // default valueOf behavior. This code is called after the bit caching this
| 6009 | // information in the map has been checked, with the object's map in the
| 6010 | // map_result_ register. On return the register map_result_ contains 1 for
| 6011 | // true and 0 for false.
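| | // Illustrative motivation: for 'var s = new String("x")', 's + ""' can use
| | // the wrapped string directly only if neither the wrapper nor
| | // String.prototype has a custom 'valueOf'.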
| 6012 | class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode { |
| 6013 | public: |
| 6014 | DeferredIsStringWrapperSafeForDefaultValueOf(Register object, |
| 6015 | Register map_result, |
| 6016 | Register scratch1, |
| 6017 | Register scratch2) |
| 6018 | : object_(object), |
| 6019 | map_result_(map_result), |
| 6020 | scratch1_(scratch1), |
| 6021 | scratch2_(scratch2) { } |
| 6022 | |
| 6023 | virtual void Generate() { |
| 6024 | Label false_result; |
| 6025 | |
| 6026 | // Check that map is loaded as expected. |
| 6027 | if (FLAG_debug_code) { |
| 6028 | __ cmpq(map_result_, FieldOperand(object_, HeapObject::kMapOffset)); |
| 6029 | __ Assert(equal, "Map not in expected register"); |
| 6030 | } |
| 6031 | |
| 6032 | // Check for fast case object. Generate false result for slow case object. |
| 6033 | __ movq(scratch1_, FieldOperand(object_, JSObject::kPropertiesOffset)); |
| 6034 | __ movq(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset)); |
| 6035 | __ CompareRoot(scratch1_, Heap::kHashTableMapRootIndex); |
| 6036 | __ j(equal, &false_result); |
| 6037 | |
| 6038 | // Look for the valueOf symbol in the descriptor array, and indicate false |
| 6039 | // if it is found. The descriptor type is not checked, so a transition named |
| 6040 | // valueOf also yields false (a harmless false negative). |
| 6041 | __ movq(map_result_, |
| 6042 | FieldOperand(map_result_, Map::kInstanceDescriptorsOffset)); |
| 6043 | __ movq(scratch1_, FieldOperand(map_result_, FixedArray::kLengthOffset)); |
| 6044 | // map_result_: descriptor array |
| 6045 | // scratch1_: length of descriptor array |
| 6046 | // Calculate the end of the descriptor array. |
| 6047 | SmiIndex index = masm_->SmiToIndex(scratch2_, scratch1_, kPointerSizeLog2); |
| 6048 | __ lea(scratch1_, |
| 6049 | Operand( |
| 6050 | map_result_, index.reg, index.scale, FixedArray::kHeaderSize)); |
| 6051 | // Calculate location of the first key name. |
| 6052 | __ addq(map_result_, |
| 6053 | Immediate(FixedArray::kHeaderSize + |
| 6054 | DescriptorArray::kFirstIndex * kPointerSize)); |
| 6055 | // Loop through all the keys in the descriptor array. If one of these is the |
| 6056 | // symbol valueOf the result is false. |
| 6057 | Label entry, loop; |
| 6058 | __ jmp(&entry); |
| 6059 | __ bind(&loop); |
| 6060 | __ movq(scratch2_, FieldOperand(map_result_, 0)); |
| 6061 | __ Cmp(scratch2_, Factory::value_of_symbol()); |
| 6062 | __ j(equal, &false_result); |
| 6063 | __ addq(map_result_, Immediate(kPointerSize)); |
| 6064 | __ bind(&entry); |
| 6065 | __ cmpq(map_result_, scratch1_); |
| 6066 | __ j(not_equal, &loop); |
| 6067 | |
| 6068 | // Reload map as register map_result_ was used as temporary above. |
| 6069 | __ movq(map_result_, FieldOperand(object_, HeapObject::kMapOffset)); |
| 6070 | |
| 6071 | // If a valueOf property is not found on the object, check that its |
| 6072 | // prototype is the unmodified String prototype. If not, the result is false. |
| 6073 | __ movq(scratch1_, FieldOperand(map_result_, Map::kPrototypeOffset)); |
| 6074 | __ testq(scratch1_, Immediate(kSmiTagMask)); |
| 6075 | __ j(zero, &false_result); |
| 6076 | __ movq(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset)); |
| 6077 | __ movq(scratch2_, |
| 6078 | Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 6079 | __ movq(scratch2_, |
| 6080 | FieldOperand(scratch2_, GlobalObject::kGlobalContextOffset)); |
| 6081 | __ cmpq(scratch1_, |
Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 6082 | ContextOperand( |
Iain Merrick | 7568138 | 2010-08-19 15:07:18 +0100 | [diff] [blame] | 6083 | scratch2_, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); |
| 6084 | __ j(not_equal, &false_result); |
| 6085 | // Set the bit in the map to indicate that it has been checked safe for |
| 6086 | // default valueOf and set true result. |
| 6087 | __ or_(FieldOperand(map_result_, Map::kBitField2Offset), |
| 6088 | Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf)); |
| 6089 | __ Set(map_result_, 1); |
| 6090 | __ jmp(exit_label()); |
| 6091 | __ bind(&false_result); |
| 6092 | // Set false result. |
| 6093 | __ Set(map_result_, 0); |
| 6094 | } |
| 6095 | |
| 6096 | private: |
| 6097 | Register object_; |
| 6098 | Register map_result_; |
| 6099 | Register scratch1_; |
| 6100 | Register scratch2_; |
| 6101 | }; |
| 6102 | |
| 6103 | |
| 6104 | void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf( |
| 6105 | ZoneList<Expression*>* args) { |
| 6106 | ASSERT(args->length() == 1); |
| 6107 | Load(args->at(0)); |
| 6108 | Result obj = frame_->Pop(); // Pop the string wrapper. |
| 6109 | obj.ToRegister(); |
| 6110 | ASSERT(obj.is_valid()); |
| 6111 | if (FLAG_debug_code) { |
| 6112 | __ AbortIfSmi(obj.reg()); |
| 6113 | } |
| 6114 | |
| 6115 | // Check whether this map has already been checked to be safe for default |
| 6116 | // valueOf. |
| 6117 | Result map_result = allocator()->Allocate(); |
| 6118 | ASSERT(map_result.is_valid()); |
| 6119 | __ movq(map_result.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset)); |
| 6120 | __ testb(FieldOperand(map_result.reg(), Map::kBitField2Offset), |
| 6121 | Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf)); |
| 6122 | destination()->true_target()->Branch(not_zero); |
| 6123 | |
| 6124 | // We need an additional two scratch registers for the deferred code. |
| 6125 | Result temp1 = allocator()->Allocate(); |
| 6126 | ASSERT(temp1.is_valid()); |
| 6127 | Result temp2 = allocator()->Allocate(); |
| 6128 | ASSERT(temp2.is_valid()); |
| 6129 | |
| 6130 | DeferredIsStringWrapperSafeForDefaultValueOf* deferred = |
| 6131 | new DeferredIsStringWrapperSafeForDefaultValueOf( |
| 6132 | obj.reg(), map_result.reg(), temp1.reg(), temp2.reg()); |
| 6133 | deferred->Branch(zero); |
| 6134 | deferred->BindExit(); |
| 6135 | __ testq(map_result.reg(), map_result.reg()); |
| 6136 | obj.Unuse(); |
| 6137 | map_result.Unuse(); |
| 6138 | temp1.Unuse(); |
| 6139 | temp2.Unuse(); |
| 6140 | destination()->Split(not_equal); |
| 6141 | } |
| 6142 | |
| 6143 | |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 6144 | void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) { |
| 6145 | // This generates a fast version of: |
| 6146 | // (%_ClassOf(arg) === 'Function') |
| 6147 | ASSERT(args->length() == 1); |
| 6148 | Load(args->at(0)); |
| 6149 | Result obj = frame_->Pop(); |
| 6150 | obj.ToRegister(); |
| 6151 | Condition is_smi = masm_->CheckSmi(obj.reg()); |
| 6152 | destination()->false_target()->Branch(is_smi); |
| 6153 | __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, kScratchRegister); |
| 6154 | obj.Unuse(); |
| 6155 | destination()->Split(equal); |
| 6156 | } |
| 6157 | |
| 6158 | |
| 6159 | void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) { |
| 6160 | ASSERT(args->length() == 1); |
| 6161 | Load(args->at(0)); |
| 6162 | Result obj = frame_->Pop(); |
| 6163 | obj.ToRegister(); |
| 6164 | Condition is_smi = masm_->CheckSmi(obj.reg()); |
| 6165 | destination()->false_target()->Branch(is_smi); |
| 6166 | __ movq(kScratchRegister, FieldOperand(obj.reg(), HeapObject::kMapOffset)); |
| 6167 | __ movzxbl(kScratchRegister, |
| 6168 | FieldOperand(kScratchRegister, Map::kBitFieldOffset)); |
| 6169 | __ testl(kScratchRegister, Immediate(1 << Map::kIsUndetectable)); |
| 6170 | obj.Unuse(); |
| 6171 | destination()->Split(not_zero); |
| 6172 | } |
| 6173 | |
| 6174 | |
| 6175 | void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) { |
| 6176 | ASSERT(args->length() == 0); |
| 6177 | |
| 6178 | // Get the frame pointer for the calling frame. |
| 6179 | Result fp = allocator()->Allocate(); |
| 6180 | __ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 6181 | |
| 6182 | // Skip the arguments adaptor frame if it exists. |
| 6183 | Label check_frame_marker; |
| 6184 | __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset), |
| 6185 | Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 6186 | __ j(not_equal, &check_frame_marker); |
| 6187 | __ movq(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset)); |
| 6188 | |
| 6189 | // Check the marker in the calling frame. |
| 6190 | __ bind(&check_frame_marker); |
| 6191 | __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset), |
| 6192 | Smi::FromInt(StackFrame::CONSTRUCT)); |
| 6193 | fp.Unuse(); |
| 6194 | destination()->Split(equal); |
| 6195 | } |
| 6196 | |
| 6197 | |
| 6198 | void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) { |
| 6199 | ASSERT(args->length() == 0); |
| 6200 | |
| 6201 | Result fp = allocator_->Allocate(); |
| 6202 | Result result = allocator_->Allocate(); |
| 6203 | ASSERT(fp.is_valid() && result.is_valid()); |
| 6204 | |
| 6205 | Label exit; |
| 6206 | |
| 6207 | // Get the number of formal parameters. |
| 6208 | __ Move(result.reg(), Smi::FromInt(scope()->num_parameters())); |
| 6209 | |
| 6210 | // Check if the calling frame is an arguments adaptor frame. |
| 6211 | __ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset)); |
| 6212 | __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset), |
| 6213 | Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); |
| 6214 | __ j(not_equal, &exit); |
| 6215 | |
| 6216 | // Arguments adaptor case: Read the arguments length from the |
| 6217 | // adaptor frame. |
| 6218 | __ movq(result.reg(), |
| 6219 | Operand(fp.reg(), ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 6220 | |
| 6221 | __ bind(&exit); |
| 6222 | result.set_type_info(TypeInfo::Smi()); |
| 6223 | if (FLAG_debug_code) { |
| 6224 | __ AbortIfNotSmi(result.reg()); |
| 6225 | } |
| 6226 | frame_->Push(&result); |
| 6227 | } |
| 6228 | |
| 6229 | |
| 6230 | void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) { |
| 6231 | ASSERT(args->length() == 1); |
| 6232 | JumpTarget leave, null, function, non_function_constructor; |
| 6233 | Load(args->at(0)); // Load the object. |
| 6234 | Result obj = frame_->Pop(); |
| 6235 | obj.ToRegister(); |
| 6236 | frame_->Spill(obj.reg()); |
| 6237 | |
| 6238 | // If the object is a smi, we return null. |
| 6239 | Condition is_smi = masm_->CheckSmi(obj.reg()); |
| 6240 | null.Branch(is_smi); |
| 6241 | |
| 6242 | // Check that the object is a JS object but take special care of JS |
| 6243 | // functions to make sure they have 'Function' as their class. |
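| | // For example: [] reports its constructor's instance class name 'Array', |
| | // function f() {} reports 'Function', and a smi or a string falls |
| | // through to the null result below. |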
| 6244 | |
| 6245 | __ CmpObjectType(obj.reg(), FIRST_JS_OBJECT_TYPE, obj.reg()); |
| 6246 | null.Branch(below); |
| 6247 | |
| 6248 | // As long as JS_FUNCTION_TYPE is the last instance type and it is |
| 6249 | // right after LAST_JS_OBJECT_TYPE, we can avoid checking for |
| 6250 | // LAST_JS_OBJECT_TYPE. |
| 6251 | ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); |
| 6252 | ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); |
| 6253 | __ CmpInstanceType(obj.reg(), JS_FUNCTION_TYPE); |
| 6254 | function.Branch(equal); |
| 6255 | |
| 6256 | // Check if the constructor in the map is a function. |
| 6257 | __ movq(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset)); |
| 6258 | __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, kScratchRegister); |
| 6259 | non_function_constructor.Branch(not_equal); |
| 6260 | |
| 6261 | // The obj register now contains the constructor function. Grab the |
| 6262 | // instance class name from there. |
| 6263 | __ movq(obj.reg(), |
| 6264 | FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset)); |
| 6265 | __ movq(obj.reg(), |
| 6266 | FieldOperand(obj.reg(), |
| 6267 | SharedFunctionInfo::kInstanceClassNameOffset)); |
| 6268 | frame_->Push(&obj); |
| 6269 | leave.Jump(); |
| 6270 | |
| 6271 | // Functions have class 'Function'. |
| 6272 | function.Bind(); |
| 6273 | frame_->Push(Factory::function_class_symbol()); |
| 6274 | leave.Jump(); |
| 6275 | |
| 6276 | // Objects with a non-function constructor have class 'Object'. |
| 6277 | non_function_constructor.Bind(); |
| 6278 | frame_->Push(Factory::Object_symbol()); |
| 6279 | leave.Jump(); |
| 6280 | |
| 6281 | // Non-JS objects have class null. |
| 6282 | null.Bind(); |
| 6283 | frame_->Push(Factory::null_value()); |
| 6284 | |
| 6285 | // All done. |
| 6286 | leave.Bind(); |
| 6287 | } |
| 6288 | |
| 6289 | |
| 6290 | void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) { |
| 6291 | ASSERT(args->length() == 1); |
| 6292 | JumpTarget leave; |
| 6293 | Load(args->at(0)); // Load the object. |
| 6294 | frame_->Dup(); |
| 6295 | Result object = frame_->Pop(); |
| 6296 | object.ToRegister(); |
| 6297 | ASSERT(object.is_valid()); |
| 6298 | // if (object->IsSmi()) return object. |
| 6299 | Condition is_smi = masm_->CheckSmi(object.reg()); |
| 6300 | leave.Branch(is_smi); |
| 6301 | // It is a heap object - get map. |
| 6302 | Result temp = allocator()->Allocate(); |
| 6303 | ASSERT(temp.is_valid()); |
| 6304 | // if (!object->IsJSValue()) return object. |
| 6305 | __ CmpObjectType(object.reg(), JS_VALUE_TYPE, temp.reg()); |
| 6306 | leave.Branch(not_equal); |
| 6307 | __ movq(temp.reg(), FieldOperand(object.reg(), JSValue::kValueOffset)); |
| 6308 | object.Unuse(); |
| 6309 | frame_->SetElementAt(0, &temp); |
| 6310 | leave.Bind(); |
| 6311 | } |
| 6312 | |
| 6313 | |
| 6314 | void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) { |
| 6315 | ASSERT(args->length() == 2); |
| 6316 | JumpTarget leave; |
| 6317 | Load(args->at(0)); // Load the object. |
| 6318 | Load(args->at(1)); // Load the value. |
| 6319 | Result value = frame_->Pop(); |
| 6320 | Result object = frame_->Pop(); |
| 6321 | value.ToRegister(); |
| 6322 | object.ToRegister(); |
| 6323 | |
| 6324 | // if (object->IsSmi()) return value. |
| 6325 | Condition is_smi = masm_->CheckSmi(object.reg()); |
| 6326 | leave.Branch(is_smi, &value); |
| 6327 | |
| 6328 | // It is a heap object - get its map. |
| 6329 | Result scratch = allocator_->Allocate(); |
| 6330 | ASSERT(scratch.is_valid()); |
| 6331 | // if (!object->IsJSValue()) return value. |
| 6332 | __ CmpObjectType(object.reg(), JS_VALUE_TYPE, scratch.reg()); |
| 6333 | leave.Branch(not_equal, &value); |
| 6334 | |
| 6335 | // Store the value. |
| 6336 | __ movq(FieldOperand(object.reg(), JSValue::kValueOffset), value.reg()); |
| 6337 | // Update the write barrier. Save the value as it will be |
| 6338 | // overwritten by the write barrier code and is needed afterward. |
| 6339 | Result duplicate_value = allocator_->Allocate(); |
| 6340 | ASSERT(duplicate_value.is_valid()); |
| 6341 | __ movq(duplicate_value.reg(), value.reg()); |
| 6342 | // The object register is also overwritten by the write barrier and |
| 6343 | // possibly aliased in the frame. |
| 6344 | frame_->Spill(object.reg()); |
| 6345 | __ RecordWrite(object.reg(), JSValue::kValueOffset, duplicate_value.reg(), |
| 6346 | scratch.reg()); |
| 6347 | object.Unuse(); |
| 6348 | scratch.Unuse(); |
| 6349 | duplicate_value.Unuse(); |
| 6350 | |
| 6351 | // Leave. |
| 6352 | leave.Bind(&value); |
| 6353 | frame_->Push(&value); |
| 6354 | } |
| 6355 | |
| 6356 | |
| 6357 | void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) { |
| 6358 | ASSERT(args->length() == 1); |
| 6359 | |
| 6360 | // ArgumentsAccessStub expects the key in rdx and the formal |
| 6361 | // parameter count in rax. |
| 6362 | Load(args->at(0)); |
| 6363 | Result key = frame_->Pop(); |
| 6364 | // Explicitly create a constant result. |
| 6365 | Result count(Handle<Smi>(Smi::FromInt(scope()->num_parameters()))); |
| 6366 | // Call the shared stub to get to arguments[key]. |
| 6367 | ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); |
| 6368 | Result result = frame_->CallStub(&stub, &key, &count); |
| 6369 | frame_->Push(&result); |
| 6370 | } |
| 6371 | |
| 6372 | |
| 6373 | void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) { |
| 6374 | ASSERT(args->length() == 2); |
| 6375 | |
| 6376 | // Load the two objects into registers and perform the comparison. |
| 6377 | Load(args->at(0)); |
| 6378 | Load(args->at(1)); |
| 6379 | Result right = frame_->Pop(); |
| 6380 | Result left = frame_->Pop(); |
| 6381 | right.ToRegister(); |
| 6382 | left.ToRegister(); |
| 6383 | __ cmpq(right.reg(), left.reg()); |
| 6384 | right.Unuse(); |
| 6385 | left.Unuse(); |
| 6386 | destination()->Split(equal); |
| 6387 | } |
| 6388 | |
| 6389 | |
| 6390 | void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) { |
| 6391 | ASSERT(args->length() == 0); |
| 6392 | // The rbp value is aligned, so its low bit is clear and it carries a valid |
| 6393 | // smi tag (it is not padded like a real smi, so it must not be used as one). |
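| | // For example (illustrative address): rbp == 0x7fff5fbff8d0 has its low |
| | // bit clear, so with kSmiTag == 0 and kSmiTagSize == 1 the raw value |
| | // already looks like a tagged smi. |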
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 6394 | STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 6395 | Result rbp_as_smi = allocator_->Allocate(); |
| 6396 | ASSERT(rbp_as_smi.is_valid()); |
| 6397 | __ movq(rbp_as_smi.reg(), rbp); |
| 6398 | frame_->Push(&rbp_as_smi); |
| 6399 | } |
| 6400 | |
| 6401 | |
| 6402 | void CodeGenerator::GenerateRandomHeapNumber( |
| 6403 | ZoneList<Expression*>* args) { |
| 6404 | ASSERT(args->length() == 0); |
| 6405 | frame_->SpillAll(); |
| 6406 | |
| 6407 | Label slow_allocate_heapnumber; |
| 6408 | Label heapnumber_allocated; |
| 6409 | __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber); |
| 6410 | __ jmp(&heapnumber_allocated); |
| 6411 | |
| 6412 | __ bind(&slow_allocate_heapnumber); |
| 6413 | // Allocate a heap number. |
| 6414 | __ CallRuntime(Runtime::kNumberAlloc, 0); |
| 6415 | __ movq(rbx, rax); |
| 6416 | |
| 6417 | __ bind(&heapnumber_allocated); |
| 6418 | |
| 6419 | // Return a random uint32 number in rax. |
| 6420 | // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs. |
| 6421 | __ PrepareCallCFunction(0); |
| 6422 | __ CallCFunction(ExternalReference::random_uint32_function(), 0); |
| 6423 | |
| 6424 | // Convert 32 random bits in rax to 0.(32 random bits) in a double |
| 6425 | // by computing: |
| 6426 | // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20). |
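| | // A worked instance of the trick (illustrative values): with |
| | // rax = 0xFFFFFFFF the xor below yields the double with bit pattern |
| | // 0x41300000FFFFFFFF, i.e. (1 + (2^32 - 1)/2^52) * 2^20, and the |
| | // subtraction leaves (2^32 - 1)/2^32, just below 1.0. |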
| 6427 | __ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single. |
| 6428 | __ movd(xmm1, rcx); |
| 6429 | __ movd(xmm0, rax); |
| 6430 | __ cvtss2sd(xmm1, xmm1); |
| 6431 | __ xorpd(xmm0, xmm1); |
| 6432 | __ subsd(xmm0, xmm1); |
| 6433 | __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0); |
| 6434 | |
| 6435 | __ movq(rax, rbx); |
| 6436 | Result result = allocator_->Allocate(rax); |
| 6437 | frame_->Push(&result); |
| 6438 | } |
| 6439 | |
| 6440 | |
| 6441 | void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) { |
| 6442 | ASSERT_EQ(2, args->length()); |
| 6443 | |
| 6444 | Load(args->at(0)); |
| 6445 | Load(args->at(1)); |
| 6446 | |
| 6447 | StringAddStub stub(NO_STRING_ADD_FLAGS); |
| 6448 | Result answer = frame_->CallStub(&stub, 2); |
| 6449 | frame_->Push(&answer); |
| 6450 | } |
| 6451 | |
| 6452 | |
| 6453 | void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) { |
| 6454 | ASSERT_EQ(3, args->length()); |
| 6455 | |
| 6456 | Load(args->at(0)); |
| 6457 | Load(args->at(1)); |
| 6458 | Load(args->at(2)); |
| 6459 | |
| 6460 | SubStringStub stub; |
| 6461 | Result answer = frame_->CallStub(&stub, 3); |
| 6462 | frame_->Push(&answer); |
| 6463 | } |
| 6464 | |
| 6465 | |
| 6466 | void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) { |
| 6467 | ASSERT_EQ(2, args->length()); |
| 6468 | |
| 6469 | Load(args->at(0)); |
| 6470 | Load(args->at(1)); |
| 6471 | |
| 6472 | StringCompareStub stub; |
| 6473 | Result answer = frame_->CallStub(&stub, 2); |
| 6474 | frame_->Push(&answer); |
| 6475 | } |
| 6476 | |
| 6477 | |
| 6478 | void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) { |
| 6479 | ASSERT_EQ(args->length(), 4); |
| 6480 | |
| 6481 | // Load the arguments on the stack and call the runtime system. |
| 6482 | Load(args->at(0)); |
| 6483 | Load(args->at(1)); |
| 6484 | Load(args->at(2)); |
| 6485 | Load(args->at(3)); |
| 6486 | RegExpExecStub stub; |
| 6487 | Result result = frame_->CallStub(&stub, 4); |
| 6488 | frame_->Push(&result); |
| 6489 | } |
| 6490 | |
| 6491 | |
| 6492 | void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) { |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 6493 | ASSERT_EQ(3, args->length()); |
| 6494 | Load(args->at(0)); // Size of array, smi. |
| 6495 | Load(args->at(1)); // "index" property value. |
| 6496 | Load(args->at(2)); // "input" property value. |
Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 6497 | RegExpConstructResultStub stub; |
| 6498 | Result result = frame_->CallStub(&stub, 3); |
| 6499 | frame_->Push(&result); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 6500 | } |
| 6501 | |
| 6502 | |
| 6503 | class DeferredSearchCache: public DeferredCode { |
| 6504 | public: |
| 6505 | DeferredSearchCache(Register dst, |
| 6506 | Register cache, |
| 6507 | Register key, |
| 6508 | Register scratch) |
| 6509 | : dst_(dst), cache_(cache), key_(key), scratch_(scratch) { |
| 6510 | set_comment("[ DeferredSearchCache"); |
| 6511 | } |
| 6512 | |
| 6513 | virtual void Generate(); |
| 6514 | |
| 6515 | private: |
| 6516 | Register dst_; // On invocation holds the finger index (as int32); on |
| 6517 | // exit holds the value that was looked up. |
| 6518 | Register cache_; // instance of JSFunctionResultCache. |
| 6519 | Register key_; // key being looked up. |
| 6520 | Register scratch_; |
| 6521 | }; |
| 6522 | |
| 6523 | |
| 6524 | // Returns the operand of the element at |index| + |additional_offset| in |
| 6525 | // the FixedArray whose pointer is held in |array|. |index| is an int32. |
| 6526 | static Operand ArrayElement(Register array, |
| 6527 | Register index, |
| 6528 | int additional_offset = 0) { |
| 6529 | int offset = FixedArray::kHeaderSize + additional_offset * kPointerSize; |
| 6530 | return FieldOperand(array, index, times_pointer_size, offset); |
| 6531 | } |
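| | // For example, ArrayElement(cache_, dst_, 1) addresses the value slot |
| | // that immediately follows the key at index dst_. |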
| 6532 | |
| 6533 | |
| 6534 | void DeferredSearchCache::Generate() { |
| 6535 | Label first_loop, search_further, second_loop, cache_miss; |
| 6536 | |
| 6537 | Immediate kEntriesIndexImm = Immediate(JSFunctionResultCache::kEntriesIndex); |
| 6538 | Immediate kEntrySizeImm = Immediate(JSFunctionResultCache::kEntrySize); |
| 6539 | |
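| | // A sketch of the cache layout assumed below (see the declaration of |
| | // JSFunctionResultCache for the authoritative indices): the fixed array |
| | // holds the factory function, the finger, and the cache size, followed |
| | // from kEntriesIndex on by (key, value) pairs, so kEntrySize == 2. |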
| 6540 | // Check the cache from finger to start of the cache. |
| 6541 | __ bind(&first_loop); |
| 6542 | __ subl(dst_, kEntrySizeImm); |
| 6543 | __ cmpl(dst_, kEntriesIndexImm); |
| 6544 | __ j(less, &search_further); |
| 6545 | |
| 6546 | __ cmpq(ArrayElement(cache_, dst_), key_); |
| 6547 | __ j(not_equal, &first_loop); |
| 6548 | |
| 6549 | __ Integer32ToSmiField( |
| 6550 | FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_); |
| 6551 | __ movq(dst_, ArrayElement(cache_, dst_, 1)); |
| 6552 | __ jmp(exit_label()); |
| 6553 | |
| 6554 | __ bind(&search_further); |
| 6555 | |
| 6556 | // Check the cache from end of cache up to finger. |
| 6557 | __ SmiToInteger32(dst_, |
| 6558 | FieldOperand(cache_, |
| 6559 | JSFunctionResultCache::kCacheSizeOffset)); |
| 6560 | __ SmiToInteger32(scratch_, |
| 6561 | FieldOperand(cache_, JSFunctionResultCache::kFingerOffset)); |
| 6562 | |
| 6563 | __ bind(&second_loop); |
| 6564 | __ subl(dst_, kEntrySizeImm); |
| 6565 | __ cmpl(dst_, scratch_); |
| 6566 | __ j(less_equal, &cache_miss); |
| 6567 | |
| 6568 | __ cmpq(ArrayElement(cache_, dst_), key_); |
| 6569 | __ j(not_equal, &second_loop); |
| 6570 | |
| 6571 | __ Integer32ToSmiField( |
| 6572 | FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_); |
| 6573 | __ movq(dst_, ArrayElement(cache_, dst_, 1)); |
| 6574 | __ jmp(exit_label()); |
| 6575 | |
| 6576 | __ bind(&cache_miss); |
| 6577 | __ push(cache_); // store a reference to cache |
| 6578 | __ push(key_); // store a key |
| 6579 | __ push(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 6580 | __ push(key_); |
| 6581 | // On x64 the function must be in rdi. |
| 6582 | __ movq(rdi, FieldOperand(cache_, JSFunctionResultCache::kFactoryOffset)); |
| 6583 | ParameterCount expected(1); |
| 6584 | __ InvokeFunction(rdi, expected, CALL_FUNCTION); |
| 6585 | |
| 6586 | // Find a place to put new cached value into. |
| 6587 | Label add_new_entry, update_cache; |
| 6588 | __ movq(rcx, Operand(rsp, kPointerSize)); // restore the cache |
| 6589 | // Possible optimization: the cache size is constant for a given cache, |
| 6590 | // so technically we could use a constant here. However, after a cache |
| 6591 | // miss this optimization would hardly matter. |
| 6592 | |
| 6593 | // Check if we can add a new entry to the cache. |
| 6594 | __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); |
| 6595 | __ SmiToInteger32(r9, |
| 6596 | FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset)); |
| 6597 | __ cmpl(rbx, r9); |
| 6598 | __ j(greater, &add_new_entry); |
| 6599 | |
| 6600 | // Check if we can evict the entry after the finger. |
| 6601 | __ SmiToInteger32(rdx, |
| 6602 | FieldOperand(rcx, JSFunctionResultCache::kFingerOffset)); |
| 6603 | __ addl(rdx, kEntrySizeImm); |
| 6604 | Label forward; |
| 6605 | __ cmpl(rbx, rdx); |
| 6606 | __ j(greater, &forward); |
| 6607 | // Need to wrap over the cache. |
| 6608 | __ movl(rdx, kEntriesIndexImm); |
| 6609 | __ bind(&forward); |
| 6610 | __ movl(r9, rdx); |
| 6611 | __ jmp(&update_cache); |
| 6612 | |
| 6613 | __ bind(&add_new_entry); |
| 6614 | // r9 holds cache size as int32. |
| 6615 | __ leal(rbx, Operand(r9, JSFunctionResultCache::kEntrySize)); |
| 6616 | __ Integer32ToSmiField( |
| 6617 | FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset), rbx); |
| 6618 | |
| 6619 | // Update the cache itself. |
| 6620 | // r9 holds the index as int32. |
| 6621 | __ bind(&update_cache); |
| 6622 | __ pop(rbx); // restore the key |
| 6623 | __ Integer32ToSmiField( |
| 6624 | FieldOperand(rcx, JSFunctionResultCache::kFingerOffset), r9); |
| 6625 | // Store key. |
| 6626 | __ movq(ArrayElement(rcx, r9), rbx); |
| 6627 | __ RecordWrite(rcx, 0, rbx, r9); |
| 6628 | |
| 6629 | // Store value. |
| 6630 | __ pop(rcx); // restore the cache. |
| 6631 | __ SmiToInteger32(rdx, |
| 6632 | FieldOperand(rcx, JSFunctionResultCache::kFingerOffset)); |
| 6633 | __ incl(rdx); |
| 6634 | // Back up rax, because the RecordWrite macro clobbers its arguments. |
| 6635 | __ movq(rbx, rax); |
| 6636 | __ movq(ArrayElement(rcx, rdx), rax); |
| 6637 | __ RecordWrite(rcx, 0, rbx, rdx); |
| 6638 | |
| 6639 | if (!dst_.is(rax)) { |
| 6640 | __ movq(dst_, rax); |
| 6641 | } |
| 6642 | } |
| 6643 | |
| 6644 | |
| 6645 | void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) { |
| 6646 | ASSERT_EQ(2, args->length()); |
| 6647 | |
| 6648 | ASSERT_NE(NULL, args->at(0)->AsLiteral()); |
| 6649 | int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value(); |
| 6650 | |
| 6651 | Handle<FixedArray> jsfunction_result_caches( |
| 6652 | Top::global_context()->jsfunction_result_caches()); |
| 6653 | if (jsfunction_result_caches->length() <= cache_id) { |
| 6654 | __ Abort("Attempt to use undefined cache."); |
| 6655 | frame_->Push(Factory::undefined_value()); |
| 6656 | return; |
| 6657 | } |
| 6658 | |
| 6659 | Load(args->at(1)); |
| 6660 | Result key = frame_->Pop(); |
| 6661 | key.ToRegister(); |
| 6662 | |
| 6663 | Result cache = allocator()->Allocate(); |
| 6664 | ASSERT(cache.is_valid()); |
| 6665 | __ movq(cache.reg(), ContextOperand(rsi, Context::GLOBAL_INDEX)); |
| 6666 | __ movq(cache.reg(), |
| 6667 | FieldOperand(cache.reg(), GlobalObject::kGlobalContextOffset)); |
| 6668 | __ movq(cache.reg(), |
| 6669 | ContextOperand(cache.reg(), Context::JSFUNCTION_RESULT_CACHES_INDEX)); |
| 6670 | __ movq(cache.reg(), |
| 6671 | FieldOperand(cache.reg(), FixedArray::OffsetOfElementAt(cache_id))); |
| 6672 | |
| 6673 | Result tmp = allocator()->Allocate(); |
| 6674 | ASSERT(tmp.is_valid()); |
| 6675 | |
| 6676 | Result scratch = allocator()->Allocate(); |
| 6677 | ASSERT(scratch.is_valid()); |
| 6678 | |
| 6679 | DeferredSearchCache* deferred = new DeferredSearchCache(tmp.reg(), |
| 6680 | cache.reg(), |
| 6681 | key.reg(), |
| 6682 | scratch.reg()); |
| 6683 | |
| 6684 | const int kFingerOffset = |
| 6685 | FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex); |
| 6686 | // Load the finger index (stored as a smi) into tmp.reg() as an int32. |
| 6687 | __ SmiToInteger32(tmp.reg(), FieldOperand(cache.reg(), kFingerOffset)); |
| 6688 | __ cmpq(key.reg(), FieldOperand(cache.reg(), |
| 6689 | tmp.reg(), times_pointer_size, |
| 6690 | FixedArray::kHeaderSize)); |
| 6691 | deferred->Branch(not_equal); |
| 6692 | __ movq(tmp.reg(), FieldOperand(cache.reg(), |
| 6693 | tmp.reg(), times_pointer_size, |
| 6694 | FixedArray::kHeaderSize + kPointerSize)); |
| 6695 | |
| 6696 | deferred->BindExit(); |
| 6697 | frame_->Push(&tmp); |
| 6698 | } |
| 6699 | |
| 6700 | |
| 6701 | void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) { |
| 6702 | ASSERT_EQ(args->length(), 1); |
| 6703 | |
| 6704 | // Load the argument on the stack and jump to the runtime. |
| 6705 | Load(args->at(0)); |
| 6706 | |
| 6707 | NumberToStringStub stub; |
| 6708 | Result result = frame_->CallStub(&stub, 1); |
| 6709 | frame_->Push(&result); |
| 6710 | } |
| 6711 | |
| 6712 | |
| 6713 | class DeferredSwapElements: public DeferredCode { |
| 6714 | public: |
| 6715 | DeferredSwapElements(Register object, Register index1, Register index2) |
| 6716 | : object_(object), index1_(index1), index2_(index2) { |
| 6717 | set_comment("[ DeferredSwapElements"); |
| 6718 | } |
| 6719 | |
| 6720 | virtual void Generate(); |
| 6721 | |
| 6722 | private: |
| 6723 | Register object_, index1_, index2_; |
| 6724 | }; |
| 6725 | |
| 6726 | |
| 6727 | void DeferredSwapElements::Generate() { |
| 6728 | __ push(object_); |
| 6729 | __ push(index1_); |
| 6730 | __ push(index2_); |
| 6731 | __ CallRuntime(Runtime::kSwapElements, 3); |
| 6732 | } |
| 6733 | |
| 6734 | |
| 6735 | void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) { |
| 6736 | Comment cmnt(masm_, "[ GenerateSwapElements"); |
| 6737 | |
| 6738 | ASSERT_EQ(3, args->length()); |
| 6739 | |
| 6740 | Load(args->at(0)); |
| 6741 | Load(args->at(1)); |
| 6742 | Load(args->at(2)); |
| 6743 | |
| 6744 | Result index2 = frame_->Pop(); |
| 6745 | index2.ToRegister(); |
| 6746 | |
| 6747 | Result index1 = frame_->Pop(); |
| 6748 | index1.ToRegister(); |
| 6749 | |
| 6750 | Result object = frame_->Pop(); |
| 6751 | object.ToRegister(); |
| 6752 | |
| 6753 | Result tmp1 = allocator()->Allocate(); |
| 6754 | tmp1.ToRegister(); |
| 6755 | Result tmp2 = allocator()->Allocate(); |
| 6756 | tmp2.ToRegister(); |
| 6757 | |
| 6758 | frame_->Spill(object.reg()); |
| 6759 | frame_->Spill(index1.reg()); |
| 6760 | frame_->Spill(index2.reg()); |
| 6761 | |
| 6762 | DeferredSwapElements* deferred = new DeferredSwapElements(object.reg(), |
| 6763 | index1.reg(), |
| 6764 | index2.reg()); |
| 6765 | |
| 6766 | // Fetch the map and check if array is in fast case. |
| 6767 | // Check that object doesn't require security checks and |
| 6768 | // has no indexed interceptor. |
| 6769 | __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg()); |
| 6770 | deferred->Branch(below); |
| 6771 | __ testb(FieldOperand(tmp1.reg(), Map::kBitFieldOffset), |
| 6772 | Immediate(KeyedLoadIC::kSlowCaseBitFieldMask)); |
| 6773 | deferred->Branch(not_zero); |
| 6774 | |
Iain Merrick | 7568138 | 2010-08-19 15:07:18 +0100 | [diff] [blame] | 6775 | // Check the object's elements are in fast case and writable. |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 6776 | __ movq(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset)); |
| 6777 | __ CompareRoot(FieldOperand(tmp1.reg(), HeapObject::kMapOffset), |
| 6778 | Heap::kFixedArrayMapRootIndex); |
| 6779 | deferred->Branch(not_equal); |
| 6780 | |
| 6781 | // Check that both indices are smis. |
Ben Murdoch | f87a203 | 2010-10-22 12:50:53 +0100 | [diff] [blame] | 6782 | Condition both_smi = masm()->CheckBothSmi(index1.reg(), index2.reg()); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 6783 | deferred->Branch(NegateCondition(both_smi)); |
| 6784 | |
Ben Murdoch | db5a90a | 2011-01-06 18:27:03 +0000 | [diff] [blame] | 6785 | // Check that both indices are valid. |
| 6786 | __ movq(tmp2.reg(), FieldOperand(object.reg(), JSArray::kLengthOffset)); |
Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 6787 | __ SmiCompare(tmp2.reg(), index1.reg()); |
Ben Murdoch | db5a90a | 2011-01-06 18:27:03 +0000 | [diff] [blame] | 6788 | deferred->Branch(below_equal); |
Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 6789 | __ SmiCompare(tmp2.reg(), index2.reg()); |
Ben Murdoch | db5a90a | 2011-01-06 18:27:03 +0000 | [diff] [blame] | 6790 | deferred->Branch(below_equal); |
| 6791 | |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 6792 | // Bring addresses into index1 and index2. |
| 6793 | __ SmiToInteger32(index1.reg(), index1.reg()); |
| 6794 | __ lea(index1.reg(), FieldOperand(tmp1.reg(), |
| 6795 | index1.reg(), |
| 6796 | times_pointer_size, |
| 6797 | FixedArray::kHeaderSize)); |
| 6798 | __ SmiToInteger32(index2.reg(), index2.reg()); |
| 6799 | __ lea(index2.reg(), FieldOperand(tmp1.reg(), |
| 6800 | index2.reg(), |
| 6801 | times_pointer_size, |
| 6802 | FixedArray::kHeaderSize)); |
| 6803 | |
| 6804 | // Swap elements. |
| 6805 | __ movq(object.reg(), Operand(index1.reg(), 0)); |
| 6806 | __ movq(tmp2.reg(), Operand(index2.reg(), 0)); |
| 6807 | __ movq(Operand(index2.reg(), 0), object.reg()); |
| 6808 | __ movq(Operand(index1.reg(), 0), tmp2.reg()); |
| 6809 | |
| 6810 | Label done; |
| 6811 | __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done); |
| 6812 | // Possible optimization: check that both values are smis |
| 6813 | // (OR them together and test the result against the smi mask). |
| 6814 | |
| 6815 | __ movq(tmp2.reg(), tmp1.reg()); |
Steve Block | 9fac840 | 2011-05-12 15:51:54 +0100 | [diff] [blame^] | 6816 | __ RecordWriteHelper(tmp1.reg(), index1.reg(), object.reg()); |
| 6817 | __ RecordWriteHelper(tmp2.reg(), index2.reg(), object.reg()); |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 6818 | __ bind(&done); |
| 6819 | |
| 6820 | deferred->BindExit(); |
| 6821 | frame_->Push(Factory::undefined_value()); |
| 6822 | } |
| 6823 | |
| 6824 | |
| 6825 | void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) { |
| 6826 | Comment cmnt(masm_, "[ GenerateCallFunction"); |
| 6827 | |
| 6828 | ASSERT(args->length() >= 2); |
| 6829 | |
| 6830 | int n_args = args->length() - 2; // for receiver and function. |
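| | // For example, a call %_CallFunction(recv, x, y, f) arrives here with |
| | // args == [recv, x, y, f], so n_args == 2 and the function is loaded |
| | // last, after the receiver and the two arguments. |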
| 6831 | Load(args->at(0)); // receiver |
| 6832 | for (int i = 0; i < n_args; i++) { |
| 6833 | Load(args->at(i + 1)); |
| 6834 | } |
| 6835 | Load(args->at(n_args + 1)); // function |
| 6836 | Result result = frame_->CallJSFunction(n_args); |
| 6837 | frame_->Push(&result); |
| 6838 | } |
| 6839 | |
| 6840 | |
| 6841 | // Generates the Math.pow method. It only handles special cases and |
| 6842 | // branches to the runtime system for everything else. Note that this |
| 6843 | // function assumes that the call site has executed ToNumber on both |
| 6844 | // arguments. |
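| | // As an illustration of the split: Math.pow(2, 10) stays in generated |
| | // code via the integer-exponent loop below, Math.pow(x, 0.5) and |
| | // Math.pow(x, -0.5) use sqrtsd, and anything else (e.g. a NaN or |
| | // fractional exponent) falls through to the runtime. |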
| 6845 | void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) { |
| 6846 | ASSERT(args->length() == 2); |
| 6847 | Load(args->at(0)); |
| 6848 | Load(args->at(1)); |
| 6849 | |
| 6850 | Label allocate_return; |
| 6851 | // Load the two operands while leaving the values on the frame. |
| 6852 | frame()->Dup(); |
| 6853 | Result exponent = frame()->Pop(); |
| 6854 | exponent.ToRegister(); |
| 6855 | frame()->Spill(exponent.reg()); |
| 6856 | frame()->PushElementAt(1); |
| 6857 | Result base = frame()->Pop(); |
| 6858 | base.ToRegister(); |
| 6859 | frame()->Spill(base.reg()); |
| 6860 | |
| 6861 | Result answer = allocator()->Allocate(); |
| 6862 | ASSERT(answer.is_valid()); |
| 6863 | ASSERT(!exponent.reg().is(base.reg())); |
| 6864 | JumpTarget call_runtime; |
| 6865 | |
| 6866 | // Save 1 in xmm3 - we need this several times later on. |
| 6867 | __ movl(answer.reg(), Immediate(1)); |
| 6868 | __ cvtlsi2sd(xmm3, answer.reg()); |
| 6869 | |
| 6870 | Label exponent_nonsmi; |
| 6871 | Label base_nonsmi; |
| 6872 | // If the exponent is a heap number go to that specific case. |
| 6873 | __ JumpIfNotSmi(exponent.reg(), &exponent_nonsmi); |
| 6874 | __ JumpIfNotSmi(base.reg(), &base_nonsmi); |
| 6875 | |
| 6876 | // Optimized version when y is an integer. |
| 6877 | Label powi; |
| 6878 | __ SmiToInteger32(base.reg(), base.reg()); |
| 6879 | __ cvtlsi2sd(xmm0, base.reg()); |
| 6880 | __ jmp(&powi); |
| 6881 | // exponent is smi and base is a heapnumber. |
| 6882 | __ bind(&base_nonsmi); |
| 6883 | __ CompareRoot(FieldOperand(base.reg(), HeapObject::kMapOffset), |
| 6884 | Heap::kHeapNumberMapRootIndex); |
| 6885 | call_runtime.Branch(not_equal); |
| 6886 | |
| 6887 | __ movsd(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset)); |
| 6888 | |
| 6889 | // Optimized version of pow if y is an integer. |
| 6890 | __ bind(&powi); |
| 6891 | __ SmiToInteger32(exponent.reg(), exponent.reg()); |
| 6892 | |
| 6893 | // Save exponent in base as we need to check if exponent is negative later. |
| 6894 | // We know that base and exponent are in different registers. |
| 6895 | __ movl(base.reg(), exponent.reg()); |
| 6896 | |
| 6897 | // Get absolute value of exponent. |
| 6898 | Label no_neg; |
| 6899 | __ cmpl(exponent.reg(), Immediate(0)); |
| 6900 | __ j(greater_equal, &no_neg); |
| 6901 | __ negl(exponent.reg()); |
| 6902 | __ bind(&no_neg); |
| 6903 | |
| 6904 | // Load xmm1 with 1. |
| 6905 | __ movsd(xmm1, xmm3); |
| 6906 | Label while_true; |
| 6907 | Label no_multiply; |
| 6908 | |
| 6909 | __ bind(&while_true); |
| 6910 | __ shrl(exponent.reg(), Immediate(1)); |
| 6911 | __ j(not_carry, &no_multiply); |
| 6912 | __ mulsd(xmm1, xmm0); |
| 6913 | __ bind(&no_multiply); |
| 6914 | __ testl(exponent.reg(), exponent.reg()); |
| 6915 | __ mulsd(xmm0, xmm0); |
| 6916 | __ j(not_zero, &while_true); |
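| | // The loop above is binary (square-and-multiply) exponentiation: xmm0 |
| | // is squared on every iteration and multiplied into xmm1 whenever the |
| | // shifted-out exponent bit is set. For example, x^5 (bits 101) needs |
| | // three iterations: xmm1 = x, then x^2 is skipped, then xmm1 = x * x^4. |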
| 6917 | |
| 6918 | // base.reg() holds the original exponent; if it is negative, return 1/result. |
| 6919 | __ testl(base.reg(), base.reg()); |
| 6920 | __ j(positive, &allocate_return); |
| 6921 | // Special case if xmm1 has reached infinity. |
| 6922 | __ movl(answer.reg(), Immediate(0x7FB00000)); |
| 6923 | __ movd(xmm0, answer.reg()); |
| 6924 | __ cvtss2sd(xmm0, xmm0); |
| 6925 | __ ucomisd(xmm0, xmm1); |
| 6926 | call_runtime.Branch(equal); |
| 6927 | __ divsd(xmm3, xmm1); |
| 6928 | __ movsd(xmm1, xmm3); |
| 6929 | __ jmp(&allocate_return); |
| 6930 | |
| 6931 | // The exponent (or both operands) is a heap number - either way we now |
| 6932 | // work on doubles. |
| 6933 | __ bind(&exponent_nonsmi); |
| 6934 | __ CompareRoot(FieldOperand(exponent.reg(), HeapObject::kMapOffset), |
| 6935 | Heap::kHeapNumberMapRootIndex); |
| 6936 | call_runtime.Branch(not_equal); |
| 6937 | __ movsd(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset)); |
| 6938 | // Test if exponent is nan. |
| 6939 | __ ucomisd(xmm1, xmm1); |
| 6940 | call_runtime.Branch(parity_even); |
| 6941 | |
| 6942 | Label base_not_smi; |
| 6943 | Label handle_special_cases; |
| 6944 | __ JumpIfNotSmi(base.reg(), &base_not_smi); |
| 6945 | __ SmiToInteger32(base.reg(), base.reg()); |
| 6946 | __ cvtlsi2sd(xmm0, base.reg()); |
| 6947 | __ jmp(&handle_special_cases); |
| 6948 | __ bind(&base_not_smi); |
| 6949 | __ CompareRoot(FieldOperand(base.reg(), HeapObject::kMapOffset), |
| 6950 | Heap::kHeapNumberMapRootIndex); |
| 6951 | call_runtime.Branch(not_equal); |
| 6952 | __ movl(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset)); |
| 6953 | __ andl(answer.reg(), Immediate(HeapNumber::kExponentMask)); |
| 6954 | __ cmpl(answer.reg(), Immediate(HeapNumber::kExponentMask)); |
| 6955 | // base is NaN or +/-Infinity |
| 6956 | call_runtime.Branch(greater_equal); |
| 6957 | __ movsd(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset)); |
| 6958 | |
| 6959 | // base is in xmm0 and exponent is in xmm1. |
| 6960 | __ bind(&handle_special_cases); |
| 6961 | Label not_minus_half; |
| 6962 | // Test for -0.5. |
| 6963 | // Load xmm2 with -0.5. |
| 6964 | __ movl(answer.reg(), Immediate(0xBF000000)); |
| 6965 | __ movd(xmm2, answer.reg()); |
| 6966 | __ cvtss2sd(xmm2, xmm2); |
| 6967 | // xmm2 now has -0.5. |
| 6968 | __ ucomisd(xmm2, xmm1); |
| 6969 | __ j(not_equal, ¬_minus_half); |
| 6970 | |
| 6971 | // Calculates the reciprocal of the square root, using the identity |
| 6972 | // 1/sqrt(x) = sqrt(1/x) (e.g. 1/sqrt(4) = sqrt(0.25) = 0.5). |
| 6973 | __ divsd(xmm3, xmm0); |
| 6974 | __ movsd(xmm1, xmm3); |
| 6975 | __ sqrtsd(xmm1, xmm1); |
| 6976 | __ jmp(&allocate_return); |
| 6977 | |
| 6978 | // Test for 0.5. |
| 6979 | __ bind(¬_minus_half); |
| 6980 | // Load xmm2 with 0.5. |
| 6981 | // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3. |
| 6982 | __ addsd(xmm2, xmm3); |
| 6983 | // xmm2 now has 0.5. |
| 6984 | __ ucomisd(xmm2, xmm1); |
| 6985 | call_runtime.Branch(not_equal); |
| 6986 | |
| 6987 | // Calculates square root. |
| 6988 | __ movsd(xmm1, xmm0); |
| 6989 | __ sqrtsd(xmm1, xmm1); |
| 6990 | |
| 6991 | JumpTarget done; |
| 6992 | Label failure, success; |
| 6993 | __ bind(&allocate_return); |
| 6994 | // Make a copy of the frame to enable us to handle allocation |
| 6995 | // failure after the JumpTarget jump. |
| 6996 | VirtualFrame* clone = new VirtualFrame(frame()); |
| 6997 | __ AllocateHeapNumber(answer.reg(), exponent.reg(), &failure); |
| 6998 | __ movsd(FieldOperand(answer.reg(), HeapNumber::kValueOffset), xmm1); |
| 6999 | // Remove the two original values from the frame - we only need those |
| 7000 | // in the case where we branch to runtime. |
| 7001 | frame()->Drop(2); |
| 7002 | exponent.Unuse(); |
| 7003 | base.Unuse(); |
| 7004 | done.Jump(&answer); |
| 7005 | // Use the copy of the original frame as our current frame. |
| 7006 | RegisterFile empty_regs; |
| 7007 | SetFrame(clone, &empty_regs); |
| 7008 | // If we experience an allocation failure we branch to runtime. |
| 7009 | __ bind(&failure); |
| 7010 | call_runtime.Bind(); |
| 7011 | answer = frame()->CallRuntime(Runtime::kMath_pow_cfunction, 2); |
| 7012 | |
| 7013 | done.Bind(&answer); |
| 7014 | frame()->Push(&answer); |
| 7015 | } |
| 7016 | |
| 7017 | |
| 7018 | void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) { |
| 7019 | ASSERT_EQ(args->length(), 1); |
| 7020 | Load(args->at(0)); |
| 7021 | TranscendentalCacheStub stub(TranscendentalCache::SIN); |
| 7022 | Result result = frame_->CallStub(&stub, 1); |
| 7023 | frame_->Push(&result); |
| 7024 | } |
| 7025 | |
| 7026 | |
| 7027 | void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) { |
| 7028 | ASSERT_EQ(args->length(), 1); |
| 7029 | Load(args->at(0)); |
| 7030 | TranscendentalCacheStub stub(TranscendentalCache::COS); |
| 7031 | Result result = frame_->CallStub(&stub, 1); |
| 7032 | frame_->Push(&result); |
| 7033 | } |
| 7034 | |
| 7035 | |
Ben Murdoch | b0fe162 | 2011-05-05 13:52:32 +0100 | [diff] [blame] | 7036 | void CodeGenerator::GenerateMathLog(ZoneList<Expression*>* args) { |
| 7037 | ASSERT_EQ(args->length(), 1); |
| 7038 | Load(args->at(0)); |
| 7039 | TranscendentalCacheStub stub(TranscendentalCache::LOG); |
| 7040 | Result result = frame_->CallStub(&stub, 1); |
| 7041 | frame_->Push(&result); |
| 7042 | } |
| 7043 | |
| 7044 | |
Ben Murdoch | 3bec4d2 | 2010-07-22 14:51:16 +0100 | [diff] [blame] | 7045 | // Generates the Math.sqrt method. Note that this function assumes that |
| 7046 | // the call site has executed ToNumber on the argument. |
| 7047 | void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) { |
| 7048 | ASSERT(args->length() == 1); |
| 7049 | Load(args->at(0)); |
| 7050 | |
| 7051 | // Leave original value on the frame if we need to call runtime. |
| 7052 | frame()->Dup(); |
| 7053 | Result result = frame()->Pop(); |
| 7054 | result.ToRegister(); |
| 7055 | frame()->Spill(result.reg()); |
| 7056 | Label runtime; |
| 7057 | Label non_smi; |
| 7058 | Label load_done; |
| 7059 | JumpTarget end; |
| 7060 | |
| 7061 | __ JumpIfNotSmi(result.reg(), &non_smi); |
| 7062 | __ SmiToInteger32(result.reg(), result.reg()); |
| 7063 | __ cvtlsi2sd(xmm0, result.reg()); |
| 7064 | __ jmp(&load_done); |
| 7065 | __ bind(&non_smi); |
| 7066 | __ CompareRoot(FieldOperand(result.reg(), HeapObject::kMapOffset), |
| 7067 | Heap::kHeapNumberMapRootIndex); |
| 7068 | __ j(not_equal, &runtime); |
| 7069 | __ movsd(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset)); |
| 7070 | |
| 7071 | __ bind(&load_done); |
| 7072 | __ sqrtsd(xmm0, xmm0); |
| 7073 | // A copy of the virtual frame to allow us to go to runtime after the |
| 7074 | // JumpTarget jump. |
| 7075 | Result scratch = allocator()->Allocate(); |
| 7076 | VirtualFrame* clone = new VirtualFrame(frame()); |
| 7077 | __ AllocateHeapNumber(result.reg(), scratch.reg(), &runtime); |
| 7078 | |
| 7079 | __ movsd(FieldOperand(result.reg(), HeapNumber::kValueOffset), xmm0); |
| 7080 | frame()->Drop(1); |
| 7081 | scratch.Unuse(); |
| 7082 | end.Jump(&result); |
| 7083 | // We only branch to runtime if we have an allocation error. |
| 7084 | // Use the copy of the original frame as our current frame. |
| 7085 | RegisterFile empty_regs; |
| 7086 | SetFrame(clone, &empty_regs); |
| 7087 | __ bind(&runtime); |
| 7088 | result = frame()->CallRuntime(Runtime::kMath_sqrt, 1); |
| 7089 | |
| 7090 | end.Bind(&result); |
| 7091 | frame()->Push(&result); |
| 7092 | } |
| 7093 | |
| 7094 | |
Ben Murdoch | bb769b2 | 2010-08-11 14:56:33 +0100 | [diff] [blame] | 7095 | void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) { |
| 7096 | ASSERT_EQ(2, args->length()); |
| 7097 | Load(args->at(0)); |
| 7098 | Load(args->at(1)); |
| 7099 | Result right_res = frame_->Pop(); |
| 7100 | Result left_res = frame_->Pop(); |
| 7101 | right_res.ToRegister(); |
| 7102 | left_res.ToRegister(); |
| 7103 | Result tmp_res = allocator()->Allocate(); |
| 7104 | ASSERT(tmp_res.is_valid()); |
| 7105 | Register right = right_res.reg(); |
| 7106 | Register left = left_res.reg(); |
| 7107 | Register tmp = tmp_res.reg(); |
| 7108 | right_res.Unuse(); |
| 7109 | left_res.Unuse(); |
| 7110 | tmp_res.Unuse(); |
| 7111 | __ cmpq(left, right); |
| 7112 | destination()->true_target()->Branch(equal); |
| 7113 | // Fail if either is a non-HeapObject. |
| 7114 | Condition either_smi = |
| 7115 | masm()->CheckEitherSmi(left, right, tmp); |
| 7116 | destination()->false_target()->Branch(either_smi); |
| 7117 | __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset)); |
| 7118 | __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset), |
| 7119 | Immediate(JS_REGEXP_TYPE)); |
| 7120 | destination()->false_target()->Branch(not_equal); |
| 7121 | __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset)); |
| 7122 | destination()->false_target()->Branch(not_equal); |
| 7123 | __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset)); |
| 7124 | __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset)); |
| 7125 | destination()->Split(equal); |
| 7126 | } |
| 7127 | |
| 7128 | |
Kristian Monsen | 80d68ea | 2010-09-08 11:05:35 +0100 | [diff] [blame] | 7129 | void CodeGenerator::GenerateHasCachedArrayIndex(ZoneList<Expression*>* args) { |
| 7130 | ASSERT(args->length() == 1); |
| 7131 | Load(args->at(0)); |
| 7132 | Result value = frame_->Pop(); |
| 7133 | value.ToRegister(); |
| 7134 | ASSERT(value.is_valid()); |
| 7135 | __ testl(FieldOperand(value.reg(), String::kHashFieldOffset), |
| 7136 | Immediate(String::kContainsCachedArrayIndexMask)); |
| 7137 | value.Unuse(); |
| 7138 | destination()->Split(zero); |
| 7139 | } |
| 7140 | |
| 7141 | |
| 7142 | void CodeGenerator::GenerateGetCachedArrayIndex(ZoneList<Expression*>* args) { |
| 7143 | ASSERT(args->length() == 1); |
| 7144 | Load(args->at(0)); |
| 7145 | Result string = frame_->Pop(); |
| 7146 | string.ToRegister(); |
| 7147 | |
| 7148 | Result number = allocator()->Allocate(); |
| 7149 | ASSERT(number.is_valid()); |
| 7150 | __ movl(number.reg(), FieldOperand(string.reg(), String::kHashFieldOffset)); |
| 7151 | __ IndexFromHash(number.reg(), number.reg()); |
| 7152 | string.Unuse(); |
| 7153 | frame_->Push(&number); |
| 7154 | } |
| 7155 | |
| 7156 | |
Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 7157 | void CodeGenerator::GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args) { |
| 7158 | frame_->Push(Factory::undefined_value()); |
| 7159 | } |
| 7160 | |
| 7161 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 7162 | void CodeGenerator::VisitCallRuntime(CallRuntime* node) { |
| 7163 | if (CheckForInlineRuntimeCall(node)) { |
| 7164 | return; |
| 7165 | } |
| 7166 | |
| 7167 | ZoneList<Expression*>* args = node->arguments(); |
| 7168 | Comment cmnt(masm_, "[ CallRuntime"); |
| 7169 | Runtime::Function* function = node->function(); |
| 7170 | |
| 7171 | if (function == NULL) { |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 7172 | // Push the builtins object found in the current global object. |
| 7173 | Result temp = allocator()->Allocate(); |
| 7174 | ASSERT(temp.is_valid()); |
Shimeng (Simon) Wang | 8a31eba | 2010-12-06 19:01:33 -0800 | [diff] [blame] | 7175 | __ movq(temp.reg(), GlobalObjectOperand()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 7176 | __ movq(temp.reg(), |
| 7177 | FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset)); |
| 7178 | frame_->Push(&temp); |
| 7179 | } |
| 7180 | |
| 7181 | // Push the arguments ("left-to-right"). |
| 7182 | int arg_count = args->length(); |
| 7183 | for (int i = 0; i < arg_count; i++) { |
| 7184 | Load(args->at(i)); |
| 7185 | } |
| 7186 | |
| 7187 | if (function == NULL) { |
| 7188 | // Call the JS runtime function. |
Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 7189 | frame_->Push(node->name()); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 7190 | Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET, |
| 7191 | arg_count, |
| 7192 | loop_nesting_); |
| 7193 | frame_->RestoreContextRegister(); |
Andrei Popescu | 402d937 | 2010-02-26 13:31:12 +0000 | [diff] [blame] | 7194 | frame_->Push(&answer); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 7195 | } else { |
| 7196 | // Call the C runtime function. |
| 7197 | Result answer = frame_->CallRuntime(function, arg_count); |
| 7198 | frame_->Push(&answer); |
| 7199 | } |
| 7200 | } |
| 7201 | |
| 7202 | |
| 7203 | void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) { |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 7204 | Comment cmnt(masm_, "[ UnaryOperation"); |
| 7205 | |
| 7206 | Token::Value op = node->op(); |
| 7207 | |
| 7208 | if (op == Token::NOT) { |
| 7209 | // Swap the true and false targets but keep the same actual label |
| 7210 | // as the fall through. |
| 7211 | destination()->Invert(); |
Steve Block | d0582a6 | 2009-12-15 09:54:21 +0000 | [diff] [blame] | 7212 | LoadCondition(node->expression(), destination(), true); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 7213 | // Swap the labels back. |
| 7214 | destination()->Invert(); |
| 7215 | |
| 7216 | } else if (op == Token::DELETE) { |
| 7217 | Property* property = node->expression()->AsProperty(); |
| 7218 | if (property != NULL) { |
| 7219 | Load(property->obj()); |
| 7220 | Load(property->key()); |
| 7221 | Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 2); |
| 7222 | frame_->Push(&answer); |
| 7223 | return; |
| 7224 | } |
| 7225 | |
| 7226 | Variable* variable = node->expression()->AsVariableProxy()->AsVariable(); |
| 7227 | if (variable != NULL) { |
Kristian Monsen | 0d5e116 | 2010-09-30 15:31:59 +0100 | [diff] [blame] | 7228 | Slot* slot = variable->AsSlot(); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 7229 | if (variable->is_global()) { |
| 7230 | LoadGlobal(); |
| 7231 | frame_->Push(variable->name()); |
| 7232 | Result answer = frame_->InvokeBuiltin(Builtins::DELETE, |
| 7233 | CALL_FUNCTION, 2); |
| 7234 | frame_->Push(&answer); |
| 7235 | return; |
| 7236 | |
| 7237 | } else if (slot != NULL && slot->type() == Slot::LOOKUP) { |
| 7238 | // Call the runtime to look up the context holding the named |
| 7239 | // variable. Sync the virtual frame eagerly so we can push the |
| 7240 | // arguments directly into place. |
| 7241 | frame_->SyncRange(0, frame_->element_count() - 1); |
| 7242 | frame_->EmitPush(rsi); |
| 7243 | frame_->EmitPush(variable->name()); |
| 7244 | Result context = frame_->CallRuntime(Runtime::kLookupContext, 2); |
| 7245 | ASSERT(context.is_register()); |
| 7246 | frame_->EmitPush(context.reg()); |
| 7247 | context.Unuse(); |
| 7248 | frame_->EmitPush(variable->name()); |
| 7249 | Result answer = frame_->InvokeBuiltin(Builtins::DELETE, |
| 7250 | CALL_FUNCTION, 2); |
| 7251 | frame_->Push(&answer); |
| 7252 | return; |
| 7253 | } |
| 7254 | |
| 7255 | // Default: Result of deleting non-global, not dynamically |
| 7256 | // introduced variables is false. |
| 7257 | frame_->Push(Factory::false_value()); |
| 7258 | |
| 7259 | } else { |
| 7260 | // Default: Result of deleting expressions is true. |
| 7261 | Load(node->expression()); // may have side-effects |
| 7262 | frame_->SetElementAt(0, Factory::true_value()); |
| 7263 | } |
| 7264 | |
| 7265 | } else if (op == Token::TYPEOF) { |
| 7266 | // Special case for loading the typeof expression; see comment on |
| 7267 | // LoadTypeofExpression(). |
| 7268 | LoadTypeofExpression(node->expression()); |
| 7269 | Result answer = frame_->CallRuntime(Runtime::kTypeof, 1); |
| 7270 | frame_->Push(&answer); |
| 7271 | |
| 7272 | } else if (op == Token::VOID) { |
| 7273 | Expression* expression = node->expression(); |
| 7274 | if (expression && expression->AsLiteral() && ( |
| 7275 | expression->AsLiteral()->IsTrue() || |
| 7276 | expression->AsLiteral()->IsFalse() || |
| 7277 | expression->AsLiteral()->handle()->IsNumber() || |
| 7278 | expression->AsLiteral()->handle()->IsString() || |
| 7279 | expression->AsLiteral()->handle()->IsJSRegExp() || |
| 7280 | expression->AsLiteral()->IsNull())) { |
| 7281 | // Omit evaluating the value of the primitive literal. |
| 7282 | // It will be discarded anyway, and can have no side effect. |
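| | // (For example, the common 'void 0' idiom compiles to a single push |
| | // of the undefined value.) |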
      frame_->Push(Factory::undefined_value());
    } else {
      Load(node->expression());
      frame_->SetElementAt(0, Factory::undefined_value());
    }

  } else {
    bool can_overwrite = node->expression()->ResultOverwriteAllowed();
    UnaryOverwriteMode overwrite =
        can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
    bool no_negative_zero = node->expression()->no_negative_zero();
    Load(node->expression());
    switch (op) {
      case Token::NOT:
      case Token::DELETE:
      case Token::TYPEOF:
        UNREACHABLE();  // handled above
        break;

      case Token::SUB: {
        GenericUnaryOpStub stub(
            Token::SUB,
            overwrite,
            NO_UNARY_FLAGS,
            no_negative_zero ? kIgnoreNegativeZero : kStrictNegativeZero);
        Result operand = frame_->Pop();
        Result answer = frame_->CallStub(&stub, &operand);
        answer.set_type_info(TypeInfo::Number());
        frame_->Push(&answer);
        break;
      }

      case Token::BIT_NOT: {
        // Smi check.
        JumpTarget smi_label;
        JumpTarget continue_label;
        Result operand = frame_->Pop();
        operand.ToRegister();

        Condition is_smi = masm_->CheckSmi(operand.reg());
        smi_label.Branch(is_smi, &operand);

        GenericUnaryOpStub stub(Token::BIT_NOT,
                                overwrite,
                                NO_UNARY_SMI_CODE_IN_STUB);
        Result answer = frame_->CallStub(&stub, &operand);
        continue_label.Jump(&answer);

        smi_label.Bind(&answer);
        answer.ToRegister();
        frame_->Spill(answer.reg());
        __ SmiNot(answer.reg(), answer.reg());
        continue_label.Bind(&answer);
        answer.set_type_info(TypeInfo::Smi());
        frame_->Push(&answer);
        break;
      }

      case Token::ADD: {
        // Smi check.
        JumpTarget continue_label;
        Result operand = frame_->Pop();
        TypeInfo operand_info = operand.type_info();
        operand.ToRegister();
        Condition is_smi = masm_->CheckSmi(operand.reg());
        continue_label.Branch(is_smi, &operand);
        frame_->Push(&operand);
        Result answer = frame_->InvokeBuiltin(Builtins::TO_NUMBER,
                                              CALL_FUNCTION, 1);

        continue_label.Bind(&answer);
        if (operand_info.IsSmi()) {
          answer.set_type_info(TypeInfo::Smi());
        } else if (operand_info.IsInteger32()) {
          answer.set_type_info(TypeInfo::Integer32());
        } else {
          answer.set_type_info(TypeInfo::Number());
        }
        frame_->Push(&answer);
        break;
      }
      default:
        UNREACHABLE();
    }
  }
}


// The value in dst was optimistically incremented or decremented.
// The result overflowed or was not smi tagged. Call into the runtime
// to convert the argument to a number, and call the specialized add
// or subtract stub. The result is left in dst.
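// For example, '++x' reaches this deferred code when x is not a smi
// (say, a heap number or a string) or when the optimistic smi
// increment overflows.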
class DeferredPrefixCountOperation: public DeferredCode {
 public:
  DeferredPrefixCountOperation(Register dst,
                               bool is_increment,
                               TypeInfo input_type)
      : dst_(dst), is_increment_(is_increment), input_type_(input_type) {
    set_comment("[ DeferredCountOperation");
  }

  virtual void Generate();

 private:
  Register dst_;
  bool is_increment_;
  TypeInfo input_type_;
};


void DeferredPrefixCountOperation::Generate() {
  Register left;
  if (input_type_.IsNumber()) {
    left = dst_;
  } else {
    __ push(dst_);
    __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
    left = rax;
  }

  GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
                           NO_OVERWRITE,
                           NO_GENERIC_BINARY_FLAGS,
                           TypeInfo::Number());
  stub.GenerateCall(masm_, left, Smi::FromInt(1));

  if (!dst_.is(rax)) __ movq(dst_, rax);
}


// The value in dst was optimistically incremented or decremented.
// The result overflowed or was not smi tagged. Call into the runtime
// to convert the argument to a number. Update the original value in
// old. Call the specialized add or subtract stub. The result is
// left in dst.
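// For example, 'x++' must yield the old value of x as the expression
// result even on this slow path, so ToNumber(input) is saved in old
// before the add or subtract stub runs.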
class DeferredPostfixCountOperation: public DeferredCode {
 public:
  DeferredPostfixCountOperation(Register dst,
                                Register old,
                                bool is_increment,
                                TypeInfo input_type)
      : dst_(dst),
        old_(old),
        is_increment_(is_increment),
        input_type_(input_type) {
    set_comment("[ DeferredCountOperation");
  }

  virtual void Generate();

 private:
  Register dst_;
  Register old_;
  bool is_increment_;
  TypeInfo input_type_;
};


void DeferredPostfixCountOperation::Generate() {
  Register left;
  if (input_type_.IsNumber()) {
    __ push(dst_);  // Save the input to use as the old value.
    left = dst_;
  } else {
    __ push(dst_);
    __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
    __ push(rax);  // Save the result of ToNumber to use as the old value.
    left = rax;
  }

  GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
                           NO_OVERWRITE,
                           NO_GENERIC_BINARY_FLAGS,
                           TypeInfo::Number());
  stub.GenerateCall(masm_, left, Smi::FromInt(1));

  if (!dst_.is(rax)) __ movq(dst_, rax);
  __ pop(old_);
}


void CodeGenerator::VisitCountOperation(CountOperation* node) {
  Comment cmnt(masm_, "[ CountOperation");

  bool is_postfix = node->is_postfix();
  bool is_increment = node->op() == Token::INC;

  Variable* var = node->expression()->AsVariableProxy()->AsVariable();
  bool is_const = (var != NULL && var->mode() == Variable::CONST);

  // Postfix operations need a stack slot under the reference to hold
  // the old value while the new value is being stored. This is so that
  // in the case that storing the new value requires a call, the old
  // value will be in the frame to be spilled.
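  // For example, in 'o.x++' where the store to o.x goes through an IC
  // call, the old value survives on the frame across that call.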
  if (is_postfix) frame_->Push(Smi::FromInt(0));

  // A constant reference is not saved to, so the reference is not a
  // compound assignment reference.
  { Reference target(this, node->expression(), !is_const);
    if (target.is_illegal()) {
      // Spoof the virtual frame to have the expected height (one higher
      // than on entry).
      if (!is_postfix) frame_->Push(Smi::FromInt(0));
      return;
    }
    target.TakeValue();

    Result new_value = frame_->Pop();
    new_value.ToRegister();

    Result old_value;  // Only allocated in the postfix case.
    if (is_postfix) {
      // Allocate a temporary to preserve the old value.
      old_value = allocator_->Allocate();
      ASSERT(old_value.is_valid());
      __ movq(old_value.reg(), new_value.reg());

      // The return value for postfix operations is ToNumber(input).
      // Keep more precise type info if the input is some kind of
      // number already. If the input is not a number we have to wait
      // for the deferred code to convert it.
      if (new_value.type_info().IsNumber()) {
        old_value.set_type_info(new_value.type_info());
      }
    }
    // Ensure the new value is writable.
    frame_->Spill(new_value.reg());

    DeferredCode* deferred = NULL;
    if (is_postfix) {
      deferred = new DeferredPostfixCountOperation(new_value.reg(),
                                                   old_value.reg(),
                                                   is_increment,
                                                   new_value.type_info());
    } else {
      deferred = new DeferredPrefixCountOperation(new_value.reg(),
                                                  is_increment,
                                                  new_value.type_info());
    }

    if (new_value.is_smi()) {
      if (FLAG_debug_code) { __ AbortIfNotSmi(new_value.reg()); }
    } else {
      __ JumpIfNotSmi(new_value.reg(), deferred->entry_label());
    }
    if (is_increment) {
      __ SmiAddConstant(new_value.reg(),
                        new_value.reg(),
                        Smi::FromInt(1),
                        deferred->entry_label());
    } else {
      __ SmiSubConstant(new_value.reg(),
                        new_value.reg(),
                        Smi::FromInt(1),
                        deferred->entry_label());
    }
    deferred->BindExit();

    // Postfix count operations return their input converted to
    // number. The case when the input is already a number is covered
    // above in the allocation code for old_value.
    if (is_postfix && !new_value.type_info().IsNumber()) {
      old_value.set_type_info(TypeInfo::Number());
    }

    new_value.set_type_info(TypeInfo::Number());

    // Postfix: store the old value in the allocated slot under the
    // reference.
    if (is_postfix) frame_->SetElementAt(target.size(), &old_value);

    frame_->Push(&new_value);
    // Non-constant: update the reference.
    if (!is_const) target.SetValue(NOT_CONST_INIT);
  }

  // Postfix: drop the new value and use the old.
  if (is_postfix) frame_->Drop();
}


void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
  // According to ECMA-262 section 11.11, page 58, the binary logical
  // operators must yield the result of one of the two expressions
  // before any ToBoolean() conversions. This means that the value
  // produced by a && or || operator is not necessarily a boolean.

  // NOTE: If the left hand side produces a materialized value (not
  // control flow), we force the right hand side to do the same. This
  // is necessary because we assume that if we get control flow on the
  // last path out of an expression we got it on all paths.
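  // For example, '0 || "fallback"' yields the string "fallback", and
  // 'x && x.f()' yields x itself when x is falsy.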
  if (node->op() == Token::AND) {
    JumpTarget is_true;
    ControlDestination dest(&is_true, destination()->false_target(), true);
    LoadCondition(node->left(), &dest, false);

    if (dest.false_was_fall_through()) {
      // The current false target was used as the fall-through. If
      // there are no dangling jumps to is_true then the left
      // subexpression was unconditionally false. Otherwise we have
      // paths where we do have to evaluate the right subexpression.
      if (is_true.is_linked()) {
        // We need to compile the right subexpression. If the jump to
        // the current false target was a forward jump then we have a
        // valid frame, we have just bound the false target, and we
        // have to jump around the code for the right subexpression.
        if (has_valid_frame()) {
          destination()->false_target()->Unuse();
          destination()->false_target()->Jump();
        }
        is_true.Bind();
        // The left subexpression compiled to control flow, so the
        // right one is free to do so as well.
        LoadCondition(node->right(), destination(), false);
      } else {
        // We have actually just jumped to or bound the current false
        // target but the current control destination is not marked as
        // used.
        destination()->Use(false);
      }

    } else if (dest.is_used()) {
      // The left subexpression compiled to control flow (and is_true
      // was just bound), so the right is free to do so as well.
      LoadCondition(node->right(), destination(), false);

    } else {
      // We have a materialized value on the frame, so we exit with
      // one on all paths. There are possibly also jumps to is_true
      // from nested subexpressions.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      // Avoid popping the result if it converts to 'false' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      //
      // Duplicate the TOS value. The duplicate will be popped by
      // ToBoolean.
      frame_->Dup();
      ControlDestination dest(&pop_and_continue, &exit, true);
      ToBoolean(&dest);

      // Pop the result of evaluating the first part.
      frame_->Drop();

      // Compile right side expression.
      is_true.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    }

  } else {
    ASSERT(node->op() == Token::OR);
    JumpTarget is_false;
    ControlDestination dest(destination()->true_target(), &is_false, false);
    LoadCondition(node->left(), &dest, false);

    if (dest.true_was_fall_through()) {
      // The current true target was used as the fall-through. If
      // there are no dangling jumps to is_false then the left
      // subexpression was unconditionally true. Otherwise we have
      // paths where we do have to evaluate the right subexpression.
      if (is_false.is_linked()) {
        // We need to compile the right subexpression. If the jump to
        // the current true target was a forward jump then we have a
        // valid frame, we have just bound the true target, and we
        // have to jump around the code for the right subexpression.
        if (has_valid_frame()) {
          destination()->true_target()->Unuse();
          destination()->true_target()->Jump();
        }
        is_false.Bind();
        // The left subexpression compiled to control flow, so the
        // right one is free to do so as well.
        LoadCondition(node->right(), destination(), false);
      } else {
        // We have just jumped to or bound the current true target but
        // the current control destination is not marked as used.
        destination()->Use(true);
      }

    } else if (dest.is_used()) {
      // The left subexpression compiled to control flow (and is_false
      // was just bound), so the right is free to do so as well.
      LoadCondition(node->right(), destination(), false);

    } else {
      // We have a materialized value on the frame, so we exit with
      // one on all paths. There are possibly also jumps to is_false
      // from nested subexpressions.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      // Avoid popping the result if it converts to 'true' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      //
      // Duplicate the TOS value. The duplicate will be popped by
      // ToBoolean.
      frame_->Dup();
      ControlDestination dest(&exit, &pop_and_continue, false);
      ToBoolean(&dest);

      // Pop the result of evaluating the first part.
      frame_->Drop();

      // Compile right side expression.
      is_false.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    }
  }
}


void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
  Comment cmnt(masm_, "[ BinaryOperation");

  if (node->op() == Token::AND || node->op() == Token::OR) {
    GenerateLogicalBooleanOperation(node);
  } else {
    // NOTE: The code below assumes that the slow cases (calls to runtime)
    // never return a constant/immutable object.
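    // For example, in 'a + 1' where 'a' loads a temporary heap number,
    // OVERWRITE_LEFT lets the binary-op stub reuse that number's storage
    // for the result instead of allocating a fresh heap number.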
    OverwriteMode overwrite_mode = NO_OVERWRITE;
    if (node->left()->ResultOverwriteAllowed()) {
      overwrite_mode = OVERWRITE_LEFT;
    } else if (node->right()->ResultOverwriteAllowed()) {
      overwrite_mode = OVERWRITE_RIGHT;
    }

    if (node->left()->IsTrivial()) {
      Load(node->right());
      Result right = frame_->Pop();
      frame_->Push(node->left());
      frame_->Push(&right);
    } else {
      Load(node->left());
      Load(node->right());
    }
    GenericBinaryOperation(node, overwrite_mode);
  }
}


void CodeGenerator::VisitThisFunction(ThisFunction* node) {
  frame_->PushFunction();
}


void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
  Comment cmnt(masm_, "[ CompareOperation");

  // Get the expressions from the node.
  Expression* left = node->left();
  Expression* right = node->right();
  Token::Value op = node->op();
  // To make typeof testing for natives implemented in JavaScript really
  // efficient, we generate special code for expressions of the form:
  // 'typeof <expression> == <string>'.
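  // For example, 'typeof x == "number"' branches directly on the smi and
  // map checks below; the result string of typeof is never materialized.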
  UnaryOperation* operation = left->AsUnaryOperation();
  if ((op == Token::EQ || op == Token::EQ_STRICT) &&
      (operation != NULL && operation->op() == Token::TYPEOF) &&
      (right->AsLiteral() != NULL &&
       right->AsLiteral()->handle()->IsString())) {
    Handle<String> check(Handle<String>::cast(right->AsLiteral()->handle()));

    // Load the operand and move it to a register.
    LoadTypeofExpression(operation->expression());
    Result answer = frame_->Pop();
    answer.ToRegister();

    if (check->Equals(Heap::number_symbol())) {
      Condition is_smi = masm_->CheckSmi(answer.reg());
      destination()->true_target()->Branch(is_smi);
      frame_->Spill(answer.reg());
      __ movq(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ CompareRoot(answer.reg(), Heap::kHeapNumberMapRootIndex);
      answer.Unuse();
      destination()->Split(equal);

    } else if (check->Equals(Heap::string_symbol())) {
      Condition is_smi = masm_->CheckSmi(answer.reg());
      destination()->false_target()->Branch(is_smi);

      // It can be an undetectable string object.
      __ movq(kScratchRegister,
              FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
               Immediate(1 << Map::kIsUndetectable));
      destination()->false_target()->Branch(not_zero);
      __ CmpInstanceType(kScratchRegister, FIRST_NONSTRING_TYPE);
      answer.Unuse();
      destination()->Split(below);  // Unsigned byte comparison needed.

    } else if (check->Equals(Heap::boolean_symbol())) {
      __ CompareRoot(answer.reg(), Heap::kTrueValueRootIndex);
      destination()->true_target()->Branch(equal);
      __ CompareRoot(answer.reg(), Heap::kFalseValueRootIndex);
      answer.Unuse();
      destination()->Split(equal);

    } else if (check->Equals(Heap::undefined_symbol())) {
      __ CompareRoot(answer.reg(), Heap::kUndefinedValueRootIndex);
      destination()->true_target()->Branch(equal);

      Condition is_smi = masm_->CheckSmi(answer.reg());
      destination()->false_target()->Branch(is_smi);

      // It can be an undetectable object.
      __ movq(kScratchRegister,
              FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
               Immediate(1 << Map::kIsUndetectable));
      answer.Unuse();
      destination()->Split(not_zero);

    } else if (check->Equals(Heap::function_symbol())) {
      Condition is_smi = masm_->CheckSmi(answer.reg());
      destination()->false_target()->Branch(is_smi);
      frame_->Spill(answer.reg());
      __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
      destination()->true_target()->Branch(equal);
      // Regular expressions are callable so typeof == 'function'.
      __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
      answer.Unuse();
      destination()->Split(equal);

    } else if (check->Equals(Heap::object_symbol())) {
      Condition is_smi = masm_->CheckSmi(answer.reg());
      destination()->false_target()->Branch(is_smi);
      __ CompareRoot(answer.reg(), Heap::kNullValueRootIndex);
      destination()->true_target()->Branch(equal);

      // Regular expressions are typeof == 'function', not 'object'.
      __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, kScratchRegister);
      destination()->false_target()->Branch(equal);

      // It can be an undetectable object.
      __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
               Immediate(1 << Map::kIsUndetectable));
      destination()->false_target()->Branch(not_zero);
      __ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE);
      destination()->false_target()->Branch(below);
      __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
      answer.Unuse();
      destination()->Split(below_equal);
    } else {
      // Uncommon case: typeof testing against a string literal that is
      // never returned from the typeof operator.
      answer.Unuse();
      destination()->Goto(false);
    }
    return;
  }

  Condition cc = no_condition;
  bool strict = false;
  switch (op) {
    case Token::EQ_STRICT:
      strict = true;
      // Fall through
    case Token::EQ:
      cc = equal;
      break;
    case Token::LT:
      cc = less;
      break;
    case Token::GT:
      cc = greater;
      break;
    case Token::LTE:
      cc = less_equal;
      break;
    case Token::GTE:
      cc = greater_equal;
      break;
    case Token::IN: {
      Load(left);
      Load(right);
      Result answer = frame_->InvokeBuiltin(Builtins::IN, CALL_FUNCTION, 2);
      frame_->Push(&answer);  // push the result
      return;
    }
    case Token::INSTANCEOF: {
      Load(left);
      Load(right);
      InstanceofStub stub(InstanceofStub::kNoFlags);
      Result answer = frame_->CallStub(&stub, 2);
      answer.ToRegister();
      __ testq(answer.reg(), answer.reg());
      answer.Unuse();
      destination()->Split(zero);
      return;
    }
    default:
      UNREACHABLE();
  }

  if (left->IsTrivial()) {
    Load(right);
    Result right_result = frame_->Pop();
    frame_->Push(left);
    frame_->Push(&right_result);
  } else {
    Load(left);
    Load(right);
  }

  Comparison(node, cc, strict, destination());
}


void CodeGenerator::VisitCompareToNull(CompareToNull* node) {
  Comment cmnt(masm_, "[ CompareToNull");

  Load(node->expression());
  Result operand = frame_->Pop();
  operand.ToRegister();
  __ CompareRoot(operand.reg(), Heap::kNullValueRootIndex);
  if (node->is_strict()) {
    operand.Unuse();
    destination()->Split(equal);
  } else {
    // The 'null' value is only equal to 'undefined' if using non-strict
    // comparisons.
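    // For example, 'x == null' holds when x is null, undefined, or an
    // undetectable object (such as document.all), while 'x === null'
    // matches only null itself.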
    destination()->true_target()->Branch(equal);
    __ CompareRoot(operand.reg(), Heap::kUndefinedValueRootIndex);
    destination()->true_target()->Branch(equal);
    Condition is_smi = masm_->CheckSmi(operand.reg());
    destination()->false_target()->Branch(is_smi);

    // It can be an undetectable object.
    // Use a scratch register in preference to spilling operand.reg().
    Result temp = allocator()->Allocate();
    ASSERT(temp.is_valid());
    __ movq(temp.reg(),
            FieldOperand(operand.reg(), HeapObject::kMapOffset));
    __ testb(FieldOperand(temp.reg(), Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    temp.Unuse();
    operand.Unuse();
    destination()->Split(not_zero);
  }
}


#ifdef DEBUG
bool CodeGenerator::HasValidEntryRegisters() {
  return (allocator()->count(rax) == (frame()->is_used(rax) ? 1 : 0))
      && (allocator()->count(rbx) == (frame()->is_used(rbx) ? 1 : 0))
      && (allocator()->count(rcx) == (frame()->is_used(rcx) ? 1 : 0))
      && (allocator()->count(rdx) == (frame()->is_used(rdx) ? 1 : 0))
      && (allocator()->count(rdi) == (frame()->is_used(rdi) ? 1 : 0))
      && (allocator()->count(r8) == (frame()->is_used(r8) ? 1 : 0))
      && (allocator()->count(r9) == (frame()->is_used(r9) ? 1 : 0))
      && (allocator()->count(r11) == (frame()->is_used(r11) ? 1 : 0))
      && (allocator()->count(r14) == (frame()->is_used(r14) ? 1 : 0))
      && (allocator()->count(r12) == (frame()->is_used(r12) ? 1 : 0));
}
#endif


// Emit a LoadIC call to get the value from receiver and leave it in
// dst. The receiver register is restored after the call.
class DeferredReferenceGetNamedValue: public DeferredCode {
 public:
  DeferredReferenceGetNamedValue(Register dst,
                                 Register receiver,
                                 Handle<String> name)
      : dst_(dst), receiver_(receiver), name_(name) {
    set_comment("[ DeferredReferenceGetNamedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Label patch_site_;
  Register dst_;
  Register receiver_;
  Handle<String> name_;
};


void DeferredReferenceGetNamedValue::Generate() {
  if (!receiver_.is(rax)) {
    __ movq(rax, receiver_);
  }
  __ Move(rcx, name_);
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // The call must be followed by a test rax instruction to indicate
  // that the inobject property case was inlined.
  //
  // Store the delta to the map check instruction here in the test
  // instruction. Use masm_-> instead of the __ macro since the
  // latter can't return a value.
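  // (The inline cache miss code reads the immediate of that test
  // instruction to locate and patch the inlined map check; the keyed-load
  // deferred code below describes this protocol in more detail.)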
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->testl(rax, Immediate(-delta_to_patch_site));
  __ IncrementCounter(&Counters::named_load_inline_miss, 1);

  if (!dst_.is(rax)) __ movq(dst_, rax);
}


class DeferredReferenceGetKeyedValue: public DeferredCode {
 public:
  explicit DeferredReferenceGetKeyedValue(Register dst,
                                          Register receiver,
                                          Register key)
      : dst_(dst), receiver_(receiver), key_(key) {
    set_comment("[ DeferredReferenceGetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Label patch_site_;
  Register dst_;
  Register receiver_;
  Register key_;
};


void DeferredReferenceGetKeyedValue::Generate() {
  if (receiver_.is(rdx)) {
    if (!key_.is(rax)) {
      __ movq(rax, key_);
    }  // else do nothing.
  } else if (receiver_.is(rax)) {
    if (key_.is(rdx)) {
      __ xchg(rax, rdx);
    } else if (key_.is(rax)) {
      __ movq(rdx, receiver_);
    } else {
      __ movq(rdx, receiver_);
      __ movq(rax, key_);
    }
  } else if (key_.is(rax)) {
    __ movq(rdx, receiver_);
  } else {
    __ movq(rax, key_);
    __ movq(rdx, receiver_);
  }
  // Calculate the delta from the IC call instruction to the map check
  // movq instruction in the inlined version. This delta is stored in
  // a test(rax, delta) instruction after the call so that we can find
  // it in the IC initialization code and patch the movq instruction.
  // This means that we cannot allow test instructions after calls to
  // KeyedLoadIC stubs in other places.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instruction to the
  // test instruction. We use masm_-> directly here instead of the __
  // macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value. This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  // TODO(X64): Consider whether it's worth switching the test to a
  // 7-byte NOP with non-zero immediate (0f 1f 80 xxxxxxxx) which won't
  // be generated normally.
  masm_->testl(rax, Immediate(-delta_to_patch_site));
  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);

  if (!dst_.is(rax)) __ movq(dst_, rax);
}


class DeferredReferenceSetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceSetKeyedValue(Register value,
                                 Register key,
                                 Register receiver)
      : value_(value), key_(key), receiver_(receiver) {
    set_comment("[ DeferredReferenceSetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Register value_;
  Register key_;
  Register receiver_;
  Label patch_site_;
};


void DeferredReferenceSetKeyedValue::Generate() {
  __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
  // Move value, receiver, and key to registers rax, rdx, and rcx, as
  // the IC stub expects.
  // Move value to rax, using xchg if the receiver or key is in rax.
  if (!value_.is(rax)) {
    if (!receiver_.is(rax) && !key_.is(rax)) {
      __ movq(rax, value_);
    } else {
      __ xchg(rax, value_);
      // Update receiver_ and key_ if they are affected by the swap.
      if (receiver_.is(rax)) {
        receiver_ = value_;
      } else if (receiver_.is(value_)) {
        receiver_ = rax;
      }
      if (key_.is(rax)) {
        key_ = value_;
      } else if (key_.is(value_)) {
        key_ = rax;
      }
    }
  }
  // Value is now in rax. Its original location is remembered in value_,
  // and the value is restored to value_ before returning.
  // The variables receiver_ and key_ are not preserved.
  // Move receiver and key to rdx and rcx, swapping if necessary.
  if (receiver_.is(rdx)) {
    if (!key_.is(rcx)) {
      __ movq(rcx, key_);
    }  // Else everything is already in the right place.
  } else if (receiver_.is(rcx)) {
    if (key_.is(rdx)) {
      __ xchg(rcx, rdx);
    } else if (key_.is(rcx)) {
      __ movq(rdx, receiver_);
    } else {
      __ movq(rdx, receiver_);
      __ movq(rcx, key_);
    }
  } else if (key_.is(rcx)) {
    __ movq(rdx, receiver_);
  } else {
    __ movq(rcx, key_);
    __ movq(rdx, receiver_);
  }

  // Call the IC stub.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instructions (initial movq)
  // to the test instruction. We use masm_-> directly here instead of the
  // __ macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value. This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->testl(rax, Immediate(-delta_to_patch_site));
  // Restore value (returned from store IC).
  if (!value_.is(rax)) __ movq(value_, rax);
}


Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Result result;
  // Do not inline the inobject property case for loads from the global
  // object. Also do not inline for unoptimized code. This saves time
  // in the code generator. Unoptimized code is toplevel code or code
  // that is not in a loop.
  if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
    Comment cmnt(masm(), "[ Load from named Property");
    frame()->Push(name);

    RelocInfo::Mode mode = is_contextual
        ? RelocInfo::CODE_TARGET_CONTEXT
        : RelocInfo::CODE_TARGET;
    result = frame()->CallLoadIC(mode);
    // A test rax instruction following the call signals that the
    // inobject property case was inlined. Ensure that there is not
    // a test rax instruction here.
    __ nop();
  } else {
    // Inline the inobject property case.
    Comment cmnt(masm(), "[ Inlined named property load");
    Result receiver = frame()->Pop();
    receiver.ToRegister();
    result = allocator()->Allocate();
    ASSERT(result.is_valid());

    // Cannot use r12 for receiver, because that changes
    // the distance between a call and a fixup location,
    // due to a special encoding of r12 as r/m in a ModR/M byte.
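    // (r12 in the base position of a memory operand forces an extra SIB
    // byte, so instruction lengths, and hence patch offsets, would change.)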
| 8180 | if (receiver.reg().is(r12)) { |
| 8181 | frame()->Spill(receiver.reg()); // It will be overwritten with result. |
| 8182 | // Swap receiver and value. |
| 8183 | __ movq(result.reg(), receiver.reg()); |
| 8184 | Result temp = receiver; |
| 8185 | receiver = result; |
| 8186 | result = temp; |
| 8187 | } |
| 8188 | |
| 8189 | DeferredReferenceGetNamedValue* deferred = |
| 8190 | new DeferredReferenceGetNamedValue(result.reg(), receiver.reg(), name); |
| 8191 | |
| 8192 | // Check that the receiver is a heap object. |
| 8193 | __ JumpIfSmi(receiver.reg(), deferred->entry_label()); |
| 8194 | |
| 8195 | __ bind(deferred->patch_site()); |
| 8196 | // This is the map check instruction that will be patched (so we can't |
| 8197 | // use the double underscore macro that may insert instructions). |
| 8198 | // Initially use an invalid map to force a failure. |
| 8199 | masm()->Move(kScratchRegister, Factory::null_value()); |
| 8200 | masm()->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset), |
| 8201 | kScratchRegister); |
| 8202 | // This branch is always a forwards branch so it's always a fixed |
| 8203 | // size which allows the assert below to succeed and patching to work. |
| 8204 | // Don't use deferred->Branch(...), since that might add coverage code. |
| 8205 | masm()->j(not_equal, deferred->entry_label()); |
| 8206 | |
| 8207 | // The delta from the patch label to the load offset must be |
| 8208 | // statically known. |
| 8209 | ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) == |
| 8210 | LoadIC::kOffsetToLoadInstruction); |
| 8211 | // The initial (invalid) offset has to be large enough to force |
| 8212 | // a 32-bit instruction encoding to allow patching with an |
| 8213 | // arbitrary offset. Use kMaxInt (minus kHeapObjectTag). |
| 8214 | int offset = kMaxInt; |
| 8215 | masm()->movq(result.reg(), FieldOperand(receiver.reg(), offset)); |
| 8216 | |
| 8217 | __ IncrementCounter(&Counters::named_load_inline, 1); |
| 8218 | deferred->BindExit(); |
Leon Clarke | f7060e2 | 2010-06-03 12:02:55 +0100 | [diff] [blame] | 8219 | } |
Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 8220 | ASSERT(frame()->height() == original_height - 1); |
Leon Clarke | f7060e2 | 2010-06-03 12:02:55 +0100 | [diff] [blame] | 8221 | return result; |
| 8222 | } |
| 8223 | |
| 8224 | |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 8225 | Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) { |
| 8226 | #ifdef DEBUG |
| 8227 | int expected_height = frame()->height() - (is_contextual ? 1 : 2); |
| 8228 | #endif |
| 8229 | |
| 8230 | Result result; |
| 8231 | if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) { |
| 8232 | result = frame()->CallStoreIC(name, is_contextual); |
| 8233 | // A test rax instruction following the call signals that the inobject |
| 8234 | // property case was inlined. Ensure that there is not a test rax |
| 8235 | // instruction here. |
| 8236 | __ nop(); |
| 8237 | } else { |
| 8238 | // Inline the in-object property case. |
| 8239 | JumpTarget slow, done; |
| 8240 | Label patch_site; |
| 8241 | |
| 8242 | // Get the value and receiver from the stack. |
| 8243 | Result value = frame()->Pop(); |
| 8244 | value.ToRegister(); |
| 8245 | Result receiver = frame()->Pop(); |
| 8246 | receiver.ToRegister(); |
| 8247 | |
| 8248 | // Allocate result register. |
| 8249 | result = allocator()->Allocate(); |
| 8250 | ASSERT(result.is_valid() && receiver.is_valid() && value.is_valid()); |
| 8251 | |
Ben Murdoch | bb769b2 | 2010-08-11 14:56:33 +0100 | [diff] [blame] | 8252 | // Cannot use r12 for receiver, because that changes |
| 8253 | // the distance between a call and a fixup location, |
| 8254 | // due to a special encoding of r12 as r/m in a ModR/M byte. |
| 8255 | if (receiver.reg().is(r12)) { |
| 8256 | frame()->Spill(receiver.reg()); // It will be overwritten with result. |
| 8257 | // Swap receiver and value. |
| 8258 | __ movq(result.reg(), receiver.reg()); |
| 8259 | Result temp = receiver; |
| 8260 | receiver = result; |
| 8261 | result = temp; |
| 8262 | } |
| 8263 | |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 8264 | // Check that the receiver is a heap object. |
Ben Murdoch | f87a203 | 2010-10-22 12:50:53 +0100 | [diff] [blame] | 8265 | Condition is_smi = masm()->CheckSmi(receiver.reg()); |
Kristian Monsen | 50ef84f | 2010-07-29 15:18:00 +0100 | [diff] [blame] | 8266 | slow.Branch(is_smi, &value, &receiver); |
| 8267 | |
| 8268 | // This is the map check instruction that will be patched. |
| 8269 | // Initially use an invalid map to force a failure. The exact |
| 8270 | // instruction sequence is important because we use the |
| 8271 | // kOffsetToStoreInstruction constant for patching. We avoid using |
| 8272 | // the __ macro for the following two instructions because it |
| 8273 | // might introduce extra instructions. |
| 8274 | __ bind(&patch_site); |
| 8275 | masm()->Move(kScratchRegister, Factory::null_value()); |
| 8276 | masm()->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset), |
| 8277 | kScratchRegister); |
| 8278 | // This branch is always a forwards branch so it's always a fixed size |
| 8279 | // which allows the assert below to succeed and patching to work. |
| 8280 | slow.Branch(not_equal, &value, &receiver); |
| 8281 | |
| 8282 | // The delta from the patch label to the store offset must be |
| 8283 | // statically known. |
| 8284 | ASSERT(masm()->SizeOfCodeGeneratedSince(&patch_site) == |
| 8285 | StoreIC::kOffsetToStoreInstruction); |
| 8286 | |
| 8287 | // The initial (invalid) offset has to be large enough to force a 32-bit |
| 8288 | // instruction encoding to allow patching with an arbitrary offset. Use |
| 8289 | // kMaxInt (minus kHeapObjectTag). |
| 8290 | int offset = kMaxInt; |
| 8291 | __ movq(FieldOperand(receiver.reg(), offset), value.reg()); |
| 8292 | __ movq(result.reg(), value.reg()); |
| 8293 | |
| 8294 | // Allocate scratch register for write barrier. |
| 8295 | Result scratch = allocator()->Allocate(); |
| 8296 | ASSERT(scratch.is_valid()); |
| 8297 | |
| 8298 | // The write barrier clobbers all input registers, so spill the |
| 8299 | // receiver and the value. |
| 8300 | frame_->Spill(receiver.reg()); |
| 8301 | frame_->Spill(value.reg()); |
| 8302 | |
| 8303 | // If the receiver and the value share a register allocate a new |
| 8304 | // register for the receiver. |
| 8305 | if (receiver.reg().is(value.reg())) { |
| 8306 | receiver = allocator()->Allocate(); |
| 8307 | ASSERT(receiver.is_valid()); |
| 8308 | __ movq(receiver.reg(), value.reg()); |
| 8309 | } |
| 8310 | |
| 8311 | // Update the write barrier. To save instructions in the inlined |
| 8312 | // version we do not filter smis. |
| 8313 | Label skip_write_barrier; |
| 8314 | __ InNewSpace(receiver.reg(), value.reg(), equal, &skip_write_barrier); |
| 8315 | int delta_to_record_write = masm_->SizeOfCodeGeneratedSince(&patch_site); |
| 8316 | __ lea(scratch.reg(), Operand(receiver.reg(), offset)); |
| 8317 | __ RecordWriteHelper(receiver.reg(), scratch.reg(), value.reg()); |
| 8318 | if (FLAG_debug_code) { |
| 8319 | __ movq(receiver.reg(), BitCast<int64_t>(kZapValue), RelocInfo::NONE); |
| 8320 | __ movq(value.reg(), BitCast<int64_t>(kZapValue), RelocInfo::NONE); |
| 8321 | __ movq(scratch.reg(), BitCast<int64_t>(kZapValue), RelocInfo::NONE); |
| 8322 | } |
| 8323 | __ bind(&skip_write_barrier); |
| 8324 | value.Unuse(); |
| 8325 | scratch.Unuse(); |
| 8326 | receiver.Unuse(); |
| 8327 | done.Jump(&result); |
| 8328 | |
| 8329 | slow.Bind(&value, &receiver); |
| 8330 | frame()->Push(&receiver); |
| 8331 | frame()->Push(&value); |
| 8332 | result = frame()->CallStoreIC(name, is_contextual); |
| 8333 | // Encode the offset to the map check instruction and the offset |
| 8334 | // to the write barrier store address computation in a test rax |
| 8335 | // instruction. |
| 8336 | int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site); |
| 8337 | __ testl(rax, |
| 8338 | Immediate((delta_to_record_write << 16) | delta_to_patch_site)); |
| 8339 | done.Bind(&result); |
| 8340 | } |
| 8341 | |
| 8342 | ASSERT_EQ(expected_height, frame()->height()); |
| 8343 | return result; |
| 8344 | } |
| 8345 | |
| 8346 | |
Leon Clarke | f7060e2 | 2010-06-03 12:02:55 +0100 | [diff] [blame] | 8347 | Result CodeGenerator::EmitKeyedLoad() { |
| 8348 | #ifdef DEBUG |
| 8349 | int original_height = frame()->height(); |
| 8350 | #endif |
| 8351 | Result result; |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 8352 | // Inline array load code if inside of a loop. We do not know |
| 8353 | // the receiver map yet, so we initially generate the code with |
| 8354 | // a check against an invalid map. In the inline cache code, we |
| 8355 | // patch the map check if appropriate. |
| 8356 | if (loop_nesting() > 0) { |
| 8357 | Comment cmnt(masm_, "[ Inlined load from keyed Property"); |
| 8358 | |
Leon Clarke | f7060e2 | 2010-06-03 12:02:55 +0100 | [diff] [blame] | 8359 | // Use a fresh temporary to load the elements without destroying |
| 8360 | // the receiver which is needed for the deferred slow case. |
| 8361 | // Allocate the temporary early so that we use rax if it is free. |
| 8362 | Result elements = allocator()->Allocate(); |
| 8363 | ASSERT(elements.is_valid()); |
| 8364 | |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 8365 | Result key = frame_->Pop(); |
| 8366 | Result receiver = frame_->Pop(); |
| 8367 | key.ToRegister(); |
| 8368 | receiver.ToRegister(); |
| 8369 | |
Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 8370 | // If key and receiver are shared registers on the frame, their values will |
| 8371 | // be automatically saved and restored when going to deferred code. |
| 8372 | // The result is returned in elements, which is not shared. |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 8373 | DeferredReferenceGetKeyedValue* deferred = |
Leon Clarke | f7060e2 | 2010-06-03 12:02:55 +0100 | [diff] [blame] | 8374 | new DeferredReferenceGetKeyedValue(elements.reg(), |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 8375 | receiver.reg(), |
Leon Clarke | f7060e2 | 2010-06-03 12:02:55 +0100 | [diff] [blame] | 8376 | key.reg()); |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 8377 | |
Leon Clarke | f7060e2 | 2010-06-03 12:02:55 +0100 | [diff] [blame] | 8378 | __ JumpIfSmi(receiver.reg(), deferred->entry_label()); |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 8379 | |
Leon Clarke | f7060e2 | 2010-06-03 12:02:55 +0100 | [diff] [blame] | 8380 | // Check that the receiver has the expected map. |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 8381 | // Initially, use an invalid map. The map is patched in the IC |
| 8382 | // initialization code. |
| 8383 | __ bind(deferred->patch_site()); |
| 8384 | // Use masm-> here instead of the double underscore macro since extra |
Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 8385 | // coverage code can interfere with the patching. Do not use a load |
| 8386 | // from the root array to load null_value, since the load must be patched |
| 8387 | // with the expected receiver map, which is not in the root array. |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 8388 | masm_->movq(kScratchRegister, Factory::null_value(), |
| 8389 | RelocInfo::EMBEDDED_OBJECT); |
| 8390 | masm_->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset), |
| 8391 | kScratchRegister); |
| 8392 | deferred->Branch(not_equal); |
| 8393 | |
Ben Murdoch | f87a203 | 2010-10-22 12:50:53 +0100 | [diff] [blame] | 8394 | __ JumpUnlessNonNegativeSmi(key.reg(), deferred->entry_label()); |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 8395 | |
Iain Merrick | 7568138 | 2010-08-19 15:07:18 +0100 | [diff] [blame] | 8396 | // Get the elements array from the receiver. |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 8397 | __ movq(elements.reg(), |
| 8398 | FieldOperand(receiver.reg(), JSObject::kElementsOffset)); |
Iain Merrick | 7568138 | 2010-08-19 15:07:18 +0100 | [diff] [blame] | 8399 | __ AssertFastElements(elements.reg()); |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 8400 | |
Ben Murdoch | 7f4d5bd | 2010-06-15 11:15:29 +0100 | [diff] [blame] | 8401 | // Check that key is within bounds. |
| 8402 | __ SmiCompare(key.reg(), |
| 8403 | FieldOperand(elements.reg(), FixedArray::kLengthOffset)); |
Leon Clarke | d91b9f7 | 2010-01-27 17:25:45 +0000 | [diff] [blame] | 8404 | deferred->Branch(above_equal); |

    // Load and check that the result is not the hole. We could
    // reuse the index or elements register for the value.
    //
    // TODO(206): Consider whether it makes sense to try some
    // heuristic about which register to reuse. For example, if
    // one is rax, then we can reuse that one because the value
    // coming from the deferred code will be in rax.
    SmiIndex index =
        masm_->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2);
    __ movq(elements.reg(),
            FieldOperand(elements.reg(),
                         index.reg,
                         index.scale,
                         FixedArray::kHeaderSize));
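    // The operand above addresses the element at
    //   elements + key * kPointerSize + FixedArray::kHeaderSize - kHeapObjectTag
    // (FieldOperand subtracts the heap-object tag); SmiToIndex untags the
    // smi key into kScratchRegister so that index.reg scaled by index.scale
    // works out to key * kPointerSize.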
    result = elements;
    __ CompareRoot(result.reg(), Heap::kTheHoleValueRootIndex);
    deferred->Branch(equal);
    __ IncrementCounter(&Counters::keyed_load_inline, 1);

    deferred->BindExit();
  } else {
    Comment cmnt(masm_, "[ Load from keyed Property");
    result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
    // Make sure that we do not have a test instruction after the
    // call. A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed load. The explicit nop instruction is here because
    // the push that follows might be peep-hole optimized away.
    __ nop();
  }
  ASSERT(frame()->height() == original_height - 2);
  return result;
}


Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Result result;
  // Generate inlined version of the keyed store if the code is in a loop
  // and the key is likely to be a smi.
  if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
    Comment cmnt(masm(), "[ Inlined store to keyed Property");

    // Get the receiver, key and value into registers.
    result = frame()->Pop();
    Result key = frame()->Pop();
    Result receiver = frame()->Pop();

    Result tmp = allocator_->Allocate();
    ASSERT(tmp.is_valid());
    Result tmp2 = allocator_->Allocate();
    ASSERT(tmp2.is_valid());

    // Determine whether the value is a constant before putting it in a
    // register.
    bool value_is_constant = result.is_constant();

    // Make sure that value, key and receiver are in registers.
    result.ToRegister();
    key.ToRegister();
    receiver.ToRegister();

    DeferredReferenceSetKeyedValue* deferred =
        new DeferredReferenceSetKeyedValue(result.reg(),
                                           key.reg(),
                                           receiver.reg());

    // Check that the receiver is not a smi.
    __ JumpIfSmi(receiver.reg(), deferred->entry_label());

    // Check that the key is a smi.
    if (!key.is_smi()) {
      __ JumpIfNotSmi(key.reg(), deferred->entry_label());
    } else if (FLAG_debug_code) {
      __ AbortIfNotSmi(key.reg());
    }

    // Check that the receiver is a JSArray.
    __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, kScratchRegister);
    deferred->Branch(not_equal);

    // Check that the key is within bounds. Both the key and the length of
    // the JSArray are smis. Use unsigned comparison to handle negative keys.
    __ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
                  key.reg());
    deferred->Branch(below_equal);
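    // As a smi, a negative key has its sign bit set and thus compares as a
    // very large unsigned value, so the unsigned below_equal check above
    // also routes negative keys to the deferred code.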

    // Get the elements array from the receiver and check that it is not a
    // dictionary.
    __ movq(tmp.reg(),
            FieldOperand(receiver.reg(), JSArray::kElementsOffset));

    // Check whether it is possible to omit the write barrier. If the elements
    // array is in new space or the value written is a smi, we can safely
    // update the elements array without a write barrier.
    Label in_new_space;
    __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space);
    if (!value_is_constant) {
      __ JumpIfNotSmi(result.reg(), deferred->entry_label());
    }
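    // Past this point the write barrier can safely be omitted: either the
    // elements array is in new space, or the value is a smi (not a heap
    // pointer), or the value is a compile-time constant (which presumably
    // can never be a new-space object); a non-constant, non-smi value being
    // stored into old-space elements was sent to the deferred IC call above.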

    __ bind(&in_new_space);
    // Bind the deferred code patch site to be able to locate the fixed
    // array map comparison. When debugging, we patch this comparison to
    // always fail so that we will hit the IC call in the deferred code
    // which will allow the debugger to break for fast case stores.
    __ bind(deferred->patch_site());
    // Avoid using __ to ensure the distance from patch_site
    // to the map address is always the same.
    masm()->movq(kScratchRegister, Factory::fixed_array_map(),
                 RelocInfo::EMBEDDED_OBJECT);
    __ cmpq(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
            kScratchRegister);
    deferred->Branch(not_equal);

    // Store the value.
    SmiIndex index =
        masm()->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2);
    __ movq(FieldOperand(tmp.reg(),
                         index.reg,
                         index.scale,
                         FixedArray::kHeaderSize),
            result.reg());
    __ IncrementCounter(&Counters::keyed_store_inline, 1);

    deferred->BindExit();
  } else {
    result = frame()->CallKeyedStoreIC();
    // Make sure that we do not have a test instruction after the
    // call. A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed store.
    __ nop();
  }
  ASSERT(frame()->height() == original_height - 3);
  return result;
}


#undef __
#define __ ACCESS_MASM(masm)


Handle<String> Reference::GetName() {
  ASSERT(type_ == NAMED);
  Property* property = expression_->AsProperty();
  if (property == NULL) {
    // Global variable reference treated as a named property reference.
    VariableProxy* proxy = expression_->AsVariableProxy();
    ASSERT(proxy->AsVariable() != NULL);
    ASSERT(proxy->AsVariable()->is_global());
    return proxy->name();
  } else {
    Literal* raw_name = property->key()->AsLiteral();
    ASSERT(raw_name != NULL);
    return Handle<String>(String::cast(*raw_name->handle()));
  }
}


void Reference::GetValue() {
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();

  // Record the source position for the property load.
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Load from Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
      ASSERT(slot != NULL);
      cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
      break;
    }

    case NAMED: {
      Variable* var = expression_->AsVariableProxy()->AsVariable();
      bool is_global = var != NULL;
      ASSERT(!is_global || var->is_global());
      if (persist_after_get_) {
        cgen_->frame()->Dup();
      }
      Result result = cgen_->EmitNamedLoad(GetName(), is_global);
      cgen_->frame()->Push(&result);
      break;
    }

    case KEYED: {
      // A load of a bare identifier (load from global) cannot be keyed.
      ASSERT(expression_->AsVariableProxy()->AsVariable() == NULL);
      if (persist_after_get_) {
        cgen_->frame()->PushElementAt(1);
        cgen_->frame()->PushElementAt(1);
      }
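      // Duplicating the receiver and key leaves a copy of both on the frame,
      // so the reference survives the load and can be reused, e.g. by a
      // following SetValue in a compound assignment.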
      Result value = cgen_->EmitKeyedLoad();
      cgen_->frame()->Push(&value);
      break;
    }

    default:
      UNREACHABLE();
  }

  if (!persist_after_get_) {
    set_unloaded();
  }
}


void Reference::TakeValue() {
  // TODO(X64): This function is completely architecture independent. Move
  // it somewhere shared.

  // For non-constant frame-allocated slots, we invalidate the value in the
  // slot. For all others, we fall back on GetValue.
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(!is_illegal());
  if (type_ != SLOT) {
    GetValue();
    return;
  }

  Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
  ASSERT(slot != NULL);
  if (slot->type() == Slot::LOOKUP ||
      slot->type() == Slot::CONTEXT ||
      slot->var()->mode() == Variable::CONST ||
      slot->is_arguments()) {
    GetValue();
    return;
  }

  // Only non-constant, frame-allocated parameters and locals can reach
  // here. Be careful not to use the optimizations for arguments
  // object access since it may not have been initialized yet.
  ASSERT(!slot->is_arguments());
  if (slot->type() == Slot::PARAMETER) {
    cgen_->frame()->TakeParameterAt(slot->index());
  } else {
    ASSERT(slot->type() == Slot::LOCAL);
    cgen_->frame()->TakeLocalAt(slot->index());
  }

  ASSERT(persist_after_get_);
  // Do not unload the reference, because it is used in SetValue.
}


void Reference::SetValue(InitState init_state) {
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();
  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Store to Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
      ASSERT(slot != NULL);
      cgen_->StoreToSlot(slot, init_state);
      set_unloaded();
      break;
    }

    case NAMED: {
      Comment cmnt(masm, "[ Store to named Property");
      Result answer = cgen_->EmitNamedStore(GetName(), false);
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case KEYED: {
      Comment cmnt(masm, "[ Store to keyed Property");
      Property* property = expression()->AsProperty();
      ASSERT(property != NULL);

      Result answer = cgen_->EmitKeyedStore(property->key()->type());
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case UNLOADED:
    case ILLEGAL:
      UNREACHABLE();
  }
}


Result CodeGenerator::GenerateGenericBinaryOpStubCall(GenericBinaryOpStub* stub,
                                                      Result* left,
                                                      Result* right) {
  if (stub->ArgsInRegistersSupported()) {
    stub->SetArgsInRegisters();
    return frame_->CallStub(stub, left, right);
  } else {
    frame_->Push(left);
    frame_->Push(right);
    return frame_->CallStub(stub, 2);
  }
}

#undef __

#define __ masm.

#ifdef _WIN64
typedef double (*ModuloFunction)(double, double);
// Define custom fmod implementation.
ModuloFunction CreateModuloFunction() {
  size_t actual_size;
  byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
                                                 &actual_size,
                                                 true));
  CHECK(buffer);
  Assembler masm(buffer, static_cast<int>(actual_size));
  // Generated code is put into a fixed, unmovable buffer, and not into
  // the V8 heap. We can't, and don't, refer to any relocatable addresses
  // (e.g. the JavaScript nan-object).

  // The Windows 64 ABI passes double arguments in xmm0 and xmm1 and
  // returns the result in xmm0.
  // Argument backing space is allocated on the stack above
  // the return address.

  // Compute x mod y.
  // Load y and x (use the argument backing store as temporary storage).
  __ movsd(Operand(rsp, kPointerSize * 2), xmm1);
  __ movsd(Operand(rsp, kPointerSize), xmm0);
  __ fld_d(Operand(rsp, kPointerSize * 2));
  __ fld_d(Operand(rsp, kPointerSize));
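  // The x87 stack now holds st(0) = x (the dividend, loaded last) and
  // st(1) = y (the divisor), which is the operand order fprem uses below.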

  // Clear exception flags before operation.
  {
    Label no_exceptions;
    __ fwait();
    __ fnstsw_ax();
    // Clear if Invalid Operation or Zero Division exceptions are set.
    __ testb(rax, Immediate(5));
    __ j(zero, &no_exceptions);
    __ fnclex();
    __ bind(&no_exceptions);
  }

  // Compute st(0) % st(1).
  {
    Label partial_remainder_loop;
    __ bind(&partial_remainder_loop);
    __ fprem();
    __ fwait();
    __ fnstsw_ax();
    __ testl(rax, Immediate(0x400 /* C2 */));
    // If C2 is set, the computation has only a partial result. Loop to
    // continue the computation.
    __ j(not_zero, &partial_remainder_loop);
  }
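  // fprem yields only a partial remainder when the operand exponents are far
  // apart (per the x87 documentation it reduces the exponent difference by a
  // bounded amount, at most 63 bits per execution), which is why the
  // reduction runs in a loop until C2 is clear.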

  Label valid_result;
  Label return_result;
  // If Invalid Operation or Zero Division exceptions are set,
  // return NaN.
  __ testb(rax, Immediate(5));
  __ j(zero, &valid_result);
  __ fstp(0);  // Drop result in st(0).
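  // 0x7ff8000000000000 is the canonical quiet NaN bit pattern: sign bit
  // clear, all exponent bits set, and the top fraction bit set.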
  int64_t kNaNValue = V8_INT64_C(0x7ff8000000000000);
  __ movq(rcx, kNaNValue, RelocInfo::NONE);
  __ movq(Operand(rsp, kPointerSize), rcx);
  __ movsd(xmm0, Operand(rsp, kPointerSize));
  __ jmp(&return_result);

  // If the result is valid, return it.
  __ bind(&valid_result);
  __ fstp_d(Operand(rsp, kPointerSize));
  __ movsd(xmm0, Operand(rsp, kPointerSize));

  // Clean up the FPU stack and exceptions and return xmm0.
  __ bind(&return_result);
  __ fstp(0);  // Unload y.

  Label clear_exceptions;
  __ testb(rax, Immediate(0x3f /* any exception */));
  __ j(not_zero, &clear_exceptions);
  __ ret(0);
  __ bind(&clear_exceptions);
  __ fnclex();
  __ ret(0);

  CodeDesc desc;
  masm.GetCode(&desc);
  // Return the generated code as a function callable from C++.
  return FUNCTION_CAST<ModuloFunction>(buffer);
}

#endif


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64