// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_S390

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/s390/code-stubs-s390.h"
#include "src/s390/macro-assembler-s390.h"

namespace v8 {
namespace internal {

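// The "__" macro below is the usual V8 shorthand for emitting code through
// this generator's MacroAssembler.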
#define __ ACCESS_MASM(masm())

// A patch site is a location in the code that can be patched. This class
// has a number of methods to emit the patchable code and a method,
// EmitPatchInfo, to record a marker back to the patchable code. This
// marker is a cmpi rx, #yyy instruction, where x * 0x0000ffff + yyy (the
// raw 16-bit immediate value) is the delta from the pc to the first
// instruction of the patchable code.
// See PatchInlinedSmiCode in ic-s390.cc for the code that patches it.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
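    // Comparing a register with itself always sets the 'equal' condition,
    // so the beq below is unconditionally taken until this site is patched
    // with a real smi test.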
    __ CmpP(reg, reg);
// Emit a nop to leave a bigger space for patching on 31-bit targets,
// as the TestIfSmi sequence uses a 4-byte TMLL.
#ifndef V8_TARGET_ARCH_S390X
    __ nop();
#endif
    __ beq(target);  // Always taken before patched.
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
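    // As above, the self-compare sets the 'equal' condition, so the bne
    // below is never taken until this site is patched.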
    __ CmpP(reg, reg);
// Emit a nop to leave a bigger space for patching on 31-bit targets,
// as the TestIfSmi sequence uses a 4-byte TMLL.
#ifndef V8_TARGET_ARCH_S390X
    __ nop();
#endif
    __ bne(target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_int16(delta_to_patch_site));
      __ chi(r0, Operand(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();
      __ nop();
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};

// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
// o r3: the JS function object being called (i.e., ourselves)
// o r5: the new target value
// o cp: our context
// o fp: our caller's frame pointer
// o sp: stack pointer
// o lr: return address
// o ip: our own function entry (required by the prologue)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-s390.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
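  // The counter lives in a heap Cell so the generated code can decrement
  // it in place (see EmitProfilingCounterDecrement below).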
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadP(r4, MemOperand(sp, receiver_offset), r0);
    __ AssertNotSmi(r4);
    __ CompareObjectType(r4, r4, no_reg, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  int prologue_offset = masm_->pc_offset();

  info->set_prologue_offset(prologue_offset);
  __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset);

  {
    Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
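        // When allocating many locals, verify that enough stack space
        // remains by comparing the prospective sp against the real stack
        // limit before writing below sp.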
        Label ok;
        __ AddP(ip, sp, Operand(-(locals_count * kPointerSize)));
        __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
        __ CmpLogicalP(ip, r5);
        __ bge(&ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(r4, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        // TODO(joransiu): Use MVC for better performance
        __ lay(sp, MemOperand(sp, -kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ StoreP(ip, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ BranchOnCount(r4, &loop_header);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      // TODO(joransiu): Use MVC for better performance
      if (remaining > 0) {
        __ lay(sp, MemOperand(sp, -remaining * kPointerSize));
        for (int i = 0; i < remaining; i++) {
          __ StoreP(ip, MemOperand(sp, i * kPointerSize));
        }
      }
    }
  }

  bool function_in_register_r3 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in r3.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(r3);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(r5);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(r3);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(r5);  // Restore new target.
      }
    }
    function_in_register_r3 = false;
    // Context is returned in r2. It replaces the context passed to us.
    // It's saved on the stack and kept live in cp.
    __ LoadRR(cp, r2);
    __ StoreP(r2, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ LoadP(r2, MemOperand(fp, parameter_offset), r0);
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ StoreP(r2, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), r2, r4,
                                    kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, r2, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and the new target are both trashed
  // if we bail out here. But since that can happen only when the new target
  // is not used and we allocate a context, the value of
  // |function_in_register_r3| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);

  // Possibly set up a local binding to the this-function, which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_r3) {
      __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as
      // such.
    }
    SetVar(this_function_var, r3, r2, r4);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, r5, r2, r4);
  }

  // Possibly allocate a rest parameter array.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    if (!function_in_register_r3) {
      __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);

    function_in_register_r3 = false;
    SetVar(rest_param, r2, r3, r4);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_r3) {
      // Load this again, if it's used by the local context below.
      __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(r3);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, r2, r3, r4);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
    Label ok;
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ CmpLogicalP(sp, ip);
    __ bge(&ok, Label::kNear);
    __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  {
    Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}

void FullCodeGenerator::ClearAccumulator() {
  __ LoadSmiLiteral(r2, Smi::FromInt(0));
}

void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(r4, Operand(profiling_counter_));
  intptr_t smi_delta = reinterpret_cast<intptr_t>(Smi::FromInt(delta));
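  // With the general-instruction-extension facility, the counter cell can
  // be decremented directly in memory, provided the negated delta fits in
  // a signed 8-bit immediate; otherwise load, subtract and store.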
  if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT) && is_int8(-smi_delta)) {
    __ AddP(FieldMemOperand(r4, Cell::kValueOffset), Operand(-smi_delta));
    __ LoadP(r5, FieldMemOperand(r4, Cell::kValueOffset));
  } else {
    __ LoadP(r5, FieldMemOperand(r4, Cell::kValueOffset));
    __ SubSmiLiteral(r5, r5, Smi::FromInt(delta), r0);
    __ StoreP(r5, FieldMemOperand(r4, Cell::kValueOffset));
  }
}

void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(r4, Operand(profiling_counter_));
  __ LoadSmiLiteral(r5, Smi::FromInt(reset_value));
  __ StoreP(r5, FieldMemOperand(r4, Cell::kValueOffset));
}

void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                 kCodeSizeMultiplier / 2;
  int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  {
    // BackEdgeTable::PatchAt manipulates this sequence.
    __ bge(&ok, Label::kNear);
    __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

    // Record a mapping of this PC offset to the OSR id. This is used to find
    // the AST id from the unoptimized code in order to use it as a key into
    // the deoptimization input data found in the optimized code.
    RecordBackEdge(stmt->OsrEntryId());
  }
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
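  // EmitProfilingCounterDecrement leaves the new counter value in r5.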
  __ CmpP(r5, Operand::Zero());
  __ bge(&ok);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(r2);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(r2);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r2.
      __ push(r2);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the constant pool is not emitted inside the return
    // sequence.
    {
      // Here we use masm_-> instead of the __ macro to prevent the code
      // coverage tool from instrumenting, as we rely on the code size here.
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);

      __ Ret();
    }
  }
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}

void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}

void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}

void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ mov(result_register(), Operand(lit));
}

void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ mov(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}

void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable());
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ b(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ b(false_label_);
    } else {
      if (true_label_ != fall_through_) __ b(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}

void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ StoreP(reg, MemOperand(sp, 0));
}

void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}

void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ b(&done, Label::kNear);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}

void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ b(&done, Label::kNear);
  __ bind(materialize_false);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ bind(&done);
  codegen()->PushOperand(ip);
}

void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}

void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}

void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(ip, value_root_index);
  codegen()->PushOperand(ip);
}

void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ b(true_label_);
  } else {
    if (false_label_ != fall_through_) __ b(false_label_);
  }
}

void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
                               Label* if_false, Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}

void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
                              Label* fall_through) {
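  // Emit the minimal number of branches: a label that is also the
  // fall-through target needs no branch at all.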
  if (if_false == fall_through) {
    __ b(cond, if_true);
  } else if (if_true == fall_through) {
    __ b(NegateCondition(cond), if_false);
  } else {
    __ b(cond, if_true);
    __ b(if_false);
  }
}

MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}

MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}

void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ LoadP(dest, location, r0);
}

void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ StoreP(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
                              kLRHasBeenSaved, kDontSaveFPRegs);
  }
}

void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ b(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(r2, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);
    __ bind(&skip);
  }
}

void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ LoadP(r3, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(r3, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(r3, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}

void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
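  // Hole-initialized bindings start out as the_hole so that a use before
  // initialization can be detected (see the hole check in EmitVariableLoad).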
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
        __ StoreP(ip, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ mov(r4, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
      } else {
        __ LoadSmiLiteral(r2, Smi::FromInt(0));  // Indicates no initial value.
      }
      __ Push(r4, r2);
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}

void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ StoreP(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp, offset, result_register(), r4,
                                kLRHasBeenSaved, kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ mov(r4, Operand(variable->name()));
      PushOperand(r4);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}

void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ mov(r3, Operand(pairs));
  __ LoadSmiLiteral(r2, Smi::FromInt(DeclareGlobalsFlags()));
  __ Push(r3, r2);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}

void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}

void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ LoadP(r3, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ LoadRR(r4, r2);
      __ OrP(r4, r3);
      patch_site.EmitJumpIfNotSmi(r4, &slow_case);

      __ CmpP(r3, r2);
      __ bne(&next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ b(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ b(&skip);
    PrepareForBailout(clause, TOS_REG);
    __ CompareRoot(r2, Heap::kTrueValueRootIndex);
    __ bne(&next_test);
    __ Drop(1);
    __ b(clause->body_target());
    __ bind(&skip);

    __ CmpP(r2, Operand::Zero());
    __ bne(&next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ b(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ b(nested_statement.break_label());
  } else {
    __ b(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}

void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);
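  // The loop keeps five values on the stack (highest to lowest address):
  // the enumerable object, the expected map (or smi(1) on the slow path),
  // the enum cache or fixed array, its length, and the current index.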

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(r2, &convert);
  __ CompareObjectType(r2, r3, r3, FIRST_JS_RECEIVER_TYPE);
  __ bge(&done_convert);
  __ CompareRoot(r2, Heap::kNullValueRootIndex);
  __ beq(&exit);
  __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
  __ beq(&exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(r2);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  // Note: Proxies never have an enum cache, so will always take the
  // slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ b(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(r2);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ CompareRoot(r4, Heap::kMetaMapRootIndex);
  __ bne(&fixed_array);

  // We got a map in register r2. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(r3, r2);
  __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
  __ beq(&no_descriptors, Label::kNear);

  __ LoadInstanceDescriptors(r2, r4);
  __ LoadP(r4, FieldMemOperand(r4, DescriptorArray::kEnumCacheOffset));
  __ LoadP(r4,
           FieldMemOperand(r4, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(r2);  // Map.
  __ LoadSmiLiteral(r2, Smi::FromInt(0));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(r4, r3, r2);
  __ b(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ b(&exit);

  // We got a fixed array in register r2. Iterate through that.
  __ bind(&fixed_array);

  __ LoadSmiLiteral(r3, Smi::FromInt(1));  // Smi(1) indicates slow check.
  __ Push(r3, r2);  // Smi and array.
  __ LoadP(r3, FieldMemOperand(r2, FixedArray::kLengthOffset));
  __ Push(r3);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS);
  __ LoadSmiLiteral(r2, Smi::FromInt(0));
  __ Push(r2);  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to r2, load the length to r3.
  __ LoadP(r2, MemOperand(sp, 0 * kPointerSize));
  __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
  __ CmpLogicalP(r2, r3);  // Compare to the array length.
  __ bge(loop_statement.break_label());

  // Get the current entry of the array into register r5.
  __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));
  __ AddP(r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiToPtrArrayOffset(r5, r2);
  __ LoadP(r5, MemOperand(r5, r4));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register r4.
  __ LoadP(r4, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ LoadP(r3, MemOperand(sp, 4 * kPointerSize));
  __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ CmpP(r6, r4);
  __ beq(&update_each);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(r2);
  __ mov(r4, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ StoreP(
      r4, FieldMemOperand(r2, FixedArray::OffsetOfElementAt(vector_index)), r0);

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(r3, r5);  // Enumerable and current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ LoadRR(r5, r2);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ CmpP(r2, r0);
  __ beq(loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register r5.
  __ bind(&update_each);
  __ LoadRR(result_register(), r5);
  // Perform the assignment as if via '='.
  {
    EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(r2);
  __ AddSmiLiteral(r2, r2, Smi::FromInt(1), r0);
  __ push(r2);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ b(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}

void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ LoadP(StoreDescriptor::ValueRegister(),
           MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}

void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), r2);
  __ mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ LoadP(StoreDescriptor::ValueRegister(),
           MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}

void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = r3;
  Register temp = r4;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ LoadP(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ LoadP(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ CompareRoot(temp, Heap::kNativeContextMapRootIndex);
    __ beq(&fast, Label::kNear);
    // Check that extension is "the hole".
    __ LoadP(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ LoadP(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ b(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty, and it is safe to use the normal
  // global load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}

MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = r5;
  Register temp = r6;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ LoadP(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}

void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ b(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ LoadP(r2, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
      __ bne(done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ mov(r2, Operand(var->name()));
        __ push(r2);
        __ CallRuntime(Runtime::kThrowReferenceError);
      }
    }
    __ b(done);
  }
}

void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
  __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
  __ mov(LoadDescriptor::SlotRegister(),
         Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadIC(typeof_mode);
}

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(r2);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (NeedsHoleCheckForLoad(proxy)) {
        Label done;
        // Let and const need a read barrier.
        GetVar(r2, var);
        __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
        __ bne(&done);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ mov(r2, Operand(var->name()));
          __ push(r2);
          __ CallRuntime(Runtime::kThrowReferenceError);
        } else {
          // Uninitialized legacy const bindings are unholed.
          DCHECK(var->mode() == CONST_LEGACY);
          __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
        }
        __ bind(&done);
        context()->Plug(r2);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(r2);
    }
  }
}

void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index()));
  __ mov(r3, Operand(expr->pattern()));
  __ LoadSmiLiteral(r2, Smi::FromInt(expr->flags()));
  FastCloneRegExpStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(r2);
}

void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    __ LoadRoot(r3, Heap::kNullValueRootIndex);
    PushOperand(r3);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}
| 1335 | |
| 1336 | void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { |
| 1337 | Comment cmnt(masm_, "[ ObjectLiteral"); |
| 1338 | |
| 1339 | Handle<FixedArray> constant_properties = expr->constant_properties(); |
| 1340 | __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1341 | __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index())); |
| 1342 | __ mov(r3, Operand(constant_properties)); |
| 1343 | int flags = expr->ComputeFlags(); |
| 1344 | __ LoadSmiLiteral(r2, Smi::FromInt(flags)); |
| 1345 | if (MustCreateObjectLiteralWithRuntime(expr)) { |
| 1346 | __ Push(r5, r4, r3, r2); |
| 1347 | __ CallRuntime(Runtime::kCreateObjectLiteral); |
| 1348 | } else { |
| 1349 | FastCloneShallowObjectStub stub(isolate(), expr->properties_count()); |
| 1350 | __ CallStub(&stub); |
| 1351 | } |
| 1352 | PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); |
| 1353 | |
| 1354 | // If result_saved is true the result is on top of the stack. If |
| 1355 | // result_saved is false the result is in r2. |
| 1356 | bool result_saved = false; |
| 1357 | |
| 1358 | AccessorTable accessor_table(zone()); |
| 1359 | int property_index = 0; |
| 1360 | for (; property_index < expr->properties()->length(); property_index++) { |
| 1361 | ObjectLiteral::Property* property = expr->properties()->at(property_index); |
| 1362 | if (property->is_computed_name()) break; |
| 1363 | if (property->IsCompileTimeValue()) continue; |
| 1364 | |
| 1365 | Literal* key = property->key()->AsLiteral(); |
| 1366 | Expression* value = property->value(); |
| 1367 | if (!result_saved) { |
| 1368 | PushOperand(r2); // Save result on stack |
| 1369 | result_saved = true; |
| 1370 | } |
| 1371 | switch (property->kind()) { |
| 1372 | case ObjectLiteral::Property::CONSTANT: |
| 1373 | UNREACHABLE(); |
| 1374 | case ObjectLiteral::Property::MATERIALIZED_LITERAL: |
| 1375 | DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value())); |
| 1376 | // Fall through. |
| 1377 | case ObjectLiteral::Property::COMPUTED: |
| 1378 | // It is safe to use [[Put]] here because the boilerplate already |
| 1379 | // contains computed properties with an uninitialized value. |
| 1380 | if (key->value()->IsInternalizedString()) { |
| 1381 | if (property->emit_store()) { |
| 1382 | VisitForAccumulatorValue(value); |
| 1383 | DCHECK(StoreDescriptor::ValueRegister().is(r2)); |
| 1384 | __ mov(StoreDescriptor::NameRegister(), Operand(key->value())); |
| 1385 | __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); |
| 1386 | EmitLoadStoreICSlot(property->GetSlot(0)); |
| 1387 | CallStoreIC(); |
| 1388 | PrepareForBailoutForId(key->id(), NO_REGISTERS); |
| 1389 | |
| 1390 | if (NeedsHomeObject(value)) { |
| 1391 | EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1)); |
| 1392 | } |
| 1393 | } else { |
| 1394 | VisitForEffect(value); |
| 1395 | } |
| 1396 | break; |
| 1397 | } |
| 1398 | // Duplicate receiver on stack. |
| 1399 | __ LoadP(r2, MemOperand(sp)); |
| 1400 | PushOperand(r2); |
| 1401 | VisitForStackValue(key); |
| 1402 | VisitForStackValue(value); |
| 1403 | if (property->emit_store()) { |
| 1404 | if (NeedsHomeObject(value)) { |
| 1405 | EmitSetHomeObject(value, 2, property->GetSlot()); |
| 1406 | } |
| 1407 | __ LoadSmiLiteral(r2, Smi::FromInt(SLOPPY)); // Language mode for kSetProperty |
| 1408 | PushOperand(r2); |
| 1409 | CallRuntimeWithOperands(Runtime::kSetProperty); |
| 1410 | } else { |
| 1411 | DropOperands(3); |
| 1412 | } |
| 1413 | break; |
| 1414 | case ObjectLiteral::Property::PROTOTYPE: |
| 1415 | // Duplicate receiver on stack. |
| 1416 | __ LoadP(r2, MemOperand(sp)); |
| 1417 | PushOperand(r2); |
| 1418 | VisitForStackValue(value); |
| 1419 | DCHECK(property->emit_store()); |
| 1420 | CallRuntimeWithOperands(Runtime::kInternalSetPrototype); |
| 1421 | PrepareForBailoutForId(expr->GetIdForPropertySet(property_index), |
| 1422 | NO_REGISTERS); |
| 1423 | break; |
| 1424 | case ObjectLiteral::Property::GETTER: |
| 1425 | if (property->emit_store()) { |
| 1426 | accessor_table.lookup(key)->second->getter = property; |
| 1427 | } |
| 1428 | break; |
| 1429 | case ObjectLiteral::Property::SETTER: |
| 1430 | if (property->emit_store()) { |
| 1431 | accessor_table.lookup(key)->second->setter = property; |
| 1432 | } |
| 1433 | break; |
| 1434 | } |
| 1435 | } |
| 1436 | |
| 1437 | // Emit code to define accessors, using only a single call to the runtime for |
| 1438 | // each pair of corresponding getters and setters. |
| 1439 | for (AccessorTable::Iterator it = accessor_table.begin(); |
| 1440 | it != accessor_table.end(); ++it) { |
| 1441 | __ LoadP(r2, MemOperand(sp)); // Duplicate receiver. |
| 1442 | PushOperand(r2); |
| 1443 | VisitForStackValue(it->first); |
| 1444 | EmitAccessor(it->second->getter); |
| 1445 | EmitAccessor(it->second->setter); |
| 1446 | __ LoadSmiLiteral(r2, Smi::FromInt(NONE)); |
| 1447 | PushOperand(r2); |
| 1448 | CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked); |
| 1449 | } |
| 1450 | |
| 1451 | // Object literals have two parts. The "static" part on the left contains no |
| 1452 | // computed property names, so its map can be computed ahead of time; see |
| 1453 | // runtime.cc::CreateObjectLiteralBoilerplate. The "dynamic" part starts |
| 1454 | // with the first computed property name and continues with all properties |
| 1455 | // to its right. The code above initializes the static component of the |
| 1456 | // object literal and arranges for the map of the result to reflect the |
| 1457 | // static order in which the keys appear. The dynamic properties are |
| 1458 | // compiled into a series of define-own-property runtime calls below, |
| 1459 | // which preserves insertion order. |
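| | // For illustration: in { a: 1, [k]: 2, b: 3 } the static part is |
| | // { a: 1 }; the dynamic part is everything from [k] onwards, |
| | // including b, so that insertion order is preserved. |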
| 1460 | for (; property_index < expr->properties()->length(); property_index++) { |
| 1461 | ObjectLiteral::Property* property = expr->properties()->at(property_index); |
| 1462 | |
| 1463 | Expression* value = property->value(); |
| 1464 | if (!result_saved) { |
| 1465 | PushOperand(r2); // Save result on the stack |
| 1466 | result_saved = true; |
| 1467 | } |
| 1468 | |
| 1469 | __ LoadP(r2, MemOperand(sp)); // Duplicate receiver. |
| 1470 | PushOperand(r2); |
| 1471 | |
| 1472 | if (property->kind() == ObjectLiteral::Property::PROTOTYPE) { |
| 1473 | DCHECK(!property->is_computed_name()); |
| 1474 | VisitForStackValue(value); |
| 1475 | DCHECK(property->emit_store()); |
| 1476 | CallRuntimeWithOperands(Runtime::kInternalSetPrototype); |
| 1477 | PrepareForBailoutForId(expr->GetIdForPropertySet(property_index), |
| 1478 | NO_REGISTERS); |
| 1479 | } else { |
| 1480 | EmitPropertyKey(property, expr->GetIdForPropertyName(property_index)); |
| 1481 | VisitForStackValue(value); |
| 1482 | if (NeedsHomeObject(value)) { |
| 1483 | EmitSetHomeObject(value, 2, property->GetSlot()); |
| 1484 | } |
| 1485 | |
| 1486 | switch (property->kind()) { |
| 1487 | case ObjectLiteral::Property::CONSTANT: |
| 1488 | case ObjectLiteral::Property::MATERIALIZED_LITERAL: |
| 1489 | case ObjectLiteral::Property::COMPUTED: |
| 1490 | if (property->emit_store()) { |
| 1491 | PushOperand(Smi::FromInt(NONE)); |
| 1492 | PushOperand(Smi::FromInt(property->NeedsSetFunctionName())); |
| 1493 | CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral); |
| 1494 | } else { |
| 1495 | DropOperands(3); |
| 1496 | } |
| 1497 | break; |
| 1498 | |
| 1499 | case ObjectLiteral::Property::PROTOTYPE: |
| 1500 | UNREACHABLE(); |
| 1501 | break; |
| 1502 | |
| 1503 | case ObjectLiteral::Property::GETTER: |
| 1504 | PushOperand(Smi::FromInt(NONE)); |
| 1505 | CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked); |
| 1506 | break; |
| 1507 | |
| 1508 | case ObjectLiteral::Property::SETTER: |
| 1509 | PushOperand(Smi::FromInt(NONE)); |
| 1510 | CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked); |
| 1511 | break; |
| 1512 | } |
| 1513 | } |
| 1514 | } |
| 1515 | |
| 1516 | if (result_saved) { |
| 1517 | context()->PlugTOS(); |
| 1518 | } else { |
| 1519 | context()->Plug(r2); |
| 1520 | } |
| 1521 | } |
| 1522 | |
| 1523 | void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { |
| 1524 | Comment cmnt(masm_, "[ ArrayLiteral"); |
| 1525 | |
| 1526 | Handle<FixedArray> constant_elements = expr->constant_elements(); |
| 1527 | bool has_fast_elements = |
| 1528 | IsFastObjectElementsKind(expr->constant_elements_kind()); |
| 1529 | Handle<FixedArrayBase> constant_elements_values( |
| 1530 | FixedArrayBase::cast(constant_elements->get(1))); |
| 1531 | |
| 1532 | AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; |
| 1533 | if (has_fast_elements && !FLAG_allocation_site_pretenuring) { |
| 1534 | // Allocation sites here only serve to track elements-kind transitions; |
| 1535 | // with fast object elements and no pretenuring, tracking can be skipped. |
| 1536 | allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; |
| 1537 | } |
| 1538 | |
| 1539 | __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1540 | __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index())); |
| 1541 | __ mov(r3, Operand(constant_elements)); |
| 1542 | if (MustCreateArrayLiteralWithRuntime(expr)) { |
| 1543 | __ LoadSmiLiteral(r2, Smi::FromInt(expr->ComputeFlags())); |
| 1544 | __ Push(r5, r4, r3, r2); |
| 1545 | __ CallRuntime(Runtime::kCreateArrayLiteral); |
| 1546 | } else { |
| 1547 | FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); |
| 1548 | __ CallStub(&stub); |
| 1549 | } |
| 1550 | PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); |
| 1551 | |
| 1552 | bool result_saved = false; // Is the result saved to the stack? |
| 1553 | ZoneList<Expression*>* subexprs = expr->values(); |
| 1554 | int length = subexprs->length(); |
| 1555 | |
| 1556 | // Emit code to evaluate all the non-constant subexpressions and to store |
| 1557 | // them into the newly cloned array. |
| 1558 | int array_index = 0; |
| 1559 | for (; array_index < length; array_index++) { |
| 1560 | Expression* subexpr = subexprs->at(array_index); |
| 1561 | DCHECK(!subexpr->IsSpread()); |
| 1562 | // If the subexpression is a literal or a simple materialized literal it |
| 1563 | // is already set in the cloned array. |
| 1564 | if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; |
| 1565 | |
| 1566 | if (!result_saved) { |
| 1567 | PushOperand(r2); |
| 1568 | result_saved = true; |
| 1569 | } |
| 1570 | VisitForAccumulatorValue(subexpr); |
| 1571 | |
| 1572 | __ LoadSmiLiteral(StoreDescriptor::NameRegister(), |
| 1573 | Smi::FromInt(array_index)); |
| 1574 | __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0)); |
| 1575 | EmitLoadStoreICSlot(expr->LiteralFeedbackSlot()); |
| 1576 | Handle<Code> ic = |
| 1577 | CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); |
| 1578 | CallIC(ic); |
| 1579 | |
| 1580 | PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); |
| 1581 | } |
| 1582 | |
| 1583 | // If the array literal contains spread expressions, it has two parts. The |
| 1584 | // first part is the "static" array, which has a literal index and is |
| 1585 | // handled above. The second part starts at the first spread expression |
| 1586 | // (inclusive); these elements get appended to the array. Note that the |
| 1587 | // number of elements an iterable produces is unknown ahead of time. |
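| | // e.g. for [a, b, ...it] the cloned literal covers a and b via the |
| | // keyed-store IC above; the remaining elements are appended one at a |
| | // time through Runtime::kAppendElement. |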
| 1588 | if (array_index < length && result_saved) { |
| 1589 | PopOperand(r2); |
| 1590 | result_saved = false; |
| 1591 | } |
| 1592 | for (; array_index < length; array_index++) { |
| 1593 | Expression* subexpr = subexprs->at(array_index); |
| 1594 | |
| 1595 | PushOperand(r2); |
| 1596 | DCHECK(!subexpr->IsSpread()); |
| 1597 | VisitForStackValue(subexpr); |
| 1598 | CallRuntimeWithOperands(Runtime::kAppendElement); |
| 1599 | |
| 1600 | PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); |
| 1601 | } |
| 1602 | |
| 1603 | if (result_saved) { |
| 1604 | context()->PlugTOS(); |
| 1605 | } else { |
| 1606 | context()->Plug(r2); |
| 1607 | } |
| 1608 | } |
| 1609 | |
| 1610 | void FullCodeGenerator::VisitAssignment(Assignment* expr) { |
| 1611 | DCHECK(expr->target()->IsValidReferenceExpressionOrThis()); |
| 1612 | |
| 1613 | Comment cmnt(masm_, "[ Assignment"); |
| 1614 | SetExpressionPosition(expr, INSERT_BREAK); |
| 1615 | |
| 1616 | Property* property = expr->target()->AsProperty(); |
| 1617 | LhsKind assign_type = Property::GetAssignType(property); |
| 1618 | |
| 1619 | // Evaluate LHS expression. |
| 1620 | switch (assign_type) { |
| 1621 | case VARIABLE: |
| 1622 | // Nothing to do here. |
| 1623 | break; |
| 1624 | case NAMED_PROPERTY: |
| 1625 | if (expr->is_compound()) { |
| 1626 | // We need the receiver both on the stack and in the register. |
| 1627 | VisitForStackValue(property->obj()); |
| 1628 | __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); |
| 1629 | } else { |
| 1630 | VisitForStackValue(property->obj()); |
| 1631 | } |
| 1632 | break; |
| 1633 | case NAMED_SUPER_PROPERTY: |
| 1634 | VisitForStackValue( |
| 1635 | property->obj()->AsSuperPropertyReference()->this_var()); |
| 1636 | VisitForAccumulatorValue( |
| 1637 | property->obj()->AsSuperPropertyReference()->home_object()); |
| 1638 | PushOperand(result_register()); |
| 1639 | if (expr->is_compound()) { |
| 1640 | const Register scratch = r3; |
| 1641 | __ LoadP(scratch, MemOperand(sp, kPointerSize)); |
| 1642 | PushOperands(scratch, result_register()); |
| 1643 | } |
| 1644 | break; |
| 1645 | case KEYED_SUPER_PROPERTY: { |
| 1646 | const Register scratch = r3; |
| 1647 | VisitForStackValue( |
| 1648 | property->obj()->AsSuperPropertyReference()->this_var()); |
| 1649 | VisitForAccumulatorValue( |
| 1650 | property->obj()->AsSuperPropertyReference()->home_object()); |
| 1651 | __ LoadRR(scratch, result_register()); |
| 1652 | VisitForAccumulatorValue(property->key()); |
| 1653 | PushOperands(scratch, result_register()); |
| 1654 | if (expr->is_compound()) { |
| 1655 | const Register scratch1 = r4; |
| 1656 | __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize)); |
| 1657 | PushOperands(scratch1, scratch, result_register()); |
| 1658 | } |
| 1659 | break; |
| 1660 | } |
| 1661 | case KEYED_PROPERTY: |
| 1662 | if (expr->is_compound()) { |
| 1663 | VisitForStackValue(property->obj()); |
| 1664 | VisitForStackValue(property->key()); |
| 1665 | __ LoadP(LoadDescriptor::ReceiverRegister(), |
| 1666 | MemOperand(sp, 1 * kPointerSize)); |
| 1667 | __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0)); |
| 1668 | } else { |
| 1669 | VisitForStackValue(property->obj()); |
| 1670 | VisitForStackValue(property->key()); |
| 1671 | } |
| 1672 | break; |
| 1673 | } |
| 1674 | |
| 1675 | // For compound assignments we need another deoptimization point after the |
| 1676 | // variable/property load. |
| 1677 | if (expr->is_compound()) { |
| 1678 | { |
| 1679 | AccumulatorValueContext context(this); |
| 1680 | switch (assign_type) { |
| 1681 | case VARIABLE: |
| 1682 | EmitVariableLoad(expr->target()->AsVariableProxy()); |
| 1683 | PrepareForBailout(expr->target(), TOS_REG); |
| 1684 | break; |
| 1685 | case NAMED_PROPERTY: |
| 1686 | EmitNamedPropertyLoad(property); |
| 1687 | PrepareForBailoutForId(property->LoadId(), TOS_REG); |
| 1688 | break; |
| 1689 | case NAMED_SUPER_PROPERTY: |
| 1690 | EmitNamedSuperPropertyLoad(property); |
| 1691 | PrepareForBailoutForId(property->LoadId(), TOS_REG); |
| 1692 | break; |
| 1693 | case KEYED_SUPER_PROPERTY: |
| 1694 | EmitKeyedSuperPropertyLoad(property); |
| 1695 | PrepareForBailoutForId(property->LoadId(), TOS_REG); |
| 1696 | break; |
| 1697 | case KEYED_PROPERTY: |
| 1698 | EmitKeyedPropertyLoad(property); |
| 1699 | PrepareForBailoutForId(property->LoadId(), TOS_REG); |
| 1700 | break; |
| 1701 | } |
| 1702 | } |
| 1703 | |
| 1704 | Token::Value op = expr->binary_op(); |
| 1705 | PushOperand(r2); // Left operand goes on the stack. |
| 1706 | VisitForAccumulatorValue(expr->value()); |
| 1707 | |
| 1708 | AccumulatorValueContext context(this); |
| 1709 | if (ShouldInlineSmiCase(op)) { |
| 1710 | EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(), |
| 1711 | expr->value()); |
| 1712 | } else { |
| 1713 | EmitBinaryOp(expr->binary_operation(), op); |
| 1714 | } |
| 1715 | |
| 1716 | // Deoptimization point in case the binary operation may have side effects. |
| 1717 | PrepareForBailout(expr->binary_operation(), TOS_REG); |
| 1718 | } else { |
| 1719 | VisitForAccumulatorValue(expr->value()); |
| 1720 | } |
| 1721 | |
| 1722 | SetExpressionPosition(expr); |
| 1723 | |
| 1724 | // Store the value. |
| 1725 | switch (assign_type) { |
| 1726 | case VARIABLE: |
| 1727 | EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), |
| 1728 | expr->op(), expr->AssignmentSlot()); |
| 1729 | PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 1730 | context()->Plug(r2); |
| 1731 | break; |
| 1732 | case NAMED_PROPERTY: |
| 1733 | EmitNamedPropertyAssignment(expr); |
| 1734 | break; |
| 1735 | case NAMED_SUPER_PROPERTY: |
| 1736 | EmitNamedSuperPropertyStore(property); |
| 1737 | context()->Plug(r2); |
| 1738 | break; |
| 1739 | case KEYED_SUPER_PROPERTY: |
| 1740 | EmitKeyedSuperPropertyStore(property); |
| 1741 | context()->Plug(r2); |
| 1742 | break; |
| 1743 | case KEYED_PROPERTY: |
| 1744 | EmitKeyedPropertyAssignment(expr); |
| 1745 | break; |
| 1746 | } |
| 1747 | } |
| 1748 | |
| 1749 | void FullCodeGenerator::VisitYield(Yield* expr) { |
| 1750 | Comment cmnt(masm_, "[ Yield"); |
| 1751 | SetExpressionPosition(expr); |
| 1752 | |
| 1753 | // Evaluate yielded value first; the initial iterator definition depends on |
| 1754 | // this. It stays on the stack while we update the iterator. |
| 1755 | VisitForStackValue(expr->expression()); |
| 1756 | |
| 1757 | Label suspend, continuation, post_runtime, resume; |
| 1758 | |
| 1759 | __ b(&suspend); |
| 1760 | __ bind(&continuation); |
| 1761 | // When we arrive here, the stack top is the resume mode and |
| 1762 | // result_register() holds the input value (the argument given to the |
| 1763 | // respective resume operation). |
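| | // e.g. for gen.next(v), v arrives in result_register() and a smi |
| | // resume mode (NEXT/RETURN/THROW) is popped into r3 just below. |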
| 1764 | __ RecordGeneratorContinuation(); |
| 1765 | __ pop(r3); |
| 1766 | __ CmpSmiLiteral(r3, Smi::FromInt(JSGeneratorObject::RETURN), r0); |
| 1767 | __ bne(&resume); |
| 1768 | __ push(result_register()); |
| 1769 | EmitCreateIteratorResult(true); |
| 1770 | EmitUnwindAndReturn(); |
| 1771 | |
| 1772 | __ bind(&suspend); |
| 1773 | OperandStackDepthIncrement(1); // Not popped on this path. |
| 1774 | VisitForAccumulatorValue(expr->generator_object()); |
| 1775 | DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); |
| 1776 | __ LoadSmiLiteral(r3, Smi::FromInt(continuation.pos())); |
| 1777 | __ StoreP(r3, FieldMemOperand(r2, JSGeneratorObject::kContinuationOffset), |
| 1778 | r0); |
| 1779 | __ StoreP(cp, FieldMemOperand(r2, JSGeneratorObject::kContextOffset), r0); |
| 1780 | __ LoadRR(r3, cp); |
| 1781 | __ RecordWriteField(r2, JSGeneratorObject::kContextOffset, r3, r4, |
| 1782 | kLRHasBeenSaved, kDontSaveFPRegs); |
| 1783 | __ AddP(r3, fp, Operand(StandardFrameConstants::kExpressionsOffset)); |
| 1784 | __ CmpP(sp, r3); |
| 1785 | __ beq(&post_runtime); |
| 1786 | __ push(r2); // generator object |
| 1787 | __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); |
| 1788 | __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 1789 | __ bind(&post_runtime); |
| 1790 | PopOperand(result_register()); |
| 1791 | EmitReturnSequence(); |
| 1792 | |
| 1793 | __ bind(&resume); |
| 1794 | context()->Plug(result_register()); |
| 1795 | } |
| 1796 | |
| 1797 | void FullCodeGenerator::EmitGeneratorResume( |
| 1798 | Expression* generator, Expression* value, |
| 1799 | JSGeneratorObject::ResumeMode resume_mode) { |
| 1800 | // The value stays in r2, and is ultimately read by the resumed generator, as |
| 1801 | // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it |
| 1802 | // is read to throw the value when the resumed generator is already closed. |
| 1803 | // r3 will hold the generator object until the activation has been resumed. |
| 1804 | VisitForStackValue(generator); |
| 1805 | VisitForAccumulatorValue(value); |
| 1806 | PopOperand(r3); |
| 1807 | |
| 1808 | // Store input value into generator object. |
| 1809 | __ StoreP(result_register(), |
| 1810 | FieldMemOperand(r3, JSGeneratorObject::kInputOffset), r0); |
| 1811 | __ LoadRR(r4, result_register()); |
| 1812 | __ RecordWriteField(r3, JSGeneratorObject::kInputOffset, r4, r5, |
| 1813 | kLRHasBeenSaved, kDontSaveFPRegs); |
| 1814 | |
| 1815 | // Load suspended function and context. |
| 1816 | __ LoadP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset)); |
| 1817 | __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset)); |
| 1818 | |
| 1819 | // Load receiver and store as the first argument. |
| 1820 | __ LoadP(r4, FieldMemOperand(r3, JSGeneratorObject::kReceiverOffset)); |
| 1821 | __ push(r4); |
| 1822 | |
| 1823 | // Push holes for arguments to generator function. Since the parser forced |
| 1824 | // context allocation for any variables in generators, the actual argument |
| 1825 | // values have already been copied into the context and these dummy values |
| 1826 | // will never be used. |
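| | // e.g. resuming function* g(a, b) pushes two holes here; a and b |
| | // themselves live in the generator's context. |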
| 1827 | __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset)); |
| 1828 | __ LoadW( |
| 1829 | r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset)); |
| 1830 | __ LoadRoot(r4, Heap::kTheHoleValueRootIndex); |
| 1831 | Label argument_loop, push_frame; |
| 1832 | #if V8_TARGET_ARCH_S390X |
| 1833 | __ CmpP(r5, Operand::Zero()); |
| 1834 | __ beq(&push_frame, Label::kNear); |
| 1835 | #else |
| 1836 | __ SmiUntag(r5); |
| 1837 | __ beq(&push_frame, Label::kNear); |
| 1838 | #endif |
| 1839 | __ LoadRR(r0, r5); |
| 1840 | __ bind(&argument_loop); |
| 1841 | __ push(r4); |
| 1842 | __ SubP(r0, Operand(1)); |
| 1843 | __ bne(&argument_loop); |
| 1844 | |
| 1845 | // Enter a new JavaScript frame, and initialize its slots as they were when |
| 1846 | // the generator was suspended. |
| 1847 | Label resume_frame, done; |
| 1848 | __ bind(&push_frame); |
| 1849 | __ b(r14, &resume_frame); // brasl |
| 1850 | __ b(&done); |
| 1851 | __ bind(&resume_frame); |
| 1852 | // lr = return address. |
| 1853 | // fp = caller's frame pointer. |
| 1854 | // cp = callee's context, |
| 1855 | // r6 = callee's JS function. |
| 1856 | __ PushStandardFrame(r6); |
| 1857 | |
| 1858 | // Load the operand stack size. |
| 1859 | __ LoadP(r5, FieldMemOperand(r3, JSGeneratorObject::kOperandStackOffset)); |
| 1860 | __ LoadP(r5, FieldMemOperand(r5, FixedArray::kLengthOffset)); |
| 1861 | __ SmiUntag(r5); |
| 1862 | |
| 1863 | // If we are sending a value and there is no operand stack, we can jump back |
| 1864 | // in directly. |
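| | // In that case ip is set below to code entry + continuation offset, |
| | // landing right after the suspension point. |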
| 1865 | Label call_resume; |
| 1866 | if (resume_mode == JSGeneratorObject::NEXT) { |
| 1867 | Label slow_resume; |
| 1868 | __ bne(&slow_resume, Label::kNear); |
| 1869 | __ LoadP(ip, FieldMemOperand(r6, JSFunction::kCodeEntryOffset)); |
| 1870 | __ LoadP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset)); |
| 1871 | __ SmiUntag(r4); |
| 1872 | __ AddP(ip, ip, r4); |
| 1873 | __ LoadSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)); |
| 1874 | __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset)); |
| 1875 | __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation. |
| 1876 | __ Jump(ip); |
| 1877 | __ bind(&slow_resume); |
| 1878 | } else { |
| 1879 | __ beq(&call_resume); |
| 1880 | } |
| 1881 | |
| 1882 | // Otherwise, we push holes for the operand stack and call the runtime to fix |
| 1883 | // up the stack and the handlers. |
| 1884 | Label operand_loop; |
| 1885 | __ LoadRR(r0, r5); |
| 1886 | __ bind(&operand_loop); |
| 1887 | __ push(r4); |
| 1888 | __ SubP(r0, Operand(1)); |
| 1889 | __ bne(&operand_loop); |
| 1890 | |
| 1891 | __ bind(&call_resume); |
| 1892 | __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation. |
| 1893 | DCHECK(!result_register().is(r3)); |
| 1894 | __ Push(r3, result_register()); |
| 1895 | __ Push(Smi::FromInt(resume_mode)); |
| 1896 | __ CallRuntime(Runtime::kResumeJSGeneratorObject); |
| 1897 | // Not reached: the runtime call returns elsewhere. |
| 1898 | __ stop("not-reached"); |
| 1899 | |
| 1900 | __ bind(&done); |
| 1901 | context()->Plug(result_register()); |
| 1902 | } |
| 1903 | |
| 1904 | void FullCodeGenerator::PushOperands(Register reg1, Register reg2) { |
| 1905 | OperandStackDepthIncrement(2); |
| 1906 | __ Push(reg1, reg2); |
| 1907 | } |
| 1908 | |
| 1909 | void FullCodeGenerator::PushOperands(Register reg1, Register reg2, |
| 1910 | Register reg3) { |
| 1911 | OperandStackDepthIncrement(3); |
| 1912 | __ Push(reg1, reg2, reg3); |
| 1913 | } |
| 1914 | |
| 1915 | void FullCodeGenerator::PushOperands(Register reg1, Register reg2, |
| 1916 | Register reg3, Register reg4) { |
| 1917 | OperandStackDepthIncrement(4); |
| 1918 | __ Push(reg1, reg2, reg3, reg4); |
| 1919 | } |
| 1920 | |
| 1921 | void FullCodeGenerator::PopOperands(Register reg1, Register reg2) { |
| 1922 | OperandStackDepthDecrement(2); |
| 1923 | __ Pop(reg1, reg2); |
| 1924 | } |
| 1925 | |
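| | // Sanity check (debug-code only): the statically tracked operand stack |
| | // depth must match the frame, i.e. fp - sp == fixed frame size + |
| | // operand_stack_depth_ * kPointerSize (e.g. three tracked operands on a |
| | // 64-bit target account for 24 bytes). |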
| 1926 | void FullCodeGenerator::EmitOperandStackDepthCheck() { |
| 1927 | if (FLAG_debug_code) { |
| 1928 | int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp + |
| 1929 | operand_stack_depth_ * kPointerSize; |
| 1930 | __ SubP(r2, fp, sp); |
| 1931 | __ CmpP(r2, Operand(expected_diff)); |
| 1932 | __ Assert(eq, kUnexpectedStackDepth); |
| 1933 | } |
| 1934 | } |
| 1935 | |
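| | // Builds a JSIteratorResult, i.e. an object of the shape |
| | // { value: <popped operand>, done: <done> }, allocating inline and |
| | // falling back to Runtime::kAllocateInNewSpace if that fails. |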
| 1936 | void FullCodeGenerator::EmitCreateIteratorResult(bool done) { |
| 1937 | Label allocate, done_allocate; |
| 1938 | |
| 1939 | __ Allocate(JSIteratorResult::kSize, r2, r4, r5, &allocate, TAG_OBJECT); |
| 1940 | __ b(&done_allocate); |
| 1941 | |
| 1942 | __ bind(&allocate); |
| 1943 | __ Push(Smi::FromInt(JSIteratorResult::kSize)); |
| 1944 | __ CallRuntime(Runtime::kAllocateInNewSpace); |
| 1945 | |
| 1946 | __ bind(&done_allocate); |
| 1947 | __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r3); |
| 1948 | PopOperand(r4); |
| 1949 | __ LoadRoot(r5, |
| 1950 | done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex); |
| 1951 | __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex); |
| 1952 | __ StoreP(r3, FieldMemOperand(r2, HeapObject::kMapOffset), r0); |
| 1953 | __ StoreP(r6, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0); |
| 1954 | __ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0); |
| 1955 | __ StoreP(r4, FieldMemOperand(r2, JSIteratorResult::kValueOffset), r0); |
| 1956 | __ StoreP(r5, FieldMemOperand(r2, JSIteratorResult::kDoneOffset), r0); |
| 1957 | } |
| 1958 | |
| 1959 | void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, |
| 1960 | Token::Value op, |
| 1961 | Expression* left_expr, |
| 1962 | Expression* right_expr) { |
| 1963 | Label done, smi_case, stub_call; |
| 1964 | |
| 1965 | Register scratch1 = r4; |
| 1966 | Register scratch2 = r5; |
| 1967 | |
| 1968 | // Get the arguments. |
| 1969 | Register left = r3; |
| 1970 | Register right = r2; |
| 1971 | PopOperand(left); |
| 1972 | |
| 1973 | // Perform combined smi check on both operands. |
| 1974 | __ LoadRR(scratch1, right); |
| 1975 | __ OrP(scratch1, left); |
| 1976 | STATIC_ASSERT(kSmiTag == 0); |
| 1977 | JumpPatchSite patch_site(masm_); |
| 1978 | patch_site.EmitJumpIfSmi(scratch1, &smi_case); |
| 1979 | |
| 1980 | __ bind(&stub_call); |
| 1981 | Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code(); |
| 1982 | CallIC(code, expr->BinaryOperationFeedbackId()); |
| 1983 | patch_site.EmitPatchInfo(); |
| 1984 | __ b(&done); |
| 1985 | |
| 1986 | __ bind(&smi_case); |
| 1987 | // Smi case. This code works the same way as the smi-smi case in the type |
| 1988 | // recording binary operation stub. |
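| | // Because kSmiTag == 0, tagged smis can be added, subtracted, and |
| | // combined bitwise directly: e.g. for ADD, (a << shift) + (b << shift) |
| | // == (a + b) << shift, with overflow falling back to the stub above. |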
| 1989 | switch (op) { |
| 1990 | case Token::SAR: |
| 1991 | __ GetLeastBitsFromSmi(scratch1, right, 5); |
| 1992 | __ ShiftRightArithP(right, left, scratch1); |
| 1993 | __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize)); |
| 1994 | break; |
| 1995 | case Token::SHL: { |
| 1996 | __ GetLeastBitsFromSmi(scratch2, right, 5); |
| 1997 | #if V8_TARGET_ARCH_S390X |
| 1998 | __ ShiftLeftP(right, left, scratch2); |
| 1999 | #else |
| 2000 | __ SmiUntag(scratch1, left); |
| 2001 | __ ShiftLeftP(scratch1, scratch1, scratch2); |
| 2002 | // Check that the *signed* result fits in a smi |
| 2003 | __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call); |
| 2004 | __ SmiTag(right, scratch1); |
| 2005 | #endif |
| 2006 | break; |
| 2007 | } |
| 2008 | case Token::SHR: { |
| 2009 | __ SmiUntag(scratch1, left); |
| 2010 | __ GetLeastBitsFromSmi(scratch2, right, 5); |
| 2011 | __ srl(scratch1, scratch2); |
| 2012 | // Unsigned shift is not allowed to produce a negative number. |
| 2013 | __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call); |
| 2014 | __ SmiTag(right, scratch1); |
| 2015 | break; |
| 2016 | } |
| 2017 | case Token::ADD: { |
| 2018 | __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0); |
| 2019 | __ BranchOnOverflow(&stub_call); |
| 2020 | __ LoadRR(right, scratch1); |
| 2021 | break; |
| 2022 | } |
| 2023 | case Token::SUB: { |
| 2024 | __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0); |
| 2025 | __ BranchOnOverflow(&stub_call); |
| 2026 | __ LoadRR(right, scratch1); |
| 2027 | break; |
| 2028 | } |
| 2029 | case Token::MUL: { |
| 2030 | Label mul_zero; |
| 2031 | #if V8_TARGET_ARCH_S390X |
| 2032 | // Remove tag from both operands. |
| 2033 | __ SmiUntag(ip, right); |
| 2034 | __ SmiUntag(scratch2, left); |
| 2035 | __ mr_z(scratch1, ip); |
| 2036 | // Check for overflowing the smi range - no overflow if higher 33 bits of |
| 2037 | // the result are identical. |
| 2038 | __ lr(ip, scratch2); // 32 bit load |
| 2039 | __ sra(ip, Operand(31)); |
| 2040 | __ cr_z(ip, scratch1); // 32 bit compare |
| 2041 | __ bne(&stub_call); |
| 2042 | #else |
| 2043 | __ SmiUntag(ip, right); |
| 2044 | __ LoadRR(scratch2, left); // load into low order of reg pair |
| 2045 | __ mr_z(scratch1, ip); // R4:R5 = R5 * ip |
| 2046 | // Check for overflowing the smi range - no overflow if higher 33 bits of |
| 2047 | // the result are identical. |
| 2048 | __ TestIfInt32(scratch1, scratch2, ip); |
| 2049 | __ bne(&stub_call); |
| 2050 | #endif |
| 2051 | // Go slow on zero result to handle -0. |
| 2052 | __ chi(scratch2, Operand::Zero()); |
| 2053 | __ beq(&mul_zero, Label::kNear); |
| 2054 | #if V8_TARGET_ARCH_S390X |
| 2055 | __ SmiTag(right, scratch2); |
| 2056 | #else |
| 2057 | __ LoadRR(right, scratch2); |
| 2058 | #endif |
| 2059 | __ b(&done); |
| 2060 | // One operand was zero. If the other was negative, the result must be -0, |
| 2061 | // not a smi: the sum of the tagged operands is negative exactly then. |
| 2062 | __ bind(&mul_zero); |
| 2063 | __ AddP(scratch2, right, left); |
| 2064 | __ CmpP(scratch2, Operand::Zero()); |
| 2065 | __ blt(&stub_call); |
| 2066 | __ LoadSmiLiteral(right, Smi::FromInt(0)); |
| 2067 | break; |
| 2068 | } |
| 2069 | case Token::BIT_OR: |
| 2070 | __ OrP(right, left); |
| 2071 | break; |
| 2072 | case Token::BIT_AND: |
| 2073 | __ AndP(right, left); |
| 2074 | break; |
| 2075 | case Token::BIT_XOR: |
| 2076 | __ XorP(right, left); |
| 2077 | break; |
| 2078 | default: |
| 2079 | UNREACHABLE(); |
| 2080 | } |
| 2081 | |
| 2082 | __ bind(&done); |
| 2083 | context()->Plug(r2); |
| 2084 | } |
| 2085 | |
| 2086 | void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) { |
| 2087 | for (int i = 0; i < lit->properties()->length(); i++) { |
| 2088 | ObjectLiteral::Property* property = lit->properties()->at(i); |
| 2089 | Expression* value = property->value(); |
| 2090 | |
| 2091 | Register scratch = r3; |
| 2092 | if (property->is_static()) { |
| 2093 | __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor |
| 2094 | } else { |
| 2095 | __ LoadP(scratch, MemOperand(sp, 0)); // prototype |
| 2096 | } |
| 2097 | PushOperand(scratch); |
| 2098 | EmitPropertyKey(property, lit->GetIdForProperty(i)); |
| 2099 | |
| 2100 | // The static "prototype" property is read-only. The non-computed |
| 2101 | // property name case is handled in the parser. Since this is the only |
| 2102 | // case where an own read-only property must be checked for, it is |
| 2103 | // special-cased here rather than checked for every property. |
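| | // e.g. class C { static ['prototype']() {} } must throw at runtime, |
| | // while the non-computed form `static prototype() {}` is already |
| | // rejected by the parser. |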
| 2104 | if (property->is_static() && property->is_computed_name()) { |
| 2105 | __ CallRuntime(Runtime::kThrowIfStaticPrototype); |
| 2106 | __ push(r2); |
| 2107 | } |
| 2108 | |
| 2109 | VisitForStackValue(value); |
| 2110 | if (NeedsHomeObject(value)) { |
| 2111 | EmitSetHomeObject(value, 2, property->GetSlot()); |
| 2112 | } |
| 2113 | |
| 2114 | switch (property->kind()) { |
| 2115 | case ObjectLiteral::Property::CONSTANT: |
| 2116 | case ObjectLiteral::Property::MATERIALIZED_LITERAL: |
| 2117 | case ObjectLiteral::Property::PROTOTYPE: |
| 2118 | UNREACHABLE(); |
| 2119 | case ObjectLiteral::Property::COMPUTED: |
| 2120 | PushOperand(Smi::FromInt(DONT_ENUM)); |
| 2121 | PushOperand(Smi::FromInt(property->NeedsSetFunctionName())); |
| 2122 | CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral); |
| 2123 | break; |
| 2124 | |
| 2125 | case ObjectLiteral::Property::GETTER: |
| 2126 | PushOperand(Smi::FromInt(DONT_ENUM)); |
| 2127 | CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked); |
| 2128 | break; |
| 2129 | |
| 2130 | case ObjectLiteral::Property::SETTER: |
| 2131 | PushOperand(Smi::FromInt(DONT_ENUM)); |
| 2132 | CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked); |
| 2133 | break; |
| 2134 | |
| 2135 | default: |
| 2136 | UNREACHABLE(); |
| 2137 | } |
| 2138 | } |
| 2139 | } |
| 2140 | |
| 2141 | void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) { |
| 2142 | PopOperand(r3); |
| 2143 | Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code(); |
| 2144 | JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. |
| 2145 | CallIC(code, expr->BinaryOperationFeedbackId()); |
| 2146 | patch_site.EmitPatchInfo(); |
| 2147 | context()->Plug(r2); |
| 2148 | } |
| 2149 | |
| 2150 | void FullCodeGenerator::EmitAssignment(Expression* expr, |
| 2151 | FeedbackVectorSlot slot) { |
| 2152 | DCHECK(expr->IsValidReferenceExpressionOrThis()); |
| 2153 | |
| 2154 | Property* prop = expr->AsProperty(); |
| 2155 | LhsKind assign_type = Property::GetAssignType(prop); |
| 2156 | |
| 2157 | switch (assign_type) { |
| 2158 | case VARIABLE: { |
| 2159 | Variable* var = expr->AsVariableProxy()->var(); |
| 2160 | EffectContext context(this); |
| 2161 | EmitVariableAssignment(var, Token::ASSIGN, slot); |
| 2162 | break; |
| 2163 | } |
| 2164 | case NAMED_PROPERTY: { |
| 2165 | PushOperand(r2); // Preserve value. |
| 2166 | VisitForAccumulatorValue(prop->obj()); |
| 2167 | __ Move(StoreDescriptor::ReceiverRegister(), r2); |
| 2168 | PopOperand(StoreDescriptor::ValueRegister()); // Restore value. |
| 2169 | __ mov(StoreDescriptor::NameRegister(), |
| 2170 | Operand(prop->key()->AsLiteral()->value())); |
| 2171 | EmitLoadStoreICSlot(slot); |
| 2172 | CallStoreIC(); |
| 2173 | break; |
| 2174 | } |
| 2175 | case NAMED_SUPER_PROPERTY: { |
| 2176 | PushOperand(r2); |
| 2177 | VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); |
| 2178 | VisitForAccumulatorValue( |
| 2179 | prop->obj()->AsSuperPropertyReference()->home_object()); |
| 2180 | // stack: value, this; r2: home_object |
| 2181 | Register scratch = r4; |
| 2182 | Register scratch2 = r5; |
| 2183 | __ LoadRR(scratch, result_register()); // home_object |
| 2184 | __ LoadP(r2, MemOperand(sp, kPointerSize)); // value |
| 2185 | __ LoadP(scratch2, MemOperand(sp, 0)); // this |
| 2186 | __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this |
| 2187 | __ StoreP(scratch, MemOperand(sp, 0)); // home_object |
| 2188 | // stack: this, home_object; r2: value |
| 2189 | EmitNamedSuperPropertyStore(prop); |
| 2190 | break; |
| 2191 | } |
| 2192 | case KEYED_SUPER_PROPERTY: { |
| 2193 | PushOperand(r2); |
| 2194 | VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); |
| 2195 | VisitForStackValue( |
| 2196 | prop->obj()->AsSuperPropertyReference()->home_object()); |
| 2197 | VisitForAccumulatorValue(prop->key()); |
| 2198 | Register scratch = r4; |
| 2199 | Register scratch2 = r5; |
| 2200 | __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value |
| 2201 | // stack: value, this, home_object; r2: key, scratch2 (r5): value |
| 2202 | __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this |
| 2203 | __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize)); |
| 2204 | __ LoadP(scratch, MemOperand(sp, 0)); // home_object |
| 2205 | __ StoreP(scratch, MemOperand(sp, kPointerSize)); |
| 2206 | __ StoreP(r2, MemOperand(sp, 0)); |
| 2207 | __ Move(r2, scratch2); |
| 2208 | // stack: this, home_object, key; r2: value. |
| 2209 | EmitKeyedSuperPropertyStore(prop); |
| 2210 | break; |
| 2211 | } |
| 2212 | case KEYED_PROPERTY: { |
| 2213 | PushOperand(r2); // Preserve value. |
| 2214 | VisitForStackValue(prop->obj()); |
| 2215 | VisitForAccumulatorValue(prop->key()); |
| 2216 | __ Move(StoreDescriptor::NameRegister(), r2); |
| 2217 | PopOperands(StoreDescriptor::ValueRegister(), |
| 2218 | StoreDescriptor::ReceiverRegister()); |
| 2219 | EmitLoadStoreICSlot(slot); |
| 2220 | Handle<Code> ic = |
| 2221 | CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); |
| 2222 | CallIC(ic); |
| 2223 | break; |
| 2224 | } |
| 2225 | } |
| 2226 | context()->Plug(r2); |
| 2227 | } |
| 2228 | |
| 2229 | void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( |
| 2230 | Variable* var, MemOperand location) { |
| 2231 | __ StoreP(result_register(), location); |
| 2232 | if (var->IsContextSlot()) { |
| 2233 | // RecordWrite may destroy all its register arguments. |
| 2234 | __ LoadRR(r5, result_register()); |
| 2235 | int offset = Context::SlotOffset(var->index()); |
| 2236 | __ RecordWriteContextSlot(r3, offset, r5, r4, kLRHasBeenSaved, |
| 2237 | kDontSaveFPRegs); |
| 2238 | } |
| 2239 | } |
| 2240 | |
| 2241 | void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, |
| 2242 | FeedbackVectorSlot slot) { |
| 2243 | if (var->IsUnallocated()) { |
| 2244 | // Global var, const, or let. |
| 2245 | __ mov(StoreDescriptor::NameRegister(), Operand(var->name())); |
| 2246 | __ LoadGlobalObject(StoreDescriptor::ReceiverRegister()); |
| 2247 | EmitLoadStoreICSlot(slot); |
| 2248 | CallStoreIC(); |
| 2249 | |
| 2250 | } else if (var->mode() == LET && op != Token::INIT) { |
| 2251 | // Non-initializing assignment to let variable needs a write barrier. |
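| | // A hole here means the binding is still in its temporal dead zone, |
| | // e.g. `{ x = 1; let x; }`, and the store must throw a ReferenceError. |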
| 2252 | DCHECK(!var->IsLookupSlot()); |
| 2253 | DCHECK(var->IsStackAllocated() || var->IsContextSlot()); |
| 2254 | Label assign; |
| 2255 | MemOperand location = VarOperand(var, r3); |
| 2256 | __ LoadP(r5, location); |
| 2257 | __ CompareRoot(r5, Heap::kTheHoleValueRootIndex); |
| 2258 | __ bne(&assign); |
| 2259 | __ mov(r5, Operand(var->name())); |
| 2260 | __ push(r5); |
| 2261 | __ CallRuntime(Runtime::kThrowReferenceError); |
| 2262 | // Perform the assignment. |
| 2263 | __ bind(&assign); |
| 2264 | EmitStoreToStackLocalOrContextSlot(var, location); |
| 2265 | |
| 2266 | } else if (var->mode() == CONST && op != Token::INIT) { |
| 2267 | // Assignment to a const variable always throws: a ReferenceError if the |
| | // binding is still the hole (TDZ), a const assignment error otherwise. |
| 2268 | DCHECK(!var->IsLookupSlot()); |
| 2269 | DCHECK(var->IsStackAllocated() || var->IsContextSlot()); |
| 2270 | Label const_error; |
| 2271 | MemOperand location = VarOperand(var, r3); |
| 2272 | __ LoadP(r5, location); |
| 2273 | __ CompareRoot(r5, Heap::kTheHoleValueRootIndex); |
| 2274 | __ bne(&const_error, Label::kNear); |
| 2275 | __ mov(r5, Operand(var->name())); |
| 2276 | __ push(r5); |
| 2277 | __ CallRuntime(Runtime::kThrowReferenceError); |
| 2278 | __ bind(&const_error); |
| 2279 | __ CallRuntime(Runtime::kThrowConstAssignError); |
| 2280 | |
| 2281 | } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) { |
| 2282 | // Initializing assignment to const {this} needs a write barrier. |
| 2283 | DCHECK(var->IsStackAllocated() || var->IsContextSlot()); |
| 2284 | Label uninitialized_this; |
| 2285 | MemOperand location = VarOperand(var, r3); |
| 2286 | __ LoadP(r5, location); |
| 2287 | __ CompareRoot(r5, Heap::kTheHoleValueRootIndex); |
| 2288 | __ beq(&uninitialized_this); |
| 2289 | __ mov(r3, Operand(var->name())); |
| 2290 | __ push(r3); |
| 2291 | __ CallRuntime(Runtime::kThrowReferenceError); |
| 2292 | __ bind(&uninitialized_this); |
| 2293 | EmitStoreToStackLocalOrContextSlot(var, location); |
| 2294 | |
| 2295 | } else if (!var->is_const_mode() || |
| 2296 | (var->mode() == CONST && op == Token::INIT)) { |
| 2297 | if (var->IsLookupSlot()) { |
| 2298 | // Assignment to var. |
| 2299 | __ Push(var->name()); |
| 2300 | __ Push(r2); |
| 2301 | __ CallRuntime(is_strict(language_mode()) |
| 2302 | ? Runtime::kStoreLookupSlot_Strict |
| 2303 | : Runtime::kStoreLookupSlot_Sloppy); |
| 2304 | } else { |
| 2305 | // Assignment to var or initializing assignment to let/const in harmony |
| 2306 | // mode. |
| 2307 | DCHECK((var->IsStackAllocated() || var->IsContextSlot())); |
| 2308 | MemOperand location = VarOperand(var, r3); |
| 2309 | if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) { |
| 2310 | // Check for an uninitialized let binding. |
| 2311 | __ LoadP(r4, location); |
| 2312 | __ CompareRoot(r4, Heap::kTheHoleValueRootIndex); |
| 2313 | __ Check(eq, kLetBindingReInitialization); |
| 2314 | } |
| 2315 | EmitStoreToStackLocalOrContextSlot(var, location); |
| 2316 | } |
| 2317 | } else if (var->mode() == CONST_LEGACY && op == Token::INIT) { |
| 2318 | // Const initializers need a write barrier. |
| 2319 | DCHECK(!var->IsParameter()); // No const parameters. |
| 2320 | if (var->IsLookupSlot()) { |
| 2321 | __ push(r2); |
| 2322 | __ mov(r2, Operand(var->name())); |
| 2323 | __ Push(cp, r2); // Context and name. |
| 2324 | __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot); |
| 2325 | } else { |
| 2326 | DCHECK(var->IsStackAllocated() || var->IsContextSlot()); |
| 2327 | Label skip; |
| 2328 | MemOperand location = VarOperand(var, r3); |
| 2329 | __ LoadP(r4, location); |
| 2330 | __ CompareRoot(r4, Heap::kTheHoleValueRootIndex); |
| 2331 | __ bne(&skip); |
| 2332 | EmitStoreToStackLocalOrContextSlot(var, location); |
| 2333 | __ bind(&skip); |
| 2334 | } |
| 2335 | |
| 2336 | } else { |
| 2337 | DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT); |
| 2338 | if (is_strict(language_mode())) { |
| 2339 | __ CallRuntime(Runtime::kThrowConstAssignError); |
| 2340 | } |
| 2341 | // Silently ignore store in sloppy mode. |
| 2342 | } |
| 2343 | } |
| 2344 | |
| 2345 | void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { |
| 2346 | // Assignment to a property, using a named store IC. |
| 2347 | Property* prop = expr->target()->AsProperty(); |
| 2348 | DCHECK(prop != NULL); |
| 2349 | DCHECK(prop->key()->IsLiteral()); |
| 2350 | |
| 2351 | __ mov(StoreDescriptor::NameRegister(), |
| 2352 | Operand(prop->key()->AsLiteral()->value())); |
| 2353 | PopOperand(StoreDescriptor::ReceiverRegister()); |
| 2354 | EmitLoadStoreICSlot(expr->AssignmentSlot()); |
| 2355 | CallStoreIC(); |
| 2356 | |
| 2357 | PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 2358 | context()->Plug(r2); |
| 2359 | } |
| 2360 | |
| 2361 | void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) { |
| 2362 | // Assignment to named property of super. |
| 2363 | // r2 : value |
| 2364 | // stack : receiver ('this'), home_object |
| 2365 | DCHECK(prop != NULL); |
| 2366 | Literal* key = prop->key()->AsLiteral(); |
| 2367 | DCHECK(key != NULL); |
| 2368 | |
| 2369 | PushOperand(key->value()); |
| 2370 | PushOperand(r2); |
| 2371 | CallRuntimeWithOperands((is_strict(language_mode()) |
| 2372 | ? Runtime::kStoreToSuper_Strict |
| 2373 | : Runtime::kStoreToSuper_Sloppy)); |
| 2374 | } |
| 2375 | |
| 2376 | void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) { |
| 2377 | // Assignment to keyed property of super. |
| 2378 | // r2 : value |
| 2379 | // stack : receiver ('this'), home_object, key |
| 2380 | DCHECK(prop != NULL); |
| 2381 | |
| 2382 | PushOperand(r2); |
| 2383 | CallRuntimeWithOperands((is_strict(language_mode()) |
| 2384 | ? Runtime::kStoreKeyedToSuper_Strict |
| 2385 | : Runtime::kStoreKeyedToSuper_Sloppy)); |
| 2386 | } |
| 2387 | |
| 2388 | void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { |
| 2389 | // Assignment to a property, using a keyed store IC. |
| 2390 | PopOperands(StoreDescriptor::ReceiverRegister(), |
| 2391 | StoreDescriptor::NameRegister()); |
| 2392 | DCHECK(StoreDescriptor::ValueRegister().is(r2)); |
| 2393 | |
| 2394 | Handle<Code> ic = |
| 2395 | CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); |
| 2396 | EmitLoadStoreICSlot(expr->AssignmentSlot()); |
| 2397 | CallIC(ic); |
| 2398 | |
| 2399 | PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 2400 | context()->Plug(r2); |
| 2401 | } |
| 2402 | |
| 2403 | void FullCodeGenerator::VisitProperty(Property* expr) { |
| 2404 | Comment cmnt(masm_, "[ Property"); |
| 2405 | SetExpressionPosition(expr); |
| 2406 | |
| 2407 | Expression* key = expr->key(); |
| 2408 | |
| 2409 | if (key->IsPropertyName()) { |
| 2410 | if (!expr->IsSuperAccess()) { |
| 2411 | VisitForAccumulatorValue(expr->obj()); |
| 2412 | __ Move(LoadDescriptor::ReceiverRegister(), r2); |
| 2413 | EmitNamedPropertyLoad(expr); |
| 2414 | } else { |
| 2415 | VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var()); |
| 2416 | VisitForStackValue( |
| 2417 | expr->obj()->AsSuperPropertyReference()->home_object()); |
| 2418 | EmitNamedSuperPropertyLoad(expr); |
| 2419 | } |
| 2420 | } else { |
| 2421 | if (!expr->IsSuperAccess()) { |
| 2422 | VisitForStackValue(expr->obj()); |
| 2423 | VisitForAccumulatorValue(expr->key()); |
| 2424 | __ Move(LoadDescriptor::NameRegister(), r2); |
| 2425 | PopOperand(LoadDescriptor::ReceiverRegister()); |
| 2426 | EmitKeyedPropertyLoad(expr); |
| 2427 | } else { |
| 2428 | VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var()); |
| 2429 | VisitForStackValue( |
| 2430 | expr->obj()->AsSuperPropertyReference()->home_object()); |
| 2431 | VisitForStackValue(expr->key()); |
| 2432 | EmitKeyedSuperPropertyLoad(expr); |
| 2433 | } |
| 2434 | } |
| 2435 | PrepareForBailoutForId(expr->LoadId(), TOS_REG); |
| 2436 | context()->Plug(r2); |
| 2437 | } |
| 2438 | |
| 2439 | void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) { |
| 2440 | ic_total_count_++; |
| 2441 | __ Call(code, RelocInfo::CODE_TARGET, ast_id); |
| 2442 | } |
| 2443 | |
| 2444 | // Code common for calls using the IC. |
| 2445 | void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { |
| 2446 | Expression* callee = expr->expression(); |
| 2447 | |
| 2448 | // Get the target function. |
| 2449 | ConvertReceiverMode convert_mode; |
| 2450 | if (callee->IsVariableProxy()) { |
| 2451 | { |
| 2452 | StackValueContext context(this); |
| 2453 | EmitVariableLoad(callee->AsVariableProxy()); |
| 2454 | PrepareForBailout(callee, NO_REGISTERS); |
| 2455 | } |
| 2456 | // Push undefined as receiver. This is patched in the method prologue if it |
| 2457 | // is a sloppy mode method. |
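| | // e.g. for a plain call f(), a sloppy-mode f then sees the global |
| | // proxy as its receiver, while a strict-mode f sees undefined. |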
| 2458 | __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); |
| 2459 | PushOperand(r1); |
| 2460 | convert_mode = ConvertReceiverMode::kNullOrUndefined; |
| 2461 | } else { |
| 2462 | // Load the function from the receiver. |
| 2463 | DCHECK(callee->IsProperty()); |
| 2464 | DCHECK(!callee->AsProperty()->IsSuperAccess()); |
| 2465 | __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); |
| 2466 | EmitNamedPropertyLoad(callee->AsProperty()); |
| 2467 | PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); |
| 2468 | // Push the target function under the receiver. |
| 2469 | __ LoadP(r1, MemOperand(sp, 0)); |
| 2470 | PushOperand(r1); |
| 2471 | __ StoreP(r2, MemOperand(sp, kPointerSize)); |
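| | // The receiver copy is now on top of the stack, with the target |
| | // function directly beneath it. |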
| 2472 | convert_mode = ConvertReceiverMode::kNotNullOrUndefined; |
| 2473 | } |
| 2474 | |
| 2475 | EmitCall(expr, convert_mode); |
| 2476 | } |
| 2477 | |
| 2478 | void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) { |
| 2479 | Expression* callee = expr->expression(); |
| 2480 | DCHECK(callee->IsProperty()); |
| 2481 | Property* prop = callee->AsProperty(); |
| 2482 | DCHECK(prop->IsSuperAccess()); |
| 2483 | SetExpressionPosition(prop); |
| 2484 | |
| 2485 | Literal* key = prop->key()->AsLiteral(); |
| 2486 | DCHECK(!key->value()->IsSmi()); |
| 2487 | // Load the function from the receiver. |
| 2488 | const Register scratch = r3; |
| 2489 | SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); |
| 2490 | VisitForAccumulatorValue(super_ref->home_object()); |
| 2491 | __ LoadRR(scratch, r2); |
| 2492 | VisitForAccumulatorValue(super_ref->this_var()); |
| 2493 | PushOperands(scratch, r2, r2, scratch); |
| 2494 | PushOperand(key->value()); |
| 2495 | |
| 2496 | // Stack here: |
| 2497 | // - home_object |
| 2498 | // - this (receiver) |
| 2499 | // - this (receiver) <-- LoadFromSuper will pop here and below. |
| 2500 | // - home_object |
| 2501 | // - key |
| 2502 | CallRuntimeWithOperands(Runtime::kLoadFromSuper); |
| 2503 | |
| 2504 | // Replace home_object with target function. |
| 2505 | __ StoreP(r2, MemOperand(sp, kPointerSize)); |
| 2506 | |
| 2507 | // Stack here: |
| 2508 | // - target function |
| 2509 | // - this (receiver) |
| 2510 | EmitCall(expr); |
| 2511 | } |
| 2512 | |
| 2513 | // Code common for calls using the IC. |
| 2514 | void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) { |
| 2515 | // Load the key. |
| 2516 | VisitForAccumulatorValue(key); |
| 2517 | |
| 2518 | Expression* callee = expr->expression(); |
| 2519 | |
| 2520 | // Load the function from the receiver. |
| 2521 | DCHECK(callee->IsProperty()); |
| 2522 | __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); |
| 2523 | __ Move(LoadDescriptor::NameRegister(), r2); |
| 2524 | EmitKeyedPropertyLoad(callee->AsProperty()); |
| 2525 | PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); |
| 2526 | |
| 2527 | // Push the target function under the receiver. |
| 2528 | __ LoadP(ip, MemOperand(sp, 0)); |
| 2529 | PushOperand(ip); |
| 2530 | __ StoreP(r2, MemOperand(sp, kPointerSize)); |
| 2531 | |
| 2532 | EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined); |
| 2533 | } |
| 2534 | |
| 2535 | void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) { |
| 2536 | Expression* callee = expr->expression(); |
| 2537 | DCHECK(callee->IsProperty()); |
| 2538 | Property* prop = callee->AsProperty(); |
| 2539 | DCHECK(prop->IsSuperAccess()); |
| 2540 | |
| 2541 | SetExpressionPosition(prop); |
| 2542 | // Load the function from the receiver. |
| 2543 | const Register scratch = r3; |
| 2544 | SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); |
| 2545 | VisitForAccumulatorValue(super_ref->home_object()); |
| 2546 | __ LoadRR(scratch, r2); |
| 2547 | VisitForAccumulatorValue(super_ref->this_var()); |
| 2548 | PushOperands(scratch, r2, r2, scratch); |
| 2549 | VisitForStackValue(prop->key()); |
| 2550 | |
| 2551 | // Stack here: |
| 2552 | // - home_object |
| 2553 | // - this (receiver) |
| 2554 | // - this (receiver) <-- LoadKeyedFromSuper will pop here and below. |
| 2555 | // - home_object |
| 2556 | // - key |
| 2557 | CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper); |
| 2558 | |
| 2559 | // Replace home_object with target function. |
| 2560 | __ StoreP(r2, MemOperand(sp, kPointerSize)); |
| 2561 | |
| 2562 | // Stack here: |
| 2563 | // - target function |
| 2564 | // - this (receiver) |
| 2565 | EmitCall(expr); |
| 2566 | } |
| 2567 | |
| 2568 | void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { |
| 2569 | // Load the arguments. |
| 2570 | ZoneList<Expression*>* args = expr->arguments(); |
| 2571 | int arg_count = args->length(); |
| 2572 | for (int i = 0; i < arg_count; i++) { |
| 2573 | VisitForStackValue(args->at(i)); |
| 2574 | } |
| 2575 | |
| 2576 | PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); |
| 2577 | SetCallPosition(expr, expr->tail_call_mode()); |
| 2578 | if (expr->tail_call_mode() == TailCallMode::kAllow) { |
| 2579 | if (FLAG_trace) { |
| 2580 | __ CallRuntime(Runtime::kTraceTailCall); |
| 2581 | } |
| 2582 | // Update profiling counters before the tail call since we will |
| 2583 | // not return to this function. |
| 2584 | EmitProfilingCounterHandlingForReturnSequence(true); |
| 2585 | } |
| 2586 | Handle<Code> ic = |
| 2587 | CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) |
| 2588 | .code(); |
| 2589 | __ LoadSmiLiteral(r5, SmiFromSlot(expr->CallFeedbackICSlot())); |
| 2590 | __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
| 2591 | // Don't assign a type feedback id to the IC, since type feedback is provided |
| 2592 | // by the vector above. |
| 2593 | CallIC(ic); |
| 2594 | OperandStackDepthDecrement(arg_count + 1); |
| 2595 | |
| 2596 | RecordJSReturnSite(expr); |
| 2597 | // Restore context register. |
| 2598 | __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2599 | context()->DropAndPlug(1, r2); |
| 2600 | } |
| 2601 | |
| 2602 | void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { |
| 2603 | // r6: copy of the first argument or undefined if it doesn't exist. |
| 2604 | if (arg_count > 0) { |
| 2605 | __ LoadP(r6, MemOperand(sp, arg_count * kPointerSize), r0); |
| 2606 | } else { |
| 2607 | __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); |
| 2608 | } |
| 2609 | |
| 2610 | // r5: the receiver of the enclosing function. |
| 2611 | __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 2612 | |
| 2613 | // r4: language mode. |
| 2614 | __ LoadSmiLiteral(r4, Smi::FromInt(language_mode())); |
| 2615 | |
| 2616 | // r3: the start position of the scope the call resides in. |
| 2617 | __ LoadSmiLiteral(r3, Smi::FromInt(scope()->start_position())); |
| 2618 | |
| 2619 | // Do the runtime call. |
| 2620 | __ Push(r6, r5, r4, r3); |
| 2621 | __ CallRuntime(Runtime::kResolvePossiblyDirectEval); |
| 2622 | } |
| 2623 | |
| 2624 | // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls. |
| 2625 | void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) { |
| 2626 | VariableProxy* callee = expr->expression()->AsVariableProxy(); |
| 2627 | if (callee->var()->IsLookupSlot()) { |
| 2628 | Label slow, done; |
| 2629 | SetExpressionPosition(callee); |
| 2630 | // Generate code for loading from variables potentially shadowed by |
| 2631 | // eval-introduced variables. |
| 2632 | EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done); |
| 2633 | |
| 2634 | __ bind(&slow); |
| 2635 | // Call the runtime to find the function to call (returned in r2) and |
| 2636 | // the object holding it (returned in r3). |
| 2637 | __ Push(callee->name()); |
| 2638 | __ CallRuntime(Runtime::kLoadLookupSlotForCall); |
| 2639 | PushOperands(r2, r3); // Function, receiver. |
| 2640 | PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS); |
| 2641 | |
| 2642 | // If fast case code has been generated, emit code to push the function |
| 2643 | // and receiver and have the slow path jump around this code. |
| 2644 | if (done.is_linked()) { |
| 2645 | Label call; |
| 2646 | __ b(&call); |
| 2647 | __ bind(&done); |
| 2648 | // Push function. |
| 2649 | __ push(r2); |
| 2650 | // Pass undefined as the receiver, which is the WithBaseObject of a |
| 2651 | // non-object environment record. If the callee is sloppy, it will patch |
| 2652 | // it up to be the global receiver. |
| 2653 | __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); |
| 2654 | __ push(r3); |
| 2655 | __ bind(&call); |
| 2656 | } |
| 2657 | } else { |
| 2658 | VisitForStackValue(callee); |
| 2659 | // refEnv.WithBaseObject() |
| 2660 | __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); |
| 2661 | PushOperand(r4); // Reserved receiver slot. |
| 2662 | } |
| 2663 | } |
| 2664 | |
| 2665 | void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) { |
| 2666 | // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval |
| 2667 | // to resolve the function we need to call. Then we call the resolved |
| 2668 | // function using the given arguments. |
| 2669 | ZoneList<Expression*>* args = expr->arguments(); |
| 2670 | int arg_count = args->length(); |
| 2671 | |
| 2672 | PushCalleeAndWithBaseObject(expr); |
| 2673 | |
| 2674 | // Push the arguments. |
| 2675 | for (int i = 0; i < arg_count; i++) { |
| 2676 | VisitForStackValue(args->at(i)); |
| 2677 | } |
| 2678 | |
| 2679 | // Push a copy of the function (found below the arguments) and |
| 2680 | // resolve eval. |
| 2681 | __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
| 2682 | __ push(r3); |
| 2683 | EmitResolvePossiblyDirectEval(arg_count); |
| 2684 | |
| 2685 | // Touch up the stack with the resolved function. |
| 2686 | __ StoreP(r2, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
| 2687 | |
| 2688 | PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS); |
| 2689 | |
| 2690 | // Record source position for debugger. |
| 2691 | SetCallPosition(expr); |
| 2692 | __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
| 2693 | __ mov(r2, Operand(arg_count)); |
| 2694 | __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny, |
| 2695 | expr->tail_call_mode()), |
| 2696 | RelocInfo::CODE_TARGET); |
| 2697 | OperandStackDepthDecrement(arg_count + 1); |
| 2698 | RecordJSReturnSite(expr); |
| 2699 | // Restore context register. |
| 2700 | __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2701 | context()->DropAndPlug(1, r2); |
| 2702 | } |
| 2703 | |
| 2704 | void FullCodeGenerator::VisitCallNew(CallNew* expr) { |
| 2705 | Comment cmnt(masm_, "[ CallNew"); |
| 2706 | // According to ECMA-262, section 11.2.2, page 44, the function |
| 2707 | // expression in new calls must be evaluated before the |
| 2708 | // arguments. |
| 2709 | |
| 2710 |   // Push the constructor on the stack. If it is not a function, the
| 2711 |   // construct machinery invoked below raises the appropriate error, so
| 2712 |   // no explicit check is emitted here.
| 2713 | DCHECK(!expr->expression()->IsSuperPropertyReference()); |
| 2714 | VisitForStackValue(expr->expression()); |
| 2715 | |
| 2716 | // Push the arguments ("left-to-right") on the stack. |
| 2717 | ZoneList<Expression*>* args = expr->arguments(); |
| 2718 | int arg_count = args->length(); |
| 2719 | for (int i = 0; i < arg_count; i++) { |
| 2720 | VisitForStackValue(args->at(i)); |
| 2721 | } |
| 2722 | |
| 2723 | // Call the construct call builtin that handles allocation and |
| 2724 | // constructor invocation. |
| 2725 | SetConstructCallPosition(expr); |
| 2726 | |
| 2727 | // Load function and argument count into r3 and r2. |
| 2728 | __ mov(r2, Operand(arg_count)); |
| 2729 | __ LoadP(r3, MemOperand(sp, arg_count * kPointerSize), r0); |
| 2730 | |
| 2731 | // Record call targets in unoptimized code. |
| 2732 | __ EmitLoadTypeFeedbackVector(r4); |
| 2733 | __ LoadSmiLiteral(r5, SmiFromSlot(expr->CallNewFeedbackSlot())); |
| 2734 | |
| 2735 | CallConstructStub stub(isolate()); |
| 2736 | __ Call(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 2737 | OperandStackDepthDecrement(arg_count + 1); |
| 2738 | PrepareForBailoutForId(expr->ReturnId(), TOS_REG); |
| 2739 | // Restore context register. |
| 2740 | __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2741 | context()->Plug(r2); |
| 2742 | } |
| 2743 | |
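|      | // Emits a super(...) call. The super constructor target is the
|      | // [[Prototype]] of the active function, loaded from its map below;
|      | // new.target is passed in r5.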
| 2744 | void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) { |
| 2745 | SuperCallReference* super_call_ref = |
| 2746 | expr->expression()->AsSuperCallReference(); |
| 2747 | DCHECK_NOT_NULL(super_call_ref); |
| 2748 | |
| 2749 | // Push the super constructor target on the stack (may be null, |
| 2750 | // but the Construct builtin can deal with that properly). |
| 2751 | VisitForAccumulatorValue(super_call_ref->this_function_var()); |
| 2752 | __ AssertFunction(result_register()); |
| 2753 | __ LoadP(result_register(), |
| 2754 | FieldMemOperand(result_register(), HeapObject::kMapOffset)); |
| 2755 | __ LoadP(result_register(), |
| 2756 | FieldMemOperand(result_register(), Map::kPrototypeOffset)); |
| 2757 | PushOperand(result_register()); |
| 2758 | |
| 2759 | // Push the arguments ("left-to-right") on the stack. |
| 2760 | ZoneList<Expression*>* args = expr->arguments(); |
| 2761 | int arg_count = args->length(); |
| 2762 | for (int i = 0; i < arg_count; i++) { |
| 2763 | VisitForStackValue(args->at(i)); |
| 2764 | } |
| 2765 | |
| 2766 | // Call the construct call builtin that handles allocation and |
| 2767 | // constructor invocation. |
| 2768 | SetConstructCallPosition(expr); |
| 2769 | |
| 2770 | // Load new target into r5. |
| 2771 | VisitForAccumulatorValue(super_call_ref->new_target_var()); |
| 2772 | __ LoadRR(r5, result_register()); |
| 2773 | |
| 2774 |   // Load function and argument count into r3 and r2.
| 2775 | __ mov(r2, Operand(arg_count)); |
| 2776 | __ LoadP(r3, MemOperand(sp, arg_count * kPointerSize)); |
| 2777 | |
| 2778 | __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
| 2779 | OperandStackDepthDecrement(arg_count + 1); |
| 2780 | |
| 2781 | RecordJSReturnSite(expr); |
| 2782 | |
| 2783 | // Restore context register. |
| 2784 | __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2785 | context()->Plug(r2); |
| 2786 | } |
| 2787 | |
| 2788 | void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { |
| 2789 | ZoneList<Expression*>* args = expr->arguments(); |
| 2790 | DCHECK(args->length() == 1); |
| 2791 | |
| 2792 | VisitForAccumulatorValue(args->at(0)); |
| 2793 | |
| 2794 |   Label materialize_true, materialize_false;
| 2795 | Label* if_true = NULL; |
| 2796 | Label* if_false = NULL; |
| 2797 | Label* fall_through = NULL; |
| 2798 | context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 2799 | &if_false, &fall_through); |
| 2800 | |
| 2801 | PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2802 | __ TestIfSmi(r2); |
| 2803 | Split(eq, if_true, if_false, fall_through); |
| 2804 | |
| 2805 | context()->Plug(if_true, if_false); |
| 2806 | } |
| 2807 | |
| 2808 | void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) { |
| 2809 | ZoneList<Expression*>* args = expr->arguments(); |
| 2810 | DCHECK(args->length() == 1); |
| 2811 | |
| 2812 | VisitForAccumulatorValue(args->at(0)); |
| 2813 | |
| 2814 | Label materialize_true, materialize_false; |
| 2815 | Label* if_true = NULL; |
| 2816 | Label* if_false = NULL; |
| 2817 | Label* fall_through = NULL; |
| 2818 | context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 2819 | &if_false, &fall_through); |
| 2820 | |
| 2821 | __ JumpIfSmi(r2, if_false); |
| 2822 | __ CompareObjectType(r2, r3, r3, FIRST_JS_RECEIVER_TYPE); |
| 2823 | PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2824 | Split(ge, if_true, if_false, fall_through); |
| 2825 | |
| 2826 | context()->Plug(if_true, if_false); |
| 2827 | } |
| 2828 | |
| 2829 | void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { |
| 2830 | ZoneList<Expression*>* args = expr->arguments(); |
| 2831 | DCHECK(args->length() == 1); |
| 2832 | |
| 2833 | VisitForAccumulatorValue(args->at(0)); |
| 2834 | |
| 2835 | Label materialize_true, materialize_false; |
| 2836 | Label* if_true = NULL; |
| 2837 | Label* if_false = NULL; |
| 2838 | Label* fall_through = NULL; |
| 2839 | context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 2840 | &if_false, &fall_through); |
| 2841 | |
| 2842 | __ JumpIfSmi(r2, if_false); |
| 2843 | __ CompareObjectType(r2, r3, r3, JS_ARRAY_TYPE); |
| 2844 | PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2845 | Split(eq, if_true, if_false, fall_through); |
| 2846 | |
| 2847 | context()->Plug(if_true, if_false); |
| 2848 | } |
| 2849 | |
| 2850 | void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) { |
| 2851 | ZoneList<Expression*>* args = expr->arguments(); |
| 2852 | DCHECK(args->length() == 1); |
| 2853 | |
| 2854 | VisitForAccumulatorValue(args->at(0)); |
| 2855 | |
| 2856 | Label materialize_true, materialize_false; |
| 2857 | Label* if_true = NULL; |
| 2858 | Label* if_false = NULL; |
| 2859 | Label* fall_through = NULL; |
| 2860 | context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 2861 | &if_false, &fall_through); |
| 2862 | |
| 2863 | __ JumpIfSmi(r2, if_false); |
| 2864 | __ CompareObjectType(r2, r3, r3, JS_TYPED_ARRAY_TYPE); |
| 2865 | PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2866 | Split(eq, if_true, if_false, fall_through); |
| 2867 | |
| 2868 | context()->Plug(if_true, if_false); |
| 2869 | } |
| 2870 | |
| 2871 | void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { |
| 2872 | ZoneList<Expression*>* args = expr->arguments(); |
| 2873 | DCHECK(args->length() == 1); |
| 2874 | |
| 2875 | VisitForAccumulatorValue(args->at(0)); |
| 2876 | |
| 2877 | Label materialize_true, materialize_false; |
| 2878 | Label* if_true = NULL; |
| 2879 | Label* if_false = NULL; |
| 2880 | Label* fall_through = NULL; |
| 2881 | context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 2882 | &if_false, &fall_through); |
| 2883 | |
| 2884 | __ JumpIfSmi(r2, if_false); |
| 2885 | __ CompareObjectType(r2, r3, r3, JS_REGEXP_TYPE); |
| 2886 | PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2887 | Split(eq, if_true, if_false, fall_through); |
| 2888 | |
| 2889 | context()->Plug(if_true, if_false); |
| 2890 | } |
| 2891 | |
| 2892 | void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) { |
| 2893 | ZoneList<Expression*>* args = expr->arguments(); |
| 2894 | DCHECK(args->length() == 1); |
| 2895 | |
| 2896 | VisitForAccumulatorValue(args->at(0)); |
| 2897 | |
| 2898 | Label materialize_true, materialize_false; |
| 2899 | Label* if_true = NULL; |
| 2900 | Label* if_false = NULL; |
| 2901 | Label* fall_through = NULL; |
| 2902 | context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 2903 | &if_false, &fall_through); |
| 2904 | |
| 2905 | __ JumpIfSmi(r2, if_false); |
| 2906 | __ CompareObjectType(r2, r3, r3, JS_PROXY_TYPE); |
| 2907 | PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2908 | Split(eq, if_true, if_false, fall_through); |
| 2909 | |
| 2910 | context()->Plug(if_true, if_false); |
| 2911 | } |
| 2912 | |
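|      | // Computes the class name of the argument: null for non-receivers,
|      | // "Function" for function kinds (JSFunction, JSBoundFunction), and
|      | // otherwise the instance class name from the map constructor's
|      | // SharedFunctionInfo ("Object" if the constructor is not a JS function).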
| 2913 | void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { |
| 2914 | ZoneList<Expression*>* args = expr->arguments(); |
| 2915 | DCHECK(args->length() == 1); |
| 2916 | Label done, null, function, non_function_constructor; |
| 2917 | |
| 2918 | VisitForAccumulatorValue(args->at(0)); |
| 2919 | |
| 2920 | // If the object is not a JSReceiver, we return null. |
| 2921 | __ JumpIfSmi(r2, &null); |
| 2922 | STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); |
| 2923 | __ CompareObjectType(r2, r2, r3, FIRST_JS_RECEIVER_TYPE); |
| 2924 | // Map is now in r2. |
| 2925 | __ blt(&null); |
| 2926 | |
| 2927 | // Return 'Function' for JSFunction and JSBoundFunction objects. |
| 2928 | __ CmpLogicalP(r3, Operand(FIRST_FUNCTION_TYPE)); |
| 2929 | STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE); |
| 2930 | __ bge(&function); |
| 2931 | |
| 2932 | // Check if the constructor in the map is a JS function. |
| 2933 | Register instance_type = r4; |
| 2934 | __ GetMapConstructor(r2, r2, r3, instance_type); |
| 2935 | __ CmpP(instance_type, Operand(JS_FUNCTION_TYPE)); |
| 2936 | __ bne(&non_function_constructor, Label::kNear); |
| 2937 | |
| 2938 | // r2 now contains the constructor function. Grab the |
| 2939 | // instance class name from there. |
| 2940 | __ LoadP(r2, FieldMemOperand(r2, JSFunction::kSharedFunctionInfoOffset)); |
| 2941 | __ LoadP(r2, |
| 2942 | FieldMemOperand(r2, SharedFunctionInfo::kInstanceClassNameOffset)); |
| 2943 | __ b(&done, Label::kNear); |
| 2944 | |
| 2945 | // Functions have class 'Function'. |
| 2946 | __ bind(&function); |
| 2947 | __ LoadRoot(r2, Heap::kFunction_stringRootIndex); |
| 2948 | __ b(&done, Label::kNear); |
| 2949 | |
| 2950 | // Objects with a non-function constructor have class 'Object'. |
| 2951 | __ bind(&non_function_constructor); |
| 2952 | __ LoadRoot(r2, Heap::kObject_stringRootIndex); |
| 2953 | __ b(&done, Label::kNear); |
| 2954 | |
| 2955 | // Non-JS objects have class null. |
| 2956 | __ bind(&null); |
| 2957 | __ LoadRoot(r2, Heap::kNullValueRootIndex); |
| 2958 | |
| 2959 | // All done. |
| 2960 | __ bind(&done); |
| 2961 | |
| 2962 | context()->Plug(r2); |
| 2963 | } |
| 2964 | |
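|      | // Unwraps a JSValue wrapper to its primitive value; smis and
|      | // non-wrapper objects are returned unchanged.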
| 2965 | void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { |
| 2966 | ZoneList<Expression*>* args = expr->arguments(); |
| 2967 | DCHECK(args->length() == 1); |
| 2968 | VisitForAccumulatorValue(args->at(0)); // Load the object. |
| 2969 | |
| 2970 | Label done; |
| 2971 | // If the object is a smi return the object. |
| 2972 | __ JumpIfSmi(r2, &done); |
| 2973 | // If the object is not a value type, return the object. |
| 2974 | __ CompareObjectType(r2, r3, r3, JS_VALUE_TYPE); |
| 2975 | __ bne(&done, Label::kNear); |
| 2976 | __ LoadP(r2, FieldMemOperand(r2, JSValue::kValueOffset)); |
| 2977 | |
| 2978 | __ bind(&done); |
| 2979 | context()->Plug(r2); |
| 2980 | } |
| 2981 | |
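|      | // Stores the character code `value` at `index` of a sequential
|      | // one-byte string, in place. Arguments arrive as (index, value, string).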
| 2982 | void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { |
| 2983 | ZoneList<Expression*>* args = expr->arguments(); |
| 2984 | DCHECK_EQ(3, args->length()); |
| 2985 | |
| 2986 | Register string = r2; |
| 2987 | Register index = r3; |
| 2988 | Register value = r4; |
| 2989 | |
| 2990 | VisitForStackValue(args->at(0)); // index |
| 2991 | VisitForStackValue(args->at(1)); // value |
| 2992 | VisitForAccumulatorValue(args->at(2)); // string |
| 2993 | PopOperands(index, value); |
| 2994 | |
| 2995 | if (FLAG_debug_code) { |
| 2996 | __ TestIfSmi(value); |
| 2997 | __ Check(eq, kNonSmiValue, cr0); |
| 2998 | __ TestIfSmi(index); |
| 2999 | __ Check(eq, kNonSmiIndex, cr0); |
| 3000 | __ SmiUntag(index); |
| 3001 | static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; |
| 3002 | __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type); |
| 3003 | __ SmiTag(index); |
| 3004 | } |
| 3005 | |
| 3006 | __ SmiUntag(value); |
| 3007 | __ AddP(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); |
| 3008 | __ SmiToByteArrayOffset(r1, index); |
| 3009 | __ StoreByte(value, MemOperand(ip, r1)); |
| 3010 | context()->Plug(string); |
| 3011 | } |
| 3012 | |
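|      | // Two-byte variant of the sequential-string store above; the offset is
|      | // scaled for 16-bit characters.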
| 3013 | void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { |
| 3014 | ZoneList<Expression*>* args = expr->arguments(); |
| 3015 | DCHECK_EQ(3, args->length()); |
| 3016 | |
| 3017 | Register string = r2; |
| 3018 | Register index = r3; |
| 3019 | Register value = r4; |
| 3020 | |
| 3021 | VisitForStackValue(args->at(0)); // index |
| 3022 | VisitForStackValue(args->at(1)); // value |
| 3023 | VisitForAccumulatorValue(args->at(2)); // string |
| 3024 | PopOperands(index, value); |
| 3025 | |
| 3026 | if (FLAG_debug_code) { |
| 3027 | __ TestIfSmi(value); |
| 3028 | __ Check(eq, kNonSmiValue, cr0); |
| 3029 | __ TestIfSmi(index); |
| 3030 | __ Check(eq, kNonSmiIndex, cr0); |
| 3031 | __ SmiUntag(index, index); |
| 3032 | static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; |
| 3033 | __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type); |
| 3034 | __ SmiTag(index, index); |
| 3035 | } |
| 3036 | |
| 3037 | __ SmiUntag(value); |
| 3038 | __ SmiToShortArrayOffset(r1, index); |
| 3039 | __ StoreHalfWord(value, MemOperand(r1, string, SeqTwoByteString::kHeaderSize - |
| 3040 | kHeapObjectTag)); |
| 3041 | context()->Plug(string); |
| 3042 | } |
| 3043 | |
| 3044 | void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { |
| 3045 | ZoneList<Expression*>* args = expr->arguments(); |
| 3046 | DCHECK(args->length() == 1); |
| 3047 | VisitForAccumulatorValue(args->at(0)); |
| 3048 | |
| 3049 | Label done; |
| 3050 | StringCharFromCodeGenerator generator(r2, r3); |
| 3051 | generator.GenerateFast(masm_); |
| 3052 | __ b(&done); |
| 3053 | |
| 3054 | NopRuntimeCallHelper call_helper; |
| 3055 | generator.GenerateSlow(masm_, call_helper); |
| 3056 | |
| 3057 | __ bind(&done); |
| 3058 | context()->Plug(r3); |
| 3059 | } |
| 3060 | |
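|      | // Fast path for String.prototype.charCodeAt: NaN is returned for an
|      | // out-of-range index, and undefined is loaded to force conversion when
|      | // the index is not a number.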
| 3061 | void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { |
| 3062 | ZoneList<Expression*>* args = expr->arguments(); |
| 3063 | DCHECK(args->length() == 2); |
| 3064 | VisitForStackValue(args->at(0)); |
| 3065 | VisitForAccumulatorValue(args->at(1)); |
| 3066 | |
| 3067 | Register object = r3; |
| 3068 | Register index = r2; |
| 3069 | Register result = r5; |
| 3070 | |
| 3071 | PopOperand(object); |
| 3072 | |
| 3073 | Label need_conversion; |
| 3074 | Label index_out_of_range; |
| 3075 | Label done; |
| 3076 | StringCharCodeAtGenerator generator(object, index, result, &need_conversion, |
| 3077 | &need_conversion, &index_out_of_range, |
| 3078 | STRING_INDEX_IS_NUMBER); |
| 3079 | generator.GenerateFast(masm_); |
| 3080 | __ b(&done); |
| 3081 | |
| 3082 | __ bind(&index_out_of_range); |
| 3083 | // When the index is out of range, the spec requires us to return |
| 3084 | // NaN. |
| 3085 | __ LoadRoot(result, Heap::kNanValueRootIndex); |
| 3086 | __ b(&done); |
| 3087 | |
| 3088 | __ bind(&need_conversion); |
| 3089 | // Load the undefined value into the result register, which will |
| 3090 | // trigger conversion. |
| 3091 | __ LoadRoot(result, Heap::kUndefinedValueRootIndex); |
| 3092 | __ b(&done); |
| 3093 | |
| 3094 | NopRuntimeCallHelper call_helper; |
| 3095 | generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper); |
| 3096 | |
| 3097 | __ bind(&done); |
| 3098 | context()->Plug(result); |
| 3099 | } |
| 3100 | |
| 3101 | void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { |
| 3102 | ZoneList<Expression*>* args = expr->arguments(); |
| 3103 | DCHECK(args->length() == 2); |
| 3104 | VisitForStackValue(args->at(0)); |
| 3105 | VisitForAccumulatorValue(args->at(1)); |
| 3106 | |
| 3107 | Register object = r3; |
| 3108 | Register index = r2; |
| 3109 | Register scratch = r5; |
| 3110 | Register result = r2; |
| 3111 | |
| 3112 | PopOperand(object); |
| 3113 | |
| 3114 | Label need_conversion; |
| 3115 | Label index_out_of_range; |
| 3116 | Label done; |
| 3117 | StringCharAtGenerator generator(object, index, scratch, result, |
| 3118 | &need_conversion, &need_conversion, |
| 3119 | &index_out_of_range, STRING_INDEX_IS_NUMBER); |
| 3120 | generator.GenerateFast(masm_); |
| 3121 | __ b(&done); |
| 3122 | |
| 3123 | __ bind(&index_out_of_range); |
| 3124 | // When the index is out of range, the spec requires us to return |
| 3125 | // the empty string. |
| 3126 | __ LoadRoot(result, Heap::kempty_stringRootIndex); |
| 3127 | __ b(&done); |
| 3128 | |
| 3129 | __ bind(&need_conversion); |
| 3130 | // Move smi zero into the result register, which will trigger |
| 3131 | // conversion. |
| 3132 | __ LoadSmiLiteral(result, Smi::FromInt(0)); |
| 3133 | __ b(&done); |
| 3134 | |
| 3135 | NopRuntimeCallHelper call_helper; |
| 3136 | generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper); |
| 3137 | |
| 3138 | __ bind(&done); |
| 3139 | context()->Plug(result); |
| 3140 | } |
| 3141 | |
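|      | // Invokes the Call builtin with target, receiver and arguments already
|      | // on the stack; the target is reloaded from below the arguments into r3.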
| 3142 | void FullCodeGenerator::EmitCall(CallRuntime* expr) { |
| 3143 | ZoneList<Expression*>* args = expr->arguments(); |
| 3144 | DCHECK_LE(2, args->length()); |
| 3145 | // Push target, receiver and arguments onto the stack. |
| 3146 | for (Expression* const arg : *args) { |
| 3147 | VisitForStackValue(arg); |
| 3148 | } |
| 3149 | PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); |
| 3150 | // Move target to r3. |
| 3151 | int const argc = args->length() - 2; |
| 3152 | __ LoadP(r3, MemOperand(sp, (argc + 1) * kPointerSize)); |
| 3153 | // Call the target. |
| 3154 | __ mov(r2, Operand(argc)); |
| 3155 | __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
| 3156 | OperandStackDepthDecrement(argc + 1); |
| 3157 | // Restore context register. |
| 3158 | __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 3159 | // Discard the function left on TOS. |
| 3160 | context()->DropAndPlug(1, r2); |
| 3161 | } |
| 3162 | |
| 3163 | void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { |
| 3164 | ZoneList<Expression*>* args = expr->arguments(); |
| 3165 | VisitForAccumulatorValue(args->at(0)); |
| 3166 | |
| 3167 | Label materialize_true, materialize_false; |
| 3168 | Label* if_true = NULL; |
| 3169 | Label* if_false = NULL; |
| 3170 | Label* fall_through = NULL; |
| 3171 | context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 3172 | &if_false, &fall_through); |
| 3173 | |
| 3174 | __ LoadlW(r2, FieldMemOperand(r2, String::kHashFieldOffset)); |
| 3175 | __ AndP(r0, r2, Operand(String::kContainsCachedArrayIndexMask)); |
| 3176 | PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 3177 | Split(eq, if_true, if_false, fall_through); |
| 3178 | |
| 3179 | context()->Plug(if_true, if_false); |
| 3180 | } |
| 3181 | |
| 3182 | void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { |
| 3183 | ZoneList<Expression*>* args = expr->arguments(); |
| 3184 | DCHECK(args->length() == 1); |
| 3185 | VisitForAccumulatorValue(args->at(0)); |
| 3186 | |
| 3187 | __ AssertString(r2); |
| 3188 | |
| 3189 | __ LoadlW(r2, FieldMemOperand(r2, String::kHashFieldOffset)); |
| 3190 | __ IndexFromHash(r2, r2); |
| 3191 | |
| 3192 | context()->Plug(r2); |
| 3193 | } |
| 3194 | |
| 3195 | void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) { |
| 3196 | ZoneList<Expression*>* args = expr->arguments(); |
| 3197 | DCHECK_EQ(1, args->length()); |
| 3198 | VisitForAccumulatorValue(args->at(0)); |
| 3199 | __ AssertFunction(r2); |
| 3200 | __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 3201 | __ LoadP(r2, FieldMemOperand(r2, Map::kPrototypeOffset)); |
| 3202 | context()->Plug(r2); |
| 3203 | } |
| 3204 | |
| 3205 | void FullCodeGenerator::EmitGetOrdinaryHasInstance(CallRuntime* expr) { |
| 3206 | DCHECK_EQ(0, expr->arguments()->length()); |
| 3207 | __ LoadNativeContextSlot(Context::ORDINARY_HAS_INSTANCE_INDEX, r2); |
| 3208 | context()->Plug(r2); |
| 3209 | } |
| 3210 | |
| 3211 | void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { |
| 3212 | DCHECK(expr->arguments()->length() == 0); |
| 3213 | ExternalReference debug_is_active = |
| 3214 | ExternalReference::debug_is_active_address(isolate()); |
| 3215 | __ mov(ip, Operand(debug_is_active)); |
| 3216 | __ LoadlB(r2, MemOperand(ip)); |
| 3217 | __ SmiTag(r2); |
| 3218 | context()->Plug(r2); |
| 3219 | } |
| 3220 | |
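|      | // Allocates the {value, done} iterator result object inline, falling
|      | // back to the runtime when inline allocation fails.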
| 3221 | void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) { |
| 3222 | ZoneList<Expression*>* args = expr->arguments(); |
| 3223 | DCHECK_EQ(2, args->length()); |
| 3224 | VisitForStackValue(args->at(0)); |
| 3225 | VisitForStackValue(args->at(1)); |
| 3226 | |
| 3227 | Label runtime, done; |
| 3228 | |
| 3229 | __ Allocate(JSIteratorResult::kSize, r2, r4, r5, &runtime, TAG_OBJECT); |
| 3230 | __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r3); |
| 3231 | __ Pop(r4, r5); |
| 3232 | __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex); |
| 3233 | __ StoreP(r3, FieldMemOperand(r2, HeapObject::kMapOffset), r0); |
| 3234 | __ StoreP(r6, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0); |
| 3235 | __ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0); |
| 3236 | __ StoreP(r4, FieldMemOperand(r2, JSIteratorResult::kValueOffset), r0); |
| 3237 | __ StoreP(r5, FieldMemOperand(r2, JSIteratorResult::kDoneOffset), r0); |
| 3238 | STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize); |
| 3239 | __ b(&done); |
| 3240 | |
| 3241 | __ bind(&runtime); |
| 3242 | CallRuntimeWithOperands(Runtime::kCreateIterResultObject); |
| 3243 | |
| 3244 | __ bind(&done); |
| 3245 | context()->Plug(r2); |
| 3246 | } |
| 3247 | |
| 3248 | void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) { |
| 3249 | // Push function. |
| 3250 | __ LoadNativeContextSlot(expr->context_index(), r2); |
| 3251 | PushOperand(r2); |
| 3252 | |
| 3253 | // Push undefined as the receiver. |
| 3254 | __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
| 3255 | PushOperand(r2); |
| 3256 | } |
| 3257 | |
| 3258 | void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) { |
| 3259 | ZoneList<Expression*>* args = expr->arguments(); |
| 3260 | int arg_count = args->length(); |
| 3261 | |
| 3262 | SetCallPosition(expr); |
| 3263 | __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
| 3264 | __ mov(r2, Operand(arg_count)); |
| 3265 | __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined), |
| 3266 | RelocInfo::CODE_TARGET); |
| 3267 | OperandStackDepthDecrement(arg_count + 1); |
| 3268 | |
| 3269 | // Restore context register. |
| 3270 | __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 3271 | } |
| 3272 | |
| 3273 | void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { |
| 3274 | switch (expr->op()) { |
| 3275 | case Token::DELETE: { |
| 3276 | Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); |
| 3277 | Property* property = expr->expression()->AsProperty(); |
| 3278 | VariableProxy* proxy = expr->expression()->AsVariableProxy(); |
| 3279 | |
| 3280 | if (property != NULL) { |
| 3281 | VisitForStackValue(property->obj()); |
| 3282 | VisitForStackValue(property->key()); |
| 3283 | CallRuntimeWithOperands(is_strict(language_mode()) |
| 3284 | ? Runtime::kDeleteProperty_Strict |
| 3285 | : Runtime::kDeleteProperty_Sloppy); |
| 3286 | context()->Plug(r2); |
| 3287 | } else if (proxy != NULL) { |
| 3288 | Variable* var = proxy->var(); |
| 3289 | // Delete of an unqualified identifier is disallowed in strict mode but |
| 3290 | // "delete this" is allowed. |
| 3291 | bool is_this = var->HasThisName(isolate()); |
| 3292 | DCHECK(is_sloppy(language_mode()) || is_this); |
| 3293 | if (var->IsUnallocatedOrGlobalSlot()) { |
| 3294 | __ LoadGlobalObject(r4); |
| 3295 | __ mov(r3, Operand(var->name())); |
| 3296 | __ Push(r4, r3); |
| 3297 | __ CallRuntime(Runtime::kDeleteProperty_Sloppy); |
| 3298 | context()->Plug(r2); |
| 3299 | } else if (var->IsStackAllocated() || var->IsContextSlot()) { |
| 3300 | // Result of deleting non-global, non-dynamic variables is false. |
| 3301 | // The subexpression does not have side effects. |
| 3302 | context()->Plug(is_this); |
| 3303 | } else { |
| 3304 | // Non-global variable. Call the runtime to try to delete from the |
| 3305 | // context where the variable was introduced. |
| 3306 | __ Push(var->name()); |
| 3307 | __ CallRuntime(Runtime::kDeleteLookupSlot); |
| 3308 | context()->Plug(r2); |
| 3309 | } |
| 3310 | } else { |
| 3311 | // Result of deleting non-property, non-variable reference is true. |
| 3312 | // The subexpression may have side effects. |
| 3313 | VisitForEffect(expr->expression()); |
| 3314 | context()->Plug(true); |
| 3315 | } |
| 3316 | break; |
| 3317 | } |
| 3318 | |
| 3319 | case Token::VOID: { |
| 3320 | Comment cmnt(masm_, "[ UnaryOperation (VOID)"); |
| 3321 | VisitForEffect(expr->expression()); |
| 3322 | context()->Plug(Heap::kUndefinedValueRootIndex); |
| 3323 | break; |
| 3324 | } |
| 3325 | |
| 3326 | case Token::NOT: { |
| 3327 | Comment cmnt(masm_, "[ UnaryOperation (NOT)"); |
| 3328 | if (context()->IsEffect()) { |
| 3329 | // Unary NOT has no side effects so it's only necessary to visit the |
| 3330 | // subexpression. Match the optimizing compiler by not branching. |
| 3331 | VisitForEffect(expr->expression()); |
| 3332 | } else if (context()->IsTest()) { |
| 3333 | const TestContext* test = TestContext::cast(context()); |
| 3334 | // The labels are swapped for the recursive call. |
| 3335 | VisitForControl(expr->expression(), test->false_label(), |
| 3336 | test->true_label(), test->fall_through()); |
| 3337 | context()->Plug(test->true_label(), test->false_label()); |
| 3338 | } else { |
| 3339 | // We handle value contexts explicitly rather than simply visiting |
| 3340 | // for control and plugging the control flow into the context, |
| 3341 | // because we need to prepare a pair of extra administrative AST ids |
| 3342 | // for the optimizing compiler. |
| 3343 | DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue()); |
| 3344 | Label materialize_true, materialize_false, done; |
| 3345 | VisitForControl(expr->expression(), &materialize_false, |
| 3346 | &materialize_true, &materialize_true); |
| 3347 | if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1); |
| 3348 | __ bind(&materialize_true); |
| 3349 | PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); |
| 3350 | __ LoadRoot(r2, Heap::kTrueValueRootIndex); |
| 3351 | if (context()->IsStackValue()) __ push(r2); |
| 3352 | __ b(&done); |
| 3353 | __ bind(&materialize_false); |
| 3354 | PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS); |
| 3355 | __ LoadRoot(r2, Heap::kFalseValueRootIndex); |
| 3356 | if (context()->IsStackValue()) __ push(r2); |
| 3357 | __ bind(&done); |
| 3358 | } |
| 3359 | break; |
| 3360 | } |
| 3361 | |
| 3362 | case Token::TYPEOF: { |
| 3363 | Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); |
| 3364 | { |
| 3365 | AccumulatorValueContext context(this); |
| 3366 | VisitForTypeofValue(expr->expression()); |
| 3367 | } |
| 3368 | __ LoadRR(r5, r2); |
| 3369 | TypeofStub typeof_stub(isolate()); |
| 3370 | __ CallStub(&typeof_stub); |
| 3371 | context()->Plug(r2); |
| 3372 | break; |
| 3373 | } |
| 3374 | |
| 3375 | default: |
| 3376 | UNREACHABLE(); |
| 3377 | } |
| 3378 | } |
| 3379 | |
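|      | // Emits count operations (++v, v++, --v, v--) for variables and all
|      | // four property kinds, inlining the smi fast path inside loops.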
| 3380 | void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { |
| 3381 | DCHECK(expr->expression()->IsValidReferenceExpressionOrThis()); |
| 3382 | |
| 3383 | Comment cmnt(masm_, "[ CountOperation"); |
| 3384 | |
| 3385 | Property* prop = expr->expression()->AsProperty(); |
| 3386 | LhsKind assign_type = Property::GetAssignType(prop); |
| 3387 | |
| 3388 | // Evaluate expression and get value. |
| 3389 | if (assign_type == VARIABLE) { |
| 3390 | DCHECK(expr->expression()->AsVariableProxy()->var() != NULL); |
| 3391 | AccumulatorValueContext context(this); |
| 3392 | EmitVariableLoad(expr->expression()->AsVariableProxy()); |
| 3393 | } else { |
| 3394 | // Reserve space for result of postfix operation. |
| 3395 | if (expr->is_postfix() && !context()->IsEffect()) { |
| 3396 | __ LoadSmiLiteral(ip, Smi::FromInt(0)); |
| 3397 | PushOperand(ip); |
| 3398 | } |
| 3399 | switch (assign_type) { |
| 3400 | case NAMED_PROPERTY: { |
| 3401 | // Put the object both on the stack and in the register. |
| 3402 | VisitForStackValue(prop->obj()); |
| 3403 | __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); |
| 3404 | EmitNamedPropertyLoad(prop); |
| 3405 | break; |
| 3406 | } |
| 3407 | |
| 3408 | case NAMED_SUPER_PROPERTY: { |
| 3409 | VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); |
| 3410 | VisitForAccumulatorValue( |
| 3411 | prop->obj()->AsSuperPropertyReference()->home_object()); |
| 3412 | PushOperand(result_register()); |
| 3413 | const Register scratch = r3; |
| 3414 | __ LoadP(scratch, MemOperand(sp, kPointerSize)); |
| 3415 | PushOperands(scratch, result_register()); |
| 3416 | EmitNamedSuperPropertyLoad(prop); |
| 3417 | break; |
| 3418 | } |
| 3419 | |
| 3420 | case KEYED_SUPER_PROPERTY: { |
| 3421 | VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); |
| 3422 | VisitForAccumulatorValue( |
| 3423 | prop->obj()->AsSuperPropertyReference()->home_object()); |
| 3424 | const Register scratch = r3; |
| 3425 | const Register scratch1 = r4; |
| 3426 | __ LoadRR(scratch, result_register()); |
| 3427 | VisitForAccumulatorValue(prop->key()); |
| 3428 | PushOperands(scratch, result_register()); |
| 3429 | __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize)); |
| 3430 | PushOperands(scratch1, scratch, result_register()); |
| 3431 | EmitKeyedSuperPropertyLoad(prop); |
| 3432 | break; |
| 3433 | } |
| 3434 | |
| 3435 | case KEYED_PROPERTY: { |
| 3436 | VisitForStackValue(prop->obj()); |
| 3437 | VisitForStackValue(prop->key()); |
| 3438 | __ LoadP(LoadDescriptor::ReceiverRegister(), |
| 3439 | MemOperand(sp, 1 * kPointerSize)); |
| 3440 | __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0)); |
| 3441 | EmitKeyedPropertyLoad(prop); |
| 3442 | break; |
| 3443 | } |
| 3444 | |
| 3445 | case VARIABLE: |
| 3446 | UNREACHABLE(); |
| 3447 | } |
| 3448 | } |
| 3449 | |
| 3450 | // We need a second deoptimization point after loading the value |
| 3451 |   // in case evaluating the property load may have a side effect.
| 3452 | if (assign_type == VARIABLE) { |
| 3453 | PrepareForBailout(expr->expression(), TOS_REG); |
| 3454 | } else { |
| 3455 | PrepareForBailoutForId(prop->LoadId(), TOS_REG); |
| 3456 | } |
| 3457 | |
| 3458 | // Inline smi case if we are in a loop. |
| 3459 | Label stub_call, done; |
| 3460 | JumpPatchSite patch_site(masm_); |
| 3461 | |
| 3462 | int count_value = expr->op() == Token::INC ? 1 : -1; |
| 3463 | if (ShouldInlineSmiCase(expr->op())) { |
| 3464 | Label slow; |
| 3465 | patch_site.EmitJumpIfNotSmi(r2, &slow); |
| 3466 | |
| 3467 | // Save result for postfix expressions. |
| 3468 | if (expr->is_postfix()) { |
| 3469 | if (!context()->IsEffect()) { |
| 3470 | // Save the result on the stack. If we have a named or keyed property |
| 3471 | // we store the result under the receiver that is currently on top |
| 3472 | // of the stack. |
| 3473 | switch (assign_type) { |
| 3474 | case VARIABLE: |
| 3475 | __ push(r2); |
| 3476 | break; |
| 3477 | case NAMED_PROPERTY: |
| 3478 | __ StoreP(r2, MemOperand(sp, kPointerSize)); |
| 3479 | break; |
| 3480 | case NAMED_SUPER_PROPERTY: |
| 3481 | __ StoreP(r2, MemOperand(sp, 2 * kPointerSize)); |
| 3482 | break; |
| 3483 | case KEYED_PROPERTY: |
| 3484 | __ StoreP(r2, MemOperand(sp, 2 * kPointerSize)); |
| 3485 | break; |
| 3486 | case KEYED_SUPER_PROPERTY: |
| 3487 | __ StoreP(r2, MemOperand(sp, 3 * kPointerSize)); |
| 3488 | break; |
| 3489 | } |
| 3490 | } |
| 3491 | } |
| 3492 | |
| 3493 | Register scratch1 = r3; |
| 3494 | Register scratch2 = r4; |
| 3495 | __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value)); |
| 3496 | __ AddAndCheckForOverflow(r2, r2, scratch1, scratch2, r0); |
| 3497 | __ BranchOnNoOverflow(&done); |
| 3498 | // Call stub. Undo operation first. |
| 3499 | __ SubP(r2, r2, scratch1); |
| 3500 | __ b(&stub_call); |
| 3501 | __ bind(&slow); |
| 3502 | } |
| 3503 | |
| 3504 | // Convert old value into a number. |
| 3505 | ToNumberStub convert_stub(isolate()); |
| 3506 | __ CallStub(&convert_stub); |
| 3507 | PrepareForBailoutForId(expr->ToNumberId(), TOS_REG); |
| 3508 | |
| 3509 | // Save result for postfix expressions. |
| 3510 | if (expr->is_postfix()) { |
| 3511 | if (!context()->IsEffect()) { |
| 3512 | // Save the result on the stack. If we have a named or keyed property |
| 3513 | // we store the result under the receiver that is currently on top |
| 3514 | // of the stack. |
| 3515 | switch (assign_type) { |
| 3516 | case VARIABLE: |
| 3517 | PushOperand(r2); |
| 3518 | break; |
| 3519 | case NAMED_PROPERTY: |
| 3520 | __ StoreP(r2, MemOperand(sp, kPointerSize)); |
| 3521 | break; |
| 3522 | case NAMED_SUPER_PROPERTY: |
| 3523 | __ StoreP(r2, MemOperand(sp, 2 * kPointerSize)); |
| 3524 | break; |
| 3525 | case KEYED_PROPERTY: |
| 3526 | __ StoreP(r2, MemOperand(sp, 2 * kPointerSize)); |
| 3527 | break; |
| 3528 | case KEYED_SUPER_PROPERTY: |
| 3529 | __ StoreP(r2, MemOperand(sp, 3 * kPointerSize)); |
| 3530 | break; |
| 3531 | } |
| 3532 | } |
| 3533 | } |
| 3534 | |
| 3535 | __ bind(&stub_call); |
| 3536 | __ LoadRR(r3, r2); |
| 3537 | __ LoadSmiLiteral(r2, Smi::FromInt(count_value)); |
| 3538 | |
| 3539 | SetExpressionPosition(expr); |
| 3540 | |
| 3541 | Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code(); |
| 3542 | CallIC(code, expr->CountBinOpFeedbackId()); |
| 3543 | patch_site.EmitPatchInfo(); |
| 3544 | __ bind(&done); |
| 3545 | |
| 3546 | // Store the value returned in r2. |
| 3547 | switch (assign_type) { |
| 3548 | case VARIABLE: |
| 3549 | if (expr->is_postfix()) { |
| 3550 | { |
| 3551 | EffectContext context(this); |
| 3552 | EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), |
| 3553 | Token::ASSIGN, expr->CountSlot()); |
| 3554 | PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 3555 | context.Plug(r2); |
| 3556 | } |
| 3557 |         // For all contexts except EffectContext we have the result on
| 3558 |         // top of the stack.
| 3559 | if (!context()->IsEffect()) { |
| 3560 | context()->PlugTOS(); |
| 3561 | } |
| 3562 | } else { |
| 3563 | EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), |
| 3564 | Token::ASSIGN, expr->CountSlot()); |
| 3565 | PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 3566 | context()->Plug(r2); |
| 3567 | } |
| 3568 | break; |
| 3569 | case NAMED_PROPERTY: { |
| 3570 | __ mov(StoreDescriptor::NameRegister(), |
| 3571 | Operand(prop->key()->AsLiteral()->value())); |
| 3572 | PopOperand(StoreDescriptor::ReceiverRegister()); |
| 3573 | EmitLoadStoreICSlot(expr->CountSlot()); |
| 3574 | CallStoreIC(); |
| 3575 | PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 3576 | if (expr->is_postfix()) { |
| 3577 | if (!context()->IsEffect()) { |
| 3578 | context()->PlugTOS(); |
| 3579 | } |
| 3580 | } else { |
| 3581 | context()->Plug(r2); |
| 3582 | } |
| 3583 | break; |
| 3584 | } |
| 3585 | case NAMED_SUPER_PROPERTY: { |
| 3586 | EmitNamedSuperPropertyStore(prop); |
| 3587 | if (expr->is_postfix()) { |
| 3588 | if (!context()->IsEffect()) { |
| 3589 | context()->PlugTOS(); |
| 3590 | } |
| 3591 | } else { |
| 3592 | context()->Plug(r2); |
| 3593 | } |
| 3594 | break; |
| 3595 | } |
| 3596 | case KEYED_SUPER_PROPERTY: { |
| 3597 | EmitKeyedSuperPropertyStore(prop); |
| 3598 | if (expr->is_postfix()) { |
| 3599 | if (!context()->IsEffect()) { |
| 3600 | context()->PlugTOS(); |
| 3601 | } |
| 3602 | } else { |
| 3603 | context()->Plug(r2); |
| 3604 | } |
| 3605 | break; |
| 3606 | } |
| 3607 | case KEYED_PROPERTY: { |
| 3608 | PopOperands(StoreDescriptor::ReceiverRegister(), |
| 3609 | StoreDescriptor::NameRegister()); |
| 3610 | Handle<Code> ic = |
| 3611 | CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); |
| 3612 | EmitLoadStoreICSlot(expr->CountSlot()); |
| 3613 | CallIC(ic); |
| 3614 | PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 3615 | if (expr->is_postfix()) { |
| 3616 | if (!context()->IsEffect()) { |
| 3617 | context()->PlugTOS(); |
| 3618 | } |
| 3619 | } else { |
| 3620 | context()->Plug(r2); |
| 3621 | } |
| 3622 | break; |
| 3623 | } |
| 3624 | } |
| 3625 | } |
| 3626 | |
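|      | // Compiles `typeof <sub_expr> == <string literal>` by dispatching on
|      | // the known literal and testing the value's map bits directly; the
|      | // typeof string itself is never materialized.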
| 3627 | void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, |
| 3628 | Expression* sub_expr, |
| 3629 | Handle<String> check) { |
| 3630 | Label materialize_true, materialize_false; |
| 3631 | Label* if_true = NULL; |
| 3632 | Label* if_false = NULL; |
| 3633 | Label* fall_through = NULL; |
| 3634 | context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 3635 | &if_false, &fall_through); |
| 3636 | |
| 3637 | { |
| 3638 | AccumulatorValueContext context(this); |
| 3639 | VisitForTypeofValue(sub_expr); |
| 3640 | } |
| 3641 | PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 3642 | |
| 3643 | Factory* factory = isolate()->factory(); |
| 3644 | if (String::Equals(check, factory->number_string())) { |
| 3645 | __ JumpIfSmi(r2, if_true); |
| 3646 | __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 3647 | __ CompareRoot(r2, Heap::kHeapNumberMapRootIndex); |
| 3648 | Split(eq, if_true, if_false, fall_through); |
| 3649 | } else if (String::Equals(check, factory->string_string())) { |
| 3650 | __ JumpIfSmi(r2, if_false); |
| 3651 | __ CompareObjectType(r2, r2, r3, FIRST_NONSTRING_TYPE); |
| 3652 | Split(lt, if_true, if_false, fall_through); |
| 3653 | } else if (String::Equals(check, factory->symbol_string())) { |
| 3654 | __ JumpIfSmi(r2, if_false); |
| 3655 | __ CompareObjectType(r2, r2, r3, SYMBOL_TYPE); |
| 3656 | Split(eq, if_true, if_false, fall_through); |
| 3657 | } else if (String::Equals(check, factory->boolean_string())) { |
| 3658 | __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
| 3659 | __ beq(if_true); |
| 3660 | __ CompareRoot(r2, Heap::kFalseValueRootIndex); |
| 3661 | Split(eq, if_true, if_false, fall_through); |
| 3662 | } else if (String::Equals(check, factory->undefined_string())) { |
| 3663 | __ CompareRoot(r2, Heap::kNullValueRootIndex); |
| 3664 | __ beq(if_false); |
| 3665 | __ JumpIfSmi(r2, if_false); |
| 3666 | // Check for undetectable objects => true. |
| 3667 | __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 3668 | __ tm(FieldMemOperand(r2, Map::kBitFieldOffset), |
| 3669 | Operand(1 << Map::kIsUndetectable)); |
| 3670 | Split(ne, if_true, if_false, fall_through); |
| 3671 | |
| 3672 | } else if (String::Equals(check, factory->function_string())) { |
| 3673 | __ JumpIfSmi(r2, if_false); |
| 3674 | __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 3675 | __ LoadlB(r3, FieldMemOperand(r2, Map::kBitFieldOffset)); |
| 3676 | __ AndP(r3, r3, |
| 3677 | Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); |
| 3678 | __ CmpP(r3, Operand(1 << Map::kIsCallable)); |
| 3679 | Split(eq, if_true, if_false, fall_through); |
| 3680 | } else if (String::Equals(check, factory->object_string())) { |
| 3681 | __ JumpIfSmi(r2, if_false); |
| 3682 | __ CompareRoot(r2, Heap::kNullValueRootIndex); |
| 3683 | __ beq(if_true); |
| 3684 | STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); |
| 3685 | __ CompareObjectType(r2, r2, r3, FIRST_JS_RECEIVER_TYPE); |
| 3686 | __ blt(if_false); |
| 3687 | __ tm(FieldMemOperand(r2, Map::kBitFieldOffset), |
| 3688 | Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); |
| 3689 | Split(eq, if_true, if_false, fall_through); |
| 3690 | // clang-format off |
| 3691 | #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \ |
| 3692 | } else if (String::Equals(check, factory->type##_string())) { \ |
| 3693 | __ JumpIfSmi(r2, if_false); \ |
| 3694 | __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); \ |
| 3695 | __ CompareRoot(r2, Heap::k##Type##MapRootIndex); \ |
| 3696 | Split(eq, if_true, if_false, fall_through); |
| 3697 | SIMD128_TYPES(SIMD128_TYPE) |
| 3698 | #undef SIMD128_TYPE |
| 3699 | // clang-format on |
| 3700 | } else { |
| 3701 | if (if_false != fall_through) __ b(if_false); |
| 3702 | } |
| 3703 | context()->Plug(if_true, if_false); |
| 3704 | } |
| 3705 | |
| 3706 | void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { |
| 3707 | Comment cmnt(masm_, "[ CompareOperation"); |
| 3708 | SetExpressionPosition(expr); |
| 3709 | |
| 3710 | // First we try a fast inlined version of the compare when one of |
| 3711 | // the operands is a literal. |
| 3712 | if (TryLiteralCompare(expr)) return; |
| 3713 | |
| 3714 | // Always perform the comparison for its control flow. Pack the result |
| 3715 | // into the expression's context after the comparison is performed. |
| 3716 | Label materialize_true, materialize_false; |
| 3717 | Label* if_true = NULL; |
| 3718 | Label* if_false = NULL; |
| 3719 | Label* fall_through = NULL; |
| 3720 | context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 3721 | &if_false, &fall_through); |
| 3722 | |
| 3723 | Token::Value op = expr->op(); |
| 3724 | VisitForStackValue(expr->left()); |
| 3725 | switch (op) { |
| 3726 | case Token::IN: |
| 3727 | VisitForStackValue(expr->right()); |
| 3728 | CallRuntimeWithOperands(Runtime::kHasProperty); |
| 3729 | PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); |
| 3730 | __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
| 3731 | Split(eq, if_true, if_false, fall_through); |
| 3732 | break; |
| 3733 | |
| 3734 | case Token::INSTANCEOF: { |
| 3735 | VisitForAccumulatorValue(expr->right()); |
| 3736 | PopOperand(r3); |
| 3737 | InstanceOfStub stub(isolate()); |
| 3738 | __ CallStub(&stub); |
| 3739 | PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); |
| 3740 | __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
| 3741 | Split(eq, if_true, if_false, fall_through); |
| 3742 | break; |
| 3743 | } |
| 3744 | |
| 3745 | default: { |
| 3746 | VisitForAccumulatorValue(expr->right()); |
| 3747 | Condition cond = CompareIC::ComputeCondition(op); |
| 3748 | PopOperand(r3); |
| 3749 | |
| 3750 | bool inline_smi_code = ShouldInlineSmiCase(op); |
| 3751 | JumpPatchSite patch_site(masm_); |
| 3752 | if (inline_smi_code) { |
| 3753 | Label slow_case; |
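|      |         // Smis have a clear low tag bit, so OR-ing the operands lets a
|      |         // single smi test cover both: the result looks like a smi only
|      |         // if both operands are smis.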
| 3754 | __ LoadRR(r4, r3); |
| 3755 | __ OrP(r4, r2); |
| 3756 | patch_site.EmitJumpIfNotSmi(r4, &slow_case); |
| 3757 | __ CmpP(r3, r2); |
| 3758 | Split(cond, if_true, if_false, NULL); |
| 3759 | __ bind(&slow_case); |
| 3760 | } |
| 3761 | |
| 3762 | Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code(); |
| 3763 | CallIC(ic, expr->CompareOperationFeedbackId()); |
| 3764 | patch_site.EmitPatchInfo(); |
| 3765 | PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 3766 | __ CmpP(r2, Operand::Zero()); |
| 3767 | Split(cond, if_true, if_false, fall_through); |
| 3768 | } |
| 3769 | } |
| 3770 | |
| 3771 | // Convert the result of the comparison into one expected for this |
| 3772 | // expression's context. |
| 3773 | context()->Plug(if_true, if_false); |
| 3774 | } |
| 3775 | |
| 3776 | void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, |
| 3777 | Expression* sub_expr, |
| 3778 | NilValue nil) { |
| 3779 | Label materialize_true, materialize_false; |
| 3780 | Label* if_true = NULL; |
| 3781 | Label* if_false = NULL; |
| 3782 | Label* fall_through = NULL; |
| 3783 | context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 3784 | &if_false, &fall_through); |
| 3785 | |
| 3786 | VisitForAccumulatorValue(sub_expr); |
| 3787 | PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 3788 | if (expr->op() == Token::EQ_STRICT) { |
| 3789 | Heap::RootListIndex nil_value = nil == kNullValue |
| 3790 | ? Heap::kNullValueRootIndex |
| 3791 | : Heap::kUndefinedValueRootIndex; |
| 3792 | __ CompareRoot(r2, nil_value); |
| 3793 | Split(eq, if_true, if_false, fall_through); |
| 3794 | } else { |
| 3795 | __ JumpIfSmi(r2, if_false); |
| 3796 | __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 3797 | __ LoadlB(r3, FieldMemOperand(r2, Map::kBitFieldOffset)); |
| 3798 | __ AndP(r0, r3, Operand(1 << Map::kIsUndetectable)); |
| 3799 | Split(ne, if_true, if_false, fall_through); |
| 3800 | } |
| 3801 | context()->Plug(if_true, if_false); |
| 3802 | } |
|      |
| 3803 | Register FullCodeGenerator::result_register() { return r2; }
| 3804 | |
| 3805 | Register FullCodeGenerator::context_register() { return cp; } |
| 3806 | |
| 3807 | void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) { |
| 3808 | DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset); |
| 3809 | __ LoadP(value, MemOperand(fp, frame_offset)); |
| 3810 | } |
| 3811 | |
| 3812 | void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { |
| 3813 | DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset); |
| 3814 | __ StoreP(value, MemOperand(fp, frame_offset)); |
| 3815 | } |
| 3816 | |
| 3817 | void FullCodeGenerator::LoadContextField(Register dst, int context_index) { |
| 3818 | __ LoadP(dst, ContextMemOperand(cp, context_index), r0); |
| 3819 | } |
| 3820 | |
| 3821 | void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { |
| 3822 | Scope* closure_scope = scope()->ClosureScope(); |
| 3823 | if (closure_scope->is_script_scope() || closure_scope->is_module_scope()) { |
| 3824 | // Contexts nested in the native context have a canonical empty function |
| 3825 | // as their closure, not the anonymous closure containing the global |
| 3826 | // code. |
| 3827 | __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip); |
| 3828 | } else if (closure_scope->is_eval_scope()) { |
| 3829 | // Contexts created by a call to eval have the same closure as the |
| 3830 | // context calling eval, not the anonymous closure containing the eval |
| 3831 | // code. Fetch it from the context. |
| 3832 | __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX)); |
| 3833 | } else { |
| 3834 | DCHECK(closure_scope->is_function_scope()); |
| 3835 | __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 3836 | } |
| 3837 | PushOperand(ip); |
| 3838 | } |
| 3839 | |
| 3840 | // ---------------------------------------------------------------------------- |
| 3841 | // Non-local control flow support. |
| 3842 | |
| 3843 | void FullCodeGenerator::EnterFinallyBlock() { |
| 3844 | DCHECK(!result_register().is(r3)); |
| 3845 | // Store pending message while executing finally block. |
| 3846 | ExternalReference pending_message_obj = |
| 3847 | ExternalReference::address_of_pending_message_obj(isolate()); |
| 3848 | __ mov(ip, Operand(pending_message_obj)); |
| 3849 | __ LoadP(r3, MemOperand(ip)); |
| 3850 | PushOperand(r3); |
| 3851 | |
| 3852 | ClearPendingMessage(); |
| 3853 | } |
| 3854 | |
| 3855 | void FullCodeGenerator::ExitFinallyBlock() { |
| 3856 | DCHECK(!result_register().is(r3)); |
| 3857 | // Restore pending message from stack. |
| 3858 | PopOperand(r3); |
| 3859 | ExternalReference pending_message_obj = |
| 3860 | ExternalReference::address_of_pending_message_obj(isolate()); |
| 3861 | __ mov(ip, Operand(pending_message_obj)); |
| 3862 | __ StoreP(r3, MemOperand(ip)); |
| 3863 | } |
| 3864 | |
| 3865 | void FullCodeGenerator::ClearPendingMessage() { |
| 3866 | DCHECK(!result_register().is(r3)); |
| 3867 | ExternalReference pending_message_obj = |
| 3868 | ExternalReference::address_of_pending_message_obj(isolate()); |
| 3869 | __ LoadRoot(r3, Heap::kTheHoleValueRootIndex); |
| 3870 | __ mov(ip, Operand(pending_message_obj)); |
| 3871 | __ StoreP(r3, MemOperand(ip)); |
| 3872 | } |
| 3873 | |
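|      | // Replays whichever deferred command (break, continue, return, rethrow)
|      | // was pending when control entered the finally block, selected by the
|      | // token popped into r3.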
| 3874 | void FullCodeGenerator::DeferredCommands::EmitCommands() { |
| 3875 | DCHECK(!result_register().is(r3)); |
| 3876 | // Restore the accumulator (r2) and token (r3). |
| 3877 | __ Pop(r3, result_register()); |
| 3878 | for (DeferredCommand cmd : commands_) { |
| 3879 | Label skip; |
| 3880 | __ CmpSmiLiteral(r3, Smi::FromInt(cmd.token), r0); |
| 3881 | __ bne(&skip); |
| 3882 | switch (cmd.command) { |
| 3883 | case kReturn: |
| 3884 | codegen_->EmitUnwindAndReturn(); |
| 3885 | break; |
| 3886 | case kThrow: |
| 3887 | __ Push(result_register()); |
| 3888 | __ CallRuntime(Runtime::kReThrow); |
| 3889 | break; |
| 3890 | case kContinue: |
| 3891 | codegen_->EmitContinue(cmd.target); |
| 3892 | break; |
| 3893 | case kBreak: |
| 3894 | codegen_->EmitBreak(cmd.target); |
| 3895 | break; |
| 3896 | } |
| 3897 | __ bind(&skip); |
| 3898 | } |
| 3899 | } |
| 3900 | |
| 3901 | #undef __ |
| 3902 | |
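|      | // Back edges are compiled as a conditional BRC followed by a call to
|      | // the interrupt stub. The constants below are full 4-byte BRC
|      | // encodings: condition 0xA (ge) while interrupt checks are active,
|      | // condition 0x0 (never taken) once patched for on-stack replacement;
|      | // the low 16 bits hold the branch offset in halfwords.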
| 3903 | #if V8_TARGET_ARCH_S390X |
| 3904 | static const FourByteInstr kInterruptBranchInstruction = 0xA7A40011; |
| 3905 | static const FourByteInstr kOSRBranchInstruction = 0xA7040011; |
| 3906 | static const int16_t kBackEdgeBranchOffset = 0x11 * 2; |
| 3907 | #else |
| 3908 | static const FourByteInstr kInterruptBranchInstruction = 0xA7A4000D; |
| 3909 | static const FourByteInstr kOSRBranchInstruction = 0xA704000D; |
| 3910 | static const int16_t kBackEdgeBranchOffset = 0xD * 2; |
| 3911 | #endif |
| 3912 | |
| 3913 | void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc, |
| 3914 | BackEdgeState target_state, |
| 3915 | Code* replacement_code) { |
| 3916 | Address call_address = Assembler::target_address_from_return_address(pc); |
| 3917 | Address branch_address = call_address - 4; |
| 3918 | Isolate* isolate = unoptimized_code->GetIsolate(); |
| 3919 | CodePatcher patcher(isolate, branch_address, 4); |
| 3920 | |
| 3921 | switch (target_state) { |
| 3922 | case INTERRUPT: { |
| 3923 | // <decrement profiling counter> |
| 3924 | // bge <ok> ;; patched to GE BRC |
| 3925 |       //  brasl    r14, <interrupt stub address>
| 3926 | // <reset profiling counter> |
| 3927 | // ok-label |
| 3928 | patcher.masm()->brc(ge, Operand(kBackEdgeBranchOffset)); |
| 3929 | break; |
| 3930 | } |
| 3931 | case ON_STACK_REPLACEMENT: |
| 3932 | // <decrement profiling counter> |
| 3933 | // brc 0x0, <ok> ;; patched to NOP BRC |
| 3934 |       //  brasl    r14, <interrupt stub address>
| 3935 | // <reset profiling counter> |
| 3936 | // ok-label ----- pc_after points here |
| 3937 | patcher.masm()->brc(CC_NOP, Operand(kBackEdgeBranchOffset)); |
| 3938 | break; |
| 3939 | } |
| 3940 | |
| 3941 |   // Replace the stack check address in the call sequence with the
| 3942 |   // entry address of the replacement code.
| 3943 | Assembler::set_target_address_at(isolate, call_address, unoptimized_code, |
| 3944 | replacement_code->entry()); |
| 3945 | |
| 3946 | unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 3947 | unoptimized_code, call_address, replacement_code); |
| 3948 | } |
| 3949 | |
| 3950 | BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( |
| 3951 | Isolate* isolate, Code* unoptimized_code, Address pc) { |
| 3952 | Address call_address = Assembler::target_address_from_return_address(pc); |
| 3953 | Address branch_address = call_address - 4; |
| 3954 | #ifdef DEBUG |
| 3955 | Address interrupt_address = |
| 3956 | Assembler::target_address_at(call_address, unoptimized_code); |
| 3957 | #endif |
| 3958 | |
| 3959 | DCHECK(BRC == Instruction::S390OpcodeValue(branch_address)); |
| 3960 | // For interrupt, we expect a branch greater than or equal |
| 3961 | // i.e. BRC 0xa, +XXXX (0xA7A4XXXX) |
| 3962 | FourByteInstr br_instr = Instruction::InstructionBits( |
| 3963 | reinterpret_cast<const byte*>(branch_address)); |
| 3964 | if (kInterruptBranchInstruction == br_instr) { |
| 3965 | DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry()); |
| 3966 | return INTERRUPT; |
| 3967 | } |
| 3968 | |
| 3969 | // Expect BRC to be patched to NOP branch. |
| 3970 | // i.e. BRC 0x0, +XXXX (0xA704XXXX) |
| 3971 | USE(kOSRBranchInstruction); |
| 3972 | DCHECK(kOSRBranchInstruction == br_instr); |
| 3973 | |
| 3974 | DCHECK(interrupt_address == |
| 3975 | isolate->builtins()->OnStackReplacement()->entry()); |
| 3976 | return ON_STACK_REPLACEMENT; |
| 3977 | } |
| 3978 | |
| 3979 | } // namespace internal |
| 3980 | } // namespace v8 |
| 3981 | #endif // V8_TARGET_ARCH_S390 |