// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
| 4 | |
| 5 | #include "src/v8.h" |
| 6 | |
| 7 | #if V8_TARGET_ARCH_MIPS64 |
| 8 | |
| 9 | #include "src/ic/call-optimization.h" |
| 10 | #include "src/ic/handler-compiler.h" |
| 11 | #include "src/ic/ic.h" |
| 12 | |
| 13 | namespace v8 { |
| 14 | namespace internal { |
| 15 | |
| 16 | #define __ ACCESS_MASM(masm) |
| 17 | |
| 18 | |
// Emits the load path that invokes a JavaScript getter function.
// If |getter| is null, no call is emitted; instead the current pc offset is
// recorded as the continuation point after deoptimization.
void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<HeapType> type, Register receiver,
    Handle<JSFunction> getter) {
  // ----------- S t a t e -------------
  //  -- a0 : receiver
  //  -- a2 : name
  //  -- ra : return address
  // -----------------------------------
  {
    // An internal frame makes the pushed receiver visible to the GC.
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver (the global proxy) so the getter never
        // sees the raw global object.
        __ ld(receiver,
              FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
      }
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual, CALL_FUNCTION,
                        NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register after the call clobbered it.
    __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}
| 53 | |
| 54 | |
// Emits the store path that invokes a JavaScript setter function.
// If |setter| is null, no call is emitted; instead the current pc offset is
// recorded as the continuation point after deoptimization. Per the language
// contract, the stub returns the stored value, not the setter's result.
void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<HeapType> type, Register receiver,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- ra : return address
  // -----------------------------------
  {
    // An internal frame makes the pushed values visible to the GC.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ push(value());

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver (the global proxy) so the setter never
        // sees the raw global object.
        __ ld(receiver,
              FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
      }
      __ Push(receiver, value());
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual, CALL_FUNCTION,
                        NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(v0);

    // Restore context register after the call clobbered it.
    __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}
| 93 | |
| 94 | |
// Probes the receiver's property dictionary to prove that |name| is absent.
// Jumps to |miss_label| when the property might exist or the receiver is not
// eligible for this fast check (interceptor, access checks, non-JSObject, or
// non-dictionary properties). |scratch0| and |scratch1| are clobbered.
void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(name->IsUniqueName());
  DCHECK(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));

  // Check that receiver is a JSObject.
  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(miss_label, lt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));

  // Load properties array.
  Register properties = scratch0;
  __ ld(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary (hash table map).
  __ ld(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  // Restore the temporarily used register (tmp aliased properties above).
  __ ld(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));


  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ bind(&done);
  // Reaching here means the lookup proved absence; undo the miss count.
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}
| 138 | |
| 139 | |
// Loads the prototype of the global function at |index| directly from its
// initial map, after verifying that the current native context still holds
// the expected function (otherwise jumps to |miss|). |prototype| doubles as a
// scratch register during the context check.
void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register prototype, Label* miss) {
  Isolate* isolate = masm->isolate();
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));

  // Check we're still in the same context.
  Register scratch = prototype;
  const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
  __ ld(scratch, MemOperand(cp, offset));
  __ ld(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
  __ ld(scratch, MemOperand(scratch, Context::SlotOffset(index)));
  __ li(at, function);
  __ Branch(miss, ne, at, Operand(scratch));

  // Load its initial map. The global functions all have initial maps.
  __ li(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ ld(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}
| 161 | |
| 162 | |
// Loads the "prototype" property of a function receiver into v0 and returns.
// Jumps to |miss_label| if the receiver is not a suitable function.
void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register scratch1,
    Register scratch2, Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  // The mov executes in the branch delay slot of the Ret.
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, scratch1);
}
| 170 | |
| 171 | |
| 172 | // Generate code to check that a global property cell is empty. Create |
| 173 | // the property cell at compilation time if no cell exists for the |
| 174 | // property. |
| 175 | void PropertyHandlerCompiler::GenerateCheckPropertyCell( |
| 176 | MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name, |
| 177 | Register scratch, Label* miss) { |
| 178 | Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name); |
| 179 | DCHECK(cell->value()->IsTheHole()); |
| 180 | __ li(scratch, Operand(cell)); |
| 181 | __ ld(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); |
| 182 | __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
| 183 | __ Branch(miss, ne, scratch, Operand(at)); |
| 184 | } |
| 185 | |
| 186 | |
// Pushes the four interceptor-call arguments (name, interceptor info,
// receiver, holder) on the stack in the order the interceptor runtime
// entries expect. Clobbers |name| by reusing it as a scratch register.
static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  // Stack layout must match the kInterceptorArgs* indices.
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4);
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  // The interceptor info must not move; new-space objects cannot be embedded.
  DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ li(scratch, Operand(interceptor));
  __ Push(scratch, receiver, holder);
}
| 202 | |
| 203 | |
// Pushes the interceptor arguments and calls the IC utility |id| to run the
// named-property interceptor for a load.
static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()),
                           NamedLoadHandlerCompiler::kInterceptorArgsLength);
}
| 211 | |
| 212 | |
| 213 | // Generate call to api function. |
| 214 | void PropertyHandlerCompiler::GenerateFastApiCall( |
| 215 | MacroAssembler* masm, const CallOptimization& optimization, |
| 216 | Handle<Map> receiver_map, Register receiver, Register scratch_in, |
| 217 | bool is_store, int argc, Register* values) { |
| 218 | DCHECK(!receiver.is(scratch_in)); |
| 219 | // Preparing to push, adjust sp. |
| 220 | __ Dsubu(sp, sp, Operand((argc + 1) * kPointerSize)); |
| 221 | __ sd(receiver, MemOperand(sp, argc * kPointerSize)); // Push receiver. |
| 222 | // Write the arguments to stack frame. |
| 223 | for (int i = 0; i < argc; i++) { |
| 224 | Register arg = values[argc - 1 - i]; |
| 225 | DCHECK(!receiver.is(arg)); |
| 226 | DCHECK(!scratch_in.is(arg)); |
| 227 | __ sd(arg, MemOperand(sp, (argc - 1 - i) * kPointerSize)); // Push arg. |
| 228 | } |
| 229 | DCHECK(optimization.is_simple_api_call()); |
| 230 | |
| 231 | // Abi for CallApiFunctionStub. |
| 232 | Register callee = a0; |
| 233 | Register call_data = a4; |
| 234 | Register holder = a2; |
| 235 | Register api_function_address = a1; |
| 236 | |
| 237 | // Put holder in place. |
| 238 | CallOptimization::HolderLookup holder_lookup; |
| 239 | Handle<JSObject> api_holder = |
| 240 | optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup); |
| 241 | switch (holder_lookup) { |
| 242 | case CallOptimization::kHolderIsReceiver: |
| 243 | __ Move(holder, receiver); |
| 244 | break; |
| 245 | case CallOptimization::kHolderFound: |
| 246 | __ li(holder, api_holder); |
| 247 | break; |
| 248 | case CallOptimization::kHolderNotFound: |
| 249 | UNREACHABLE(); |
| 250 | break; |
| 251 | } |
| 252 | |
| 253 | Isolate* isolate = masm->isolate(); |
| 254 | Handle<JSFunction> function = optimization.constant_function(); |
| 255 | Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); |
| 256 | Handle<Object> call_data_obj(api_call_info->data(), isolate); |
| 257 | |
| 258 | // Put callee in place. |
| 259 | __ li(callee, function); |
| 260 | |
| 261 | bool call_data_undefined = false; |
| 262 | // Put call_data in place. |
| 263 | if (isolate->heap()->InNewSpace(*call_data_obj)) { |
| 264 | __ li(call_data, api_call_info); |
| 265 | __ ld(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset)); |
| 266 | } else if (call_data_obj->IsUndefined()) { |
| 267 | call_data_undefined = true; |
| 268 | __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex); |
| 269 | } else { |
| 270 | __ li(call_data, call_data_obj); |
| 271 | } |
| 272 | // Put api_function_address in place. |
| 273 | Address function_address = v8::ToCData<Address>(api_call_info->callback()); |
| 274 | ApiFunction fun(function_address); |
| 275 | ExternalReference::Type type = ExternalReference::DIRECT_API_CALL; |
| 276 | ExternalReference ref = ExternalReference(&fun, type, masm->isolate()); |
| 277 | __ li(api_function_address, Operand(ref)); |
| 278 | |
| 279 | // Jump to stub. |
| 280 | CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc); |
| 281 | __ TailCallStub(&stub); |
| 282 | } |
| 283 | |
| 284 | |
// Tail-calls the StoreIC slow-path runtime entry with (receiver, name, value)
// pushed as arguments.
void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
  // Push receiver, key and value for runtime call.
  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
          StoreDescriptor::ValueRegister());

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
| 296 | |
| 297 | |
// Tail-calls the KeyedStoreIC slow-path runtime entry with
// (receiver, key, value) pushed as arguments.
void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
  // Push receiver, key and value for runtime call.
  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
          StoreDescriptor::ValueRegister());

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
| 309 | |
| 310 | |
| 311 | #undef __ |
| 312 | #define __ ACCESS_MASM(masm()) |
| 313 | |
| 314 | |
// If |label| was ever branched to, binds it and reloads the name register
// with |name| so the miss path sees the original property name.
void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ li(this->name(), Operand(name));
  }
}
| 322 | |
| 323 | |
| 324 | // Generate StoreTransition code, value is passed in a0 register. |
| 325 | // After executing generated code, the receiver_reg and name_reg |
| 326 | // may be clobbered. |
| 327 | void NamedStoreHandlerCompiler::GenerateStoreTransition( |
| 328 | Handle<Map> transition, Handle<Name> name, Register receiver_reg, |
| 329 | Register storage_reg, Register value_reg, Register scratch1, |
| 330 | Register scratch2, Register scratch3, Label* miss_label, Label* slow) { |
| 331 | // a0 : value. |
| 332 | Label exit; |
| 333 | |
| 334 | int descriptor = transition->LastAdded(); |
| 335 | DescriptorArray* descriptors = transition->instance_descriptors(); |
| 336 | PropertyDetails details = descriptors->GetDetails(descriptor); |
| 337 | Representation representation = details.representation(); |
| 338 | DCHECK(!representation.IsNone()); |
| 339 | |
| 340 | if (details.type() == CONSTANT) { |
| 341 | Handle<Object> constant(descriptors->GetValue(descriptor), isolate()); |
| 342 | __ li(scratch1, constant); |
| 343 | __ Branch(miss_label, ne, value_reg, Operand(scratch1)); |
| 344 | } else if (representation.IsSmi()) { |
| 345 | __ JumpIfNotSmi(value_reg, miss_label); |
| 346 | } else if (representation.IsHeapObject()) { |
| 347 | __ JumpIfSmi(value_reg, miss_label); |
| 348 | HeapType* field_type = descriptors->GetFieldType(descriptor); |
| 349 | HeapType::Iterator<Map> it = field_type->Classes(); |
| 350 | Handle<Map> current; |
| 351 | if (!it.Done()) { |
| 352 | __ ld(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); |
| 353 | Label do_store; |
| 354 | while (true) { |
| 355 | // Do the CompareMap() directly within the Branch() functions. |
| 356 | current = it.Current(); |
| 357 | it.Advance(); |
| 358 | if (it.Done()) { |
| 359 | __ Branch(miss_label, ne, scratch1, Operand(current)); |
| 360 | break; |
| 361 | } |
| 362 | __ Branch(&do_store, eq, scratch1, Operand(current)); |
| 363 | } |
| 364 | __ bind(&do_store); |
| 365 | } |
| 366 | } else if (representation.IsDouble()) { |
| 367 | Label do_store, heap_number; |
| 368 | __ LoadRoot(scratch3, Heap::kMutableHeapNumberMapRootIndex); |
| 369 | __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow, |
| 370 | TAG_RESULT, MUTABLE); |
| 371 | |
| 372 | __ JumpIfNotSmi(value_reg, &heap_number); |
| 373 | __ SmiUntag(scratch1, value_reg); |
| 374 | __ mtc1(scratch1, f6); |
| 375 | __ cvt_d_w(f4, f6); |
| 376 | __ jmp(&do_store); |
| 377 | |
| 378 | __ bind(&heap_number); |
| 379 | __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, miss_label, |
| 380 | DONT_DO_SMI_CHECK); |
| 381 | __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); |
| 382 | |
| 383 | __ bind(&do_store); |
| 384 | __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset)); |
| 385 | } |
| 386 | |
| 387 | // Stub never generated for objects that require access checks. |
| 388 | DCHECK(!transition->is_access_check_needed()); |
| 389 | |
| 390 | // Perform map transition for the receiver if necessary. |
| 391 | if (details.type() == FIELD && |
| 392 | Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) { |
| 393 | // The properties must be extended before we can store the value. |
| 394 | // We jump to a runtime call that extends the properties array. |
| 395 | __ push(receiver_reg); |
| 396 | __ li(a2, Operand(transition)); |
| 397 | __ Push(a2, a0); |
| 398 | __ TailCallExternalReference( |
| 399 | ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), |
| 400 | isolate()), |
| 401 | 3, 1); |
| 402 | return; |
| 403 | } |
| 404 | |
| 405 | // Update the map of the object. |
| 406 | __ li(scratch1, Operand(transition)); |
| 407 | __ sd(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); |
| 408 | |
| 409 | // Update the write barrier for the map field. |
| 410 | __ RecordWriteField(receiver_reg, HeapObject::kMapOffset, scratch1, scratch2, |
| 411 | kRAHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET, |
| 412 | OMIT_SMI_CHECK); |
| 413 | |
| 414 | if (details.type() == CONSTANT) { |
| 415 | DCHECK(value_reg.is(a0)); |
| 416 | __ Ret(USE_DELAY_SLOT); |
| 417 | __ mov(v0, a0); |
| 418 | return; |
| 419 | } |
| 420 | |
| 421 | int index = transition->instance_descriptors()->GetFieldIndex( |
| 422 | transition->LastAdded()); |
| 423 | |
| 424 | // Adjust for the number of properties stored in the object. Even in the |
| 425 | // face of a transition we can use the old map here because the size of the |
| 426 | // object and the number of in-object properties is not going to change. |
| 427 | index -= transition->inobject_properties(); |
| 428 | |
| 429 | // TODO(verwaest): Share this code as a code stub. |
| 430 | SmiCheck smi_check = |
| 431 | representation.IsTagged() ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; |
| 432 | if (index < 0) { |
| 433 | // Set the property straight into the object. |
| 434 | int offset = transition->instance_size() + (index * kPointerSize); |
| 435 | if (representation.IsDouble()) { |
| 436 | __ sd(storage_reg, FieldMemOperand(receiver_reg, offset)); |
| 437 | } else { |
| 438 | __ sd(value_reg, FieldMemOperand(receiver_reg, offset)); |
| 439 | } |
| 440 | |
| 441 | if (!representation.IsSmi()) { |
| 442 | // Update the write barrier for the array address. |
| 443 | if (!representation.IsDouble()) { |
| 444 | __ mov(storage_reg, value_reg); |
| 445 | } |
| 446 | __ RecordWriteField(receiver_reg, offset, storage_reg, scratch1, |
| 447 | kRAHasNotBeenSaved, kDontSaveFPRegs, |
| 448 | EMIT_REMEMBERED_SET, smi_check); |
| 449 | } |
| 450 | } else { |
| 451 | // Write to the properties array. |
| 452 | int offset = index * kPointerSize + FixedArray::kHeaderSize; |
| 453 | // Get the properties array |
| 454 | __ ld(scratch1, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); |
| 455 | if (representation.IsDouble()) { |
| 456 | __ sd(storage_reg, FieldMemOperand(scratch1, offset)); |
| 457 | } else { |
| 458 | __ sd(value_reg, FieldMemOperand(scratch1, offset)); |
| 459 | } |
| 460 | |
| 461 | if (!representation.IsSmi()) { |
| 462 | // Update the write barrier for the array address. |
| 463 | if (!representation.IsDouble()) { |
| 464 | __ mov(storage_reg, value_reg); |
| 465 | } |
| 466 | __ RecordWriteField(scratch1, offset, storage_reg, receiver_reg, |
| 467 | kRAHasNotBeenSaved, kDontSaveFPRegs, |
| 468 | EMIT_REMEMBERED_SET, smi_check); |
| 469 | } |
| 470 | } |
| 471 | |
| 472 | // Return the value (register v0). |
| 473 | DCHECK(value_reg.is(a0)); |
| 474 | __ bind(&exit); |
| 475 | __ Ret(USE_DELAY_SLOT); |
| 476 | __ mov(v0, a0); |
| 477 | } |
| 478 | |
| 479 | |
// Emits a same-map field store: type-checks the value against the field's
// allowed heap-object maps, then tail-calls the shared StoreFieldStub to do
// the actual store. Jumps to |miss_label| on any type mismatch.
void NamedStoreHandlerCompiler::GenerateStoreField(LookupIterator* lookup,
                                                   Register value_reg,
                                                   Label* miss_label) {
  // This path is only generated for heap-object field representations.
  DCHECK(lookup->representation().IsHeapObject());
  __ JumpIfSmi(value_reg, miss_label);
  HeapType::Iterator<Map> it = lookup->GetFieldType()->Classes();
  __ ld(scratch1(), FieldMemOperand(value_reg, HeapObject::kMapOffset));
  Label do_store;
  Handle<Map> current;
  while (true) {
    // Do the CompareMap() directly within the Branch() functions.
    current = it.Current();
    it.Advance();
    if (it.Done()) {
      // Last candidate map: mismatch means miss.
      __ Branch(miss_label, ne, scratch1(), Operand(current));
      break;
    }
    __ Branch(&do_store, eq, scratch1(), Operand(current));
  }
  __ bind(&do_store);

  StoreFieldStub stub(isolate(), lookup->GetFieldIndex(),
                      lookup->representation());
  GenerateTailCall(masm(), stub.GetCode());
}
| 505 | |
| 506 | |
// Walks the prototype chain from the receiver to the holder, emitting map
// checks (or negative dictionary lookups for slow-mode objects) at each hop.
// Jumps to |miss| if any check fails. Returns the register that holds the
// holder object when the generated code falls through.
Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss,
    PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate()));

  // Make sure there's no overlap between holder and object registers.
  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (type()->IsConstant()) {
    current = Handle<JSObject>::cast(type()->AsConstant()->Value());
  }
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    DCHECK(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      if (!name->IsUniqueName()) {
        DCHECK(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      // The negative lookup below is only valid if the property really is
      // absent from the dictionary at compile time.
      DCHECK(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
                 NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);

      __ ld(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ ld(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      // Two possible reasons for loading the prototype from the map:
      // (1) Can't store references to new space in code.
      // (2) Handler is shared for all receivers with the same prototype
      //     map (but not necessarily the same prototype instance).
      bool load_prototype_from_map =
          heap()->InNewSpace(*prototype) || depth == 1;
      Register map_reg = scratch1;
      if (depth != 1 || check == CHECK_ALL_MAPS) {
        // CheckMap implicitly loads the map of |reg| into |map_reg|.
        __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK);
      } else {
        __ ld(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      // This allows us to install generated handlers for accesses to the
      // global proxy (as opposed to using slow ICs). See corresponding code
      // in LookupForRead().
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                  name, scratch2, miss);
      }

      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (load_prototype_from_map) {
        __ ld(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      } else {
        __ li(reg, Operand(prototype));
      }
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (depth != 0 || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  DCHECK(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // Return the register containing the holder.
  return reg;
}
| 617 | |
| 618 | |
// Binds the miss label (if used) and routes misses to the load IC's miss
// builtin; the success path jumps over that tail call.
void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    __ bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}
| 628 | |
| 629 | |
// Binds the miss label (if used), restores the name register (which
// GenerateRestoreName also binds the label for), and routes misses to the
// store IC's miss builtin; the success path jumps over that tail call.
void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    GenerateRestoreName(miss, name);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}
| 639 | |
| 640 | |
// Loads a compile-time constant property value into v0 and returns.
void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ li(v0, value);
  __ Ret();
}
| 646 | |
| 647 | |
// Emits a load through an API accessor (ExecutableAccessorInfo): builds the
// PropertyCallbackArguments block on the stack, loads the C++ getter address,
// and tail-calls CallApiGetterStub. |reg| holds the holder object.
void NamedLoadHandlerCompiler::GenerateLoadCallback(
    Register reg, Handle<ExecutableAccessorInfo> callback) {
  // Build AccessorInfo::args_ list on the stack and push property name below
  // the exit frame to make GC aware of them and store pointers to them.
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
  DCHECK(!scratch2().is(reg));
  DCHECK(!scratch3().is(reg));
  DCHECK(!scratch4().is(reg));
  __ push(receiver());
  // Load the callback data; new-space data cannot be embedded in code, so it
  // is reloaded from the AccessorInfo object at runtime.
  if (heap()->InNewSpace(callback->data())) {
    __ li(scratch3(), callback);
    __ ld(scratch3(),
          FieldMemOperand(scratch3(), ExecutableAccessorInfo::kDataOffset));
  } else {
    __ li(scratch3(), Handle<Object>(callback->data(), isolate()));
  }
  // Reserve six slots and fill them per the kArgsLength layout asserted above.
  __ Dsubu(sp, sp, 6 * kPointerSize);
  __ sd(scratch3(), MemOperand(sp, 5 * kPointerSize));  // data
  __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
  __ sd(scratch3(), MemOperand(sp, 4 * kPointerSize));  // return value
  __ sd(scratch3(), MemOperand(sp, 3 * kPointerSize));  // return value default
  __ li(scratch4(), Operand(ExternalReference::isolate_address(isolate())));
  __ sd(scratch4(), MemOperand(sp, 2 * kPointerSize));  // isolate
  __ sd(reg, MemOperand(sp, 1 * kPointerSize));         // holder
  __ sd(name(), MemOperand(sp, 0 * kPointerSize));      // property name
  __ Daddu(scratch2(), sp, 1 * kPointerSize);

  __ mov(a2, scratch2());  // Saved in case scratch2 == a1.
  // Abi for CallApiGetter.
  Register getter_address_reg = ApiGetterDescriptor::function_address();

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ li(getter_address_reg, Operand(ref));

  CallApiGetterStub stub(isolate());
  __ TailCallStub(&stub);
}
| 694 | |
| 695 | |
// Emits an interceptor load with an inline fallback: first calls the
// interceptor in "only" mode; if it produced a value, returns it immediately,
// otherwise restores the saved registers and continues with the regular
// post-interceptor load for the property found by |it|.
void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from the
  // holder and it is needed should the interceptor return without any result.
  // The ACCESSOR case needs the receiver to be passed into C++ code, the FIELD
  // case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    // Invoke an interceptor.  Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method).
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        IC::kLoadPropertyWithInterceptorOnly);

    // Check if interceptor provided a value for property.  If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
    __ Branch(&interceptor_failed, eq, v0, Operand(scratch1()));
    frame_scope.GenerateLeaveFrame();
    __ Ret();

    __ bind(&interceptor_failed);
    // Restore exactly what was pushed above, in reverse.
    if (must_preserve_receiver_reg) {
      __ Pop(receiver(), holder_reg, this->name());
    } else {
      __ Pop(holder_reg, this->name());
    }
    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}
| 751 | |
| 752 | |
// Emits a load that unconditionally defers to the runtime: pushes the
// interceptor call arguments and tail-calls the
// kLoadPropertyWithInterceptor IC utility, which performs the interceptor
// call (and any follow-up lookup) in C++.
void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
                           holder());

  // Tail call: the runtime function returns directly to our caller.
  ExternalReference ref = ExternalReference(
      IC_Utility(IC::kLoadPropertyWithInterceptor), isolate());
  __ TailCallExternalReference(
      ref, NamedLoadHandlerCompiler::kInterceptorArgsLength, 1);
}
| 765 | |
| 766 | |
// Compiles a store handler that routes the store through an
// ExecutableAccessorInfo callback. Emits the map/prototype checks via
// Frontend(), pushes the five runtime arguments
// (receiver, holder, callback info, name, value) and tail-calls the
// kStoreCallbackProperty IC utility. Returns the generated handler code.
Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Register holder_reg = Frontend(receiver(), name);

  // Push arguments in the order expected by kStoreCallbackProperty;
  // `at` is used as a temporary for materializing handles.
  __ Push(receiver(), holder_reg);  // Receiver.
  __ li(at, Operand(callback));     // Callback info.
  __ push(at);
  __ li(at, Operand(name));
  __ Push(at, value());

  // Do tail-call to the runtime system (5 arguments, 1 result).
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}
| 786 | |
| 787 | |
// Compiles a store handler for an object with a named interceptor:
// pushes (receiver, name, value) and tail-calls the
// kStorePropertyWithInterceptor IC utility, which invokes the interceptor
// in C++. Returns the generated handler code.
Handle<Code> NamedStoreHandlerCompiler::CompileStoreInterceptor(
    Handle<Name> name) {
  __ Push(receiver(), this->name(), value());

  // Do tail-call to the runtime system (3 arguments, 1 result).
  ExternalReference store_ic_property = ExternalReference(
      IC_Utility(IC::kStorePropertyWithInterceptor), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}
| 800 | |
| 801 | |
| 802 | Register NamedStoreHandlerCompiler::value() { |
| 803 | return StoreDescriptor::ValueRegister(); |
| 804 | } |
| 805 | |
| 806 | |
// Compiles a load handler for a global property backed by a PropertyCell:
// after the receiver checks, loads the cell's value directly and (for
// configurable properties) misses if the cell holds the hole, i.e. the
// property was deleted. Returns the generated handler code.
Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;

  FrontendHeader(receiver(), name, &miss);

  // Get the value from the cell. The store-IC value register is free here
  // and is reused as the result scratch — TODO confirm it cannot alias
  // registers FrontendHeader still needs.
  Register result = StoreDescriptor::ValueRegister();
  __ li(result, Operand(cell));
  __ ld(result, FieldMemOperand(result, Cell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (is_configurable) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&miss, eq, result, Operand(at));
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
  // Move the result into v0 in the return's branch delay slot.
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, result);

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}
| 834 | |
| 835 | |
| 836 | #undef __ |
| 837 | } |
| 838 | } // namespace v8::internal |
| 839 | |
| 840 | #endif // V8_TARGET_ARCH_MIPS64 |