// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

#include "src/ic/handler-compiler.h"

#include "src/api-arguments.h"
#include "src/field-type.h"
#include "src/ic/call-optimization.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


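// Generates a call to a JavaScript getter: the receiver (swapped for the
// global proxy on global objects) is pushed, the getter is loaded from
// |holder| at |accessor_index| and invoked with zero arguments through the
// CallFunction builtin. A negative |accessor_index| only records the
// deoptimization continuation point.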
void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- a0    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context register
    __ push(cp);

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      // Call the JavaScript getter with the receiver on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ ld(scratch,
              FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ push(receiver);
      __ LoadAccessor(a1, holder, accessor_index, ACCESSOR_GETTER);
      __ li(a0, Operand(V8_INT64_C(0)));
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ pop(cp);
  }
  __ Ret();
}


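// Generates a call to a JavaScript setter: receiver and value are pushed,
// the setter is loaded from |holder| at |accessor_index| and invoked through
// the CallFunction builtin. The original value is saved and returned in v0,
// since a store must produce the stored value, not the setter's result.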
void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context and value registers, so we can restore them later.
    __ Push(cp, value());

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      DCHECK(!value().is(scratch));
      // Call the JavaScript setter with receiver and value on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ ld(scratch,
              FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver, value());
      __ LoadAccessor(a1, holder, accessor_index, ACCESSOR_SETTER);
      __ li(a0, Operand(1));
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    // Restore context register.
    __ Pop(cp, v0);
  }
  __ Ret();
}


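// Helpers for saving, restoring and discarding the type feedback vector and
// slot that some handlers keep on the stack.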
void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                Register slot) {
  MacroAssembler* masm = this->masm();
  __ Push(vector, slot);
}


void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
  MacroAssembler* masm = this->masm();
  __ Pop(vector, slot);
}


void PropertyHandlerCompiler::DiscardVectorAndSlot() {
  MacroAssembler* masm = this->masm();
  // Remove vector and slot.
  __ Daddu(sp, sp, Operand(2 * kPointerSize));
}


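// Proves that |name| is not present on a dictionary-mode |receiver|. Jumps to
// |miss_label| if the receiver has a named interceptor or needs access
// checks, is not a JS receiver, does not use a dictionary backing store, or
// if the negative dictionary lookup fails.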
void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(name->IsUniqueName());
  DCHECK(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access
  // checks.
  Register map = scratch1;
  __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));

  // Check that receiver is a JSObject.
  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(miss_label, lt, scratch0, Operand(FIRST_JS_RECEIVER_TYPE));

  // Load properties array.
  Register properties = scratch0;
  __ ld(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ ld(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  // Restore the temporarily used register.
  __ ld(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register result, Label* miss) {
  __ LoadNativeContextSlot(index, result);
  // Load its initial map. The global functions all have initial maps.
  __ ld(result,
        FieldMemOperand(result, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ld(result, FieldMemOperand(result, Map::kPrototypeOffset));
}


void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register scratch1,
    Register scratch2, Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, scratch1);
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  DCHECK(cell->value()->IsTheHole());
  Handle<WeakCell> weak_cell = masm->isolate()->factory()->NewWeakCell(cell);
  __ LoadWeakValue(scratch, weak_cell, miss);
  __ ld(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
}


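// Pushes the arguments expected by the LoadPropertyWithInterceptor runtime
// entries: name, receiver and holder (see the kInterceptorArgs* indices).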
static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 3);
  __ Push(name, receiver, holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, Runtime::FunctionId id) {
  DCHECK(NamedLoadHandlerCompiler::kInterceptorArgsLength ==
         Runtime::FunctionForId(id)->nargs);
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallRuntime(id);
}


// Generate call to api function.
void PropertyHandlerCompiler::GenerateApiAccessorCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch_in,
    bool is_store, Register store_parameter, Register accessor_holder,
    int accessor_index) {
  DCHECK(!accessor_holder.is(scratch_in));
  DCHECK(!receiver.is(scratch_in));
  __ push(receiver);
  // Write the arguments to stack frame.
  if (is_store) {
    DCHECK(!receiver.is(store_parameter));
    DCHECK(!scratch_in.is(store_parameter));
    __ push(store_parameter);
  }
  DCHECK(optimization.is_simple_api_call());

  // Abi for CallApiCallbackStub.
  Register callee = a0;
  Register data = a4;
  Register holder = a2;
  Register api_function_address = a1;

  // Put callee in place.
  __ LoadAccessor(callee, accessor_holder, accessor_index,
                  is_store ? ACCESSOR_SETTER : ACCESSOR_GETTER);

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  int holder_depth = 0;
  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup,
                                          &holder_depth);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ ld(holder, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ ld(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      for (int i = 1; i < holder_depth; i++) {
        __ ld(holder, FieldMemOperand(holder, HeapObject::kMapOffset));
        __ ld(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      }
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  bool call_data_undefined = false;
  // Put call data in place.
  if (api_call_info->data()->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(data, Heap::kUndefinedValueRootIndex);
  } else {
    if (optimization.is_constant_call()) {
      __ ld(data,
            FieldMemOperand(callee, JSFunction::kSharedFunctionInfoOffset));
      __ ld(data,
            FieldMemOperand(data, SharedFunctionInfo::kFunctionDataOffset));
      __ ld(data, FieldMemOperand(data, FunctionTemplateInfo::kCallCodeOffset));
    } else {
      __ ld(data,
            FieldMemOperand(callee, FunctionTemplateInfo::kCallCodeOffset));
    }
    __ ld(data, FieldMemOperand(data, CallHandlerInfo::kDataOffset));
  }

  if (api_call_info->fast_handler()->IsCode()) {
    // Just tail call into the fast handler if present.
    __ Jump(handle(Code::cast(api_call_info->fast_handler())),
            RelocInfo::CODE_TARGET);
    return;
  }
  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref = ExternalReference(&fun, type, masm->isolate());
  __ li(api_function_address, Operand(ref));

  // Jump to stub.
  CallApiCallbackStub stub(isolate, is_store, call_data_undefined,
                           !optimization.is_constant_call());
  __ TailCallStub(&stub);
}


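// Pushes the store IC register arguments (receiver, name, value, slot and
// vector) for the slow-path runtime calls below.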
static void StoreIC_PushArgs(MacroAssembler* masm) {
  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
          StoreDescriptor::ValueRegister(),
          VectorStoreICDescriptor::SlotRegister(),
          VectorStoreICDescriptor::VectorRegister());
}


void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kStoreIC_Slow);
}


void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
}


#undef __
#define __ ACCESS_MASM(masm())


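// Re-materializes |name| in the name register on the miss path; the label
// variant only emits code if the label was actually used.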
void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ li(this->name(), Operand(name));
  }
}


void NamedStoreHandlerCompiler::GenerateRestoreName(Handle<Name> name) {
  __ li(this->name(), Operand(name));
}


void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
    Register current_map, Register destination_map) {
  DCHECK(false);  // Not implemented.
}


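// Loads the transition map from its weak cell into |map_reg| and jumps to
// |miss| if the cell has been cleared or the map has been deprecated.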
void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                   Register map_reg,
                                                   Register scratch,
                                                   Label* miss) {
  Handle<WeakCell> cell = Map::WeakCellForMap(transition);
  DCHECK(!map_reg.is(scratch));
  __ LoadWeakValue(map_reg, cell, miss);
  if (transition->CanBeDeprecated()) {
    __ lwu(scratch, FieldMemOperand(map_reg, Map::kBitField3Offset));
    __ And(at, scratch, Operand(Map::Deprecated::kMask));
    __ Branch(miss, ne, at, Operand(zero_reg));
  }
}


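// Jumps to |miss_label| unless |value_reg| holds exactly the constant stored
// in the map's descriptor array at |descriptor|.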
void NamedStoreHandlerCompiler::GenerateConstantCheck(Register map_reg,
                                                      int descriptor,
                                                      Register value_reg,
                                                      Register scratch,
                                                      Label* miss_label) {
  DCHECK(!map_reg.is(scratch));
  DCHECK(!map_reg.is(value_reg));
  DCHECK(!value_reg.is(scratch));
  __ LoadInstanceDescriptors(map_reg, scratch);
  __ ld(scratch,
        FieldMemOperand(scratch, DescriptorArray::GetValueOffset(descriptor)));
  __ Branch(miss_label, ne, value_reg, Operand(scratch));
}

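// Type-checks a value being stored into a field: Smis always miss, and if the
// field type is a class the value's map must match its weak cell.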
void NamedStoreHandlerCompiler::GenerateFieldTypeChecks(FieldType* field_type,
                                                        Register value_reg,
                                                        Label* miss_label) {
  Register map_reg = scratch1();
  Register scratch = scratch2();
  DCHECK(!value_reg.is(map_reg));
  DCHECK(!value_reg.is(scratch));
  __ JumpIfSmi(value_reg, miss_label);
  if (field_type->IsClass()) {
    __ ld(map_reg, FieldMemOperand(value_reg, HeapObject::kMapOffset));
    // Compare map directly within the Branch() functions.
    __ GetWeakValue(scratch, Map::WeakCellForMap(field_type->AsClass()));
    __ Branch(miss_label, ne, map_reg, Operand(scratch));
  }
}


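// Walks the prototype chain from the receiver's map up to the handler's
// holder, emitting map checks (or negative dictionary lookups) along the way,
// and returns the register that ends up holding the holder. When
// --eliminate-prototype-chain-checks is enabled, a prototype chain validity
// cell check replaces most of the per-object map checks.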
Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss, PrototypeCheckType check,
    ReturnHolder return_what) {
  Handle<Map> receiver_map = map();

  // Make sure there's no overlap between holder and object registers.
  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

  if (FLAG_eliminate_prototype_chain_checks) {
    Handle<Cell> validity_cell =
        Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate());
    if (!validity_cell.is_null()) {
      DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid),
                validity_cell->value());
      __ li(scratch1, Operand(validity_cell));
      __ ld(scratch1, FieldMemOperand(scratch1, Cell::kValueOffset));
      __ Branch(miss, ne, scratch1,
                Operand(Smi::FromInt(Map::kPrototypeChainValid)));
    }

    // The prototype chain of primitives (and their JSValue wrappers) depends
    // on the native context, which can't be guarded by validity cells.
    // |object_reg| holds the native context specific prototype in this case;
    // we need to check its map.
    if (check == CHECK_ALL_MAPS) {
      __ ld(scratch1, FieldMemOperand(object_reg, HeapObject::kMapOffset));
      Handle<WeakCell> cell = Map::WeakCellForMap(receiver_map);
      __ GetWeakValue(scratch2, cell);
      __ Branch(miss, ne, scratch1, Operand(scratch2));
    }
  }

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (receiver_map->IsJSGlobalObjectMap()) {
    current = isolate()->global_object();
  }

  // Check access rights to the global object. This has to happen after
  // the map check so that we know that the object is actually a global
  // object.
  // This allows us to install generated handlers for accesses to the
  // global proxy (as opposed to using slow ICs). See corresponding code
  // in LookupForRead().
  if (receiver_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch2, miss);
  }

  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    DCHECK(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      if (!name->IsUniqueName()) {
        DCHECK(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      DCHECK(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
                 NameDictionary::kNotFound);

      if (FLAG_eliminate_prototype_chain_checks && depth > 1) {
        // TODO(jkummerow): Cache and re-use weak cell.
        __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
      }
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ ld(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
        __ ld(holder_reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
      }
    } else {
      Register map_reg = scratch1;
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ ld(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }
      if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                  name, scratch2, miss);
      } else if (!FLAG_eliminate_prototype_chain_checks &&
                 (depth != 1 || check == CHECK_ALL_MAPS)) {
        Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
        __ GetWeakValue(scratch2, cell);
        __ Branch(miss, ne, scratch2, Operand(map_reg));
      }
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ ld(holder_reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      }
    }

    reg = holder_reg;  // From now on the object will be in holder_reg.
    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  DCHECK(!current_map->IsJSGlobalProxyMap());

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (!FLAG_eliminate_prototype_chain_checks &&
      (depth != 0 || check == CHECK_ALL_MAPS)) {
    // Check the holder map.
    __ ld(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
    Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
    __ GetWeakValue(scratch2, cell);
    __ Branch(miss, ne, scratch2, Operand(scratch1));
  }

  bool return_holder = return_what == RETURN_HOLDER;
  if (FLAG_eliminate_prototype_chain_checks && return_holder && depth != 0) {
    __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
  }

  // Return the register containing the holder.
  return return_holder ? reg : no_reg;
}


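// Emits the shared miss tail: restores any pushed vector and slot and tail
// calls the miss builtin for this IC kind.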
void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    __ bind(miss);
    if (IC::ICUseVector(kind())) {
      DCHECK(kind() == Code::LOAD_IC);
      PopVectorAndSlot();
    }
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    GenerateRestoreName(miss, name);
    if (IC::ICUseVector(kind())) PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ li(v0, value);
  __ Ret();
}


void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from
  // the holder and it is needed should the interceptor return without any
  // result. The ACCESSOR case needs the receiver to be passed into C++ code,
  // the FIELD case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    InterceptorVectorSlotPush(holder_reg);
    // Invoke an interceptor. Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method).
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        Runtime::kLoadPropertyWithInterceptorOnly);

    // Check if interceptor provided a value for property. If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
    __ Branch(&interceptor_failed, eq, v0, Operand(scratch1()));
    frame_scope.GenerateLeaveFrame();
    __ Ret();

    __ bind(&interceptor_failed);
    InterceptorVectorSlotPop(holder_reg);
    if (must_preserve_receiver_reg) {
      __ Pop(receiver(), holder_reg, this->name());
    } else {
      __ Pop(holder_reg, this->name());
    }
    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}


void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
                           holder());

  __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
}


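// Compiles a store handler that forwards the store to an AccessorInfo
// callback by tail calling the StoreCallbackProperty runtime function.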
Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
    LanguageMode language_mode) {
  Register holder_reg = Frontend(name);

  __ Push(receiver(), holder_reg);  // Receiver.
  // If the callback cannot leak, then push the callback directly,
  // otherwise wrap it in a weak cell.
  if (callback->data()->IsUndefined() || callback->data()->IsSmi()) {
    __ li(at, Operand(callback));
  } else {
    Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
    __ li(at, Operand(cell));
  }
  __ push(at);
  __ li(at, Operand(name));
  __ Push(at, value());
  __ Push(Smi::FromInt(language_mode));

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStoreCallbackProperty);

  // Return the generated code.
  return GetCode(kind(), name);
}


Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}


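// Compiles a load handler for a global property cell: the cell is loaded
// through a weak cell, and configurable properties are additionally checked
// against the hole to detect deletion.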
Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;
  if (IC::ICUseVector(kind())) {
    PushVectorAndSlot();
  }

  FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);

  // Get the value from the cell.
  Register result = StoreDescriptor::ValueRegister();
  Handle<WeakCell> weak_cell = factory()->NewWeakCell(cell);
  __ LoadWeakValue(result, weak_cell, &miss);
  __ ld(result, FieldMemOperand(result, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (is_configurable) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&miss, eq, result, Operand(at));
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->ic_named_load_global_stub(), 1, a1, a3);
  if (IC::ICUseVector(kind())) {
    DiscardVectorAndSlot();
  }
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, result);

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), name);
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64