// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/ic/handler-compiler.h"

#include "src/api-arguments.h"
#include "src/field-type.h"
#include "src/ic/call-optimization.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- r3    : receiver
  //  -- r5    : name
  //  -- lr    : return address
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Save context register
    __ push(cp);

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      // Call the JavaScript getter with the receiver on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ LoadP(scratch,
                 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ push(receiver);
      __ LoadAccessor(r4, holder, accessor_index, ACCESSOR_GETTER);
      __ li(r3, Operand::Zero());
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ pop(cp);
  }
  __ Ret();
}

void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Save context register
    // Save value register, so we can restore it later.
    __ Push(cp, value());

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      DCHECK(!value().is(scratch));
      // Call the JavaScript setter with receiver and value on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ LoadP(scratch,
                 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver, value());
      __ LoadAccessor(r4, holder, accessor_index, ACCESSOR_SETTER);
      __ li(r3, Operand(1));
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    // Restore context register.
    __ Pop(cp, r3);
  }
  __ Ret();
}

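// Helpers for saving, restoring, and discarding the feedback vector and slot
// registers that vector-based ICs pass to their handlers.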
void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                Register slot) {
  MacroAssembler* masm = this->masm();
  __ Push(vector, slot);
}


void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
  MacroAssembler* masm = this->masm();
  __ Pop(vector, slot);
}


void PropertyHandlerCompiler::DiscardVectorAndSlot() {
  MacroAssembler* masm = this->masm();
  // Remove vector and slot.
  __ addi(sp, sp, Operand(2 * kPointerSize));
}

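// Probes the receiver's property dictionary to prove that |name| is absent.
// Jumps to |miss_label| if the receiver has a named interceptor, needs access
// checks, is not a JSReceiver, does not use dictionary properties, or if the
// negative lookup cannot be confirmed.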
void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(name->IsUniqueName());
  DCHECK(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbz(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ andi(r0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ bne(miss_label, cr0);

  // Check that receiver is a JSObject.
  __ lbz(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmpi(scratch0, Operand(FIRST_JS_RECEIVER_TYPE));
  __ blt(miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ LoadP(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ LoadP(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ cmp(map, tmp);
  __ bne(miss_label);

  // Restore the temporarily used register.
  __ LoadP(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register result, Label* miss) {
  __ LoadNativeContextSlot(index, result);
  // Load its initial map. The global functions all have initial maps.
  __ LoadP(result,
           FieldMemOperand(result, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ LoadP(result, FieldMemOperand(result, Map::kPrototypeOffset));
}


void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register scratch1,
    Register scratch2, Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mr(r3, scratch1);
  __ Ret();
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  DCHECK(cell->value()->IsTheHole());
  Handle<WeakCell> weak_cell = masm->isolate()->factory()->NewWeakCell(cell);
  __ LoadWeakValue(scratch, weak_cell, miss);
  __ LoadP(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch, ip);
  __ bne(miss);
}


static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 3);
  __ push(name);
  __ push(receiver);
  __ push(holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, Runtime::FunctionId id) {
  DCHECK(NamedLoadHandlerCompiler::kInterceptorArgsLength ==
         Runtime::FunctionForId(id)->nargs);
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallRuntime(id);
}


// Generate call to api function.
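// The receiver (and, for stores, the value) is pushed on the stack, the
// callee, call data, holder, and api_function_address registers expected by
// CallApiCallbackStub are populated, and the stub (or a fast handler code
// object, if one is present) is tail-called.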
void PropertyHandlerCompiler::GenerateApiAccessorCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch_in,
    bool is_store, Register store_parameter, Register accessor_holder,
    int accessor_index) {
  DCHECK(!accessor_holder.is(scratch_in));
  DCHECK(!receiver.is(scratch_in));
  __ push(receiver);
  // Write the arguments to stack frame.
  if (is_store) {
    DCHECK(!receiver.is(store_parameter));
    DCHECK(!scratch_in.is(store_parameter));
    __ push(store_parameter);
  }
  DCHECK(optimization.is_simple_api_call());

  // Abi for CallApiCallbackStub.
  Register callee = r3;
  Register data = r7;
  Register holder = r5;
  Register api_function_address = r4;

  // Put callee in place.
  __ LoadAccessor(callee, accessor_holder, accessor_index,
                  is_store ? ACCESSOR_SETTER : ACCESSOR_GETTER);

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  int holder_depth = 0;
  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup,
                                          &holder_depth);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ LoadP(holder, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ LoadP(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      for (int i = 1; i < holder_depth; i++) {
        __ LoadP(holder, FieldMemOperand(holder, HeapObject::kMapOffset));
        __ LoadP(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      }
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  bool call_data_undefined = false;
  // Put call data in place.
  if (api_call_info->data()->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(data, Heap::kUndefinedValueRootIndex);
  } else {
    if (optimization.is_constant_call()) {
      __ LoadP(data,
               FieldMemOperand(callee, JSFunction::kSharedFunctionInfoOffset));
      __ LoadP(data,
               FieldMemOperand(data, SharedFunctionInfo::kFunctionDataOffset));
      __ LoadP(data,
               FieldMemOperand(data, FunctionTemplateInfo::kCallCodeOffset));
    } else {
      __ LoadP(data,
               FieldMemOperand(callee, FunctionTemplateInfo::kCallCodeOffset));
    }
    __ LoadP(data, FieldMemOperand(data, CallHandlerInfo::kDataOffset));
  }

  if (api_call_info->fast_handler()->IsCode()) {
    // Just tail call into the fast handler if present.
    __ Jump(handle(Code::cast(api_call_info->fast_handler())),
            RelocInfo::CODE_TARGET);
    return;
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref = ExternalReference(&fun, type, masm->isolate());
  __ mov(api_function_address, Operand(ref));

  // Jump to stub.
  CallApiCallbackStub stub(isolate, is_store, call_data_undefined,
                           !optimization.is_constant_call());
  __ TailCallStub(&stub);
}

static void StoreIC_PushArgs(MacroAssembler* masm) {
  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
          StoreDescriptor::ValueRegister(),
          VectorStoreICDescriptor::SlotRegister(),
          VectorStoreICDescriptor::VectorRegister());
}


void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kStoreIC_Slow);
}


void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
}


#undef __
#define __ ACCESS_MASM(masm())


void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ mov(this->name(), Operand(name));
  }
}


void NamedStoreHandlerCompiler::GenerateRestoreName(Handle<Name> name) {
  __ mov(this->name(), Operand(name));
}


void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
    Register current_map, Register destination_map) {
  DCHECK(false);  // Not implemented.
}

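// Loads the transition map into |map_reg| via its weak cell and jumps to
// |miss| if the weak cell has been cleared or the map has been deprecated.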
void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                   Register map_reg,
                                                   Register scratch,
                                                   Label* miss) {
  Handle<WeakCell> cell = Map::WeakCellForMap(transition);
  DCHECK(!map_reg.is(scratch));
  __ LoadWeakValue(map_reg, cell, miss);
  if (transition->CanBeDeprecated()) {
    __ lwz(scratch, FieldMemOperand(map_reg, Map::kBitField3Offset));
    __ DecodeField<Map::Deprecated>(r0, scratch, SetRC);
    __ bne(miss, cr0);
  }
}

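// Checks that |value_reg| holds the constant recorded at |descriptor| in the
// map's descriptor array; jumps to |miss_label| otherwise.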
void NamedStoreHandlerCompiler::GenerateConstantCheck(Register map_reg,
                                                      int descriptor,
                                                      Register value_reg,
                                                      Register scratch,
                                                      Label* miss_label) {
  DCHECK(!map_reg.is(scratch));
  DCHECK(!map_reg.is(value_reg));
  DCHECK(!value_reg.is(scratch));
  __ LoadInstanceDescriptors(map_reg, scratch);
  __ LoadP(scratch, FieldMemOperand(
                        scratch, DescriptorArray::GetValueOffset(descriptor)));
  __ cmp(value_reg, scratch);
  __ bne(miss_label);
}

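// Checks that |value_reg| holds a heap object and, if the field type is a
// class, that its map matches the expected class map; jumps to |miss_label|
// otherwise.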
void NamedStoreHandlerCompiler::GenerateFieldTypeChecks(FieldType* field_type,
                                                        Register value_reg,
                                                        Label* miss_label) {
  Register map_reg = scratch1();
  Register scratch = scratch2();
  DCHECK(!value_reg.is(map_reg));
  DCHECK(!value_reg.is(scratch));
  __ JumpIfSmi(value_reg, miss_label);
  if (field_type->IsClass()) {
    __ LoadP(map_reg, FieldMemOperand(value_reg, HeapObject::kMapOffset));
    __ CmpWeakValue(map_reg, Map::WeakCellForMap(field_type->AsClass()),
                    scratch);
    __ bne(miss_label);
  }
}

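// Walks the prototype chain from the receiver's map to the holder, verifying
// each map (or performing a negative dictionary lookup for normal objects)
// and jumping to |miss| on any mismatch. With
// --eliminate-prototype-chain-checks the per-map checks are replaced by a
// single prototype chain validity cell check. Returns the register holding
// the holder if requested.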
Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss, PrototypeCheckType check,
    ReturnHolder return_what) {
  Handle<Map> receiver_map = map();

  // Make sure there's no overlap between holder and object registers.
  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

  if (FLAG_eliminate_prototype_chain_checks) {
    Handle<Cell> validity_cell =
        Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate());
    if (!validity_cell.is_null()) {
      DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid),
                validity_cell->value());
      __ mov(scratch1, Operand(validity_cell));
      __ LoadP(scratch1, FieldMemOperand(scratch1, Cell::kValueOffset));
      __ CmpSmiLiteral(scratch1, Smi::FromInt(Map::kPrototypeChainValid), r0);
      __ bne(miss);
    }

    // The prototype chain of primitives (and their JSValue wrappers) depends
    // on the native context, which can't be guarded by validity cells.
    // |object_reg| holds the native context specific prototype in this case;
    // we need to check its map.
    if (check == CHECK_ALL_MAPS) {
      __ LoadP(scratch1, FieldMemOperand(object_reg, HeapObject::kMapOffset));
      Handle<WeakCell> cell = Map::WeakCellForMap(receiver_map);
      __ CmpWeakValue(scratch1, cell, scratch2);
      __ b(ne, miss);
    }
  }

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (receiver_map->IsJSGlobalObjectMap()) {
    current = isolate()->global_object();
  }
  // Check access rights to the global object. This has to happen after
  // the map check so that we know that the object is actually a global
  // object.
  // This allows us to install generated handlers for accesses to the
  // global proxy (as opposed to using slow ICs). See corresponding code
  // in LookupForRead().
  if (receiver_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch2, miss);
  }

  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    DCHECK(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      if (!name->IsUniqueName()) {
        DCHECK(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      DCHECK(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
                 NameDictionary::kNotFound);

      if (FLAG_eliminate_prototype_chain_checks && depth > 1) {
        // TODO(jkummerow): Cache and re-use weak cell.
        __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
      }
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ LoadP(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
        __ LoadP(holder_reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
      }
    } else {
      Register map_reg = scratch1;
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ LoadP(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }
      if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                  name, scratch2, miss);
      } else if (!FLAG_eliminate_prototype_chain_checks &&
                 (depth != 1 || check == CHECK_ALL_MAPS)) {
        Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
        __ CmpWeakValue(map_reg, cell, scratch2);
        __ bne(miss);
      }
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ LoadP(holder_reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      }
    }

    reg = holder_reg;  // From now on the object will be in holder_reg.
    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  DCHECK(!current_map->IsJSGlobalProxyMap());

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (!FLAG_eliminate_prototype_chain_checks &&
      (depth != 0 || check == CHECK_ALL_MAPS)) {
    // Check the holder map.
    __ LoadP(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
    Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
    __ CmpWeakValue(scratch1, cell, scratch2);
    __ bne(miss);
  }

  bool return_holder = return_what == RETURN_HOLDER;
  if (FLAG_eliminate_prototype_chain_checks && return_holder && depth != 0) {
    __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
  }

  // Return the register containing the holder.
  return return_holder ? reg : no_reg;
}

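// If the miss label was used, binds it, restores the vector and slot for
// vector-based ICs, and tail-calls the miss builtin before continuing at the
// success path.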
void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ b(&success);
    __ bind(miss);
    if (IC::ICUseVector(kind())) {
      DCHECK(kind() == Code::LOAD_IC);
      PopVectorAndSlot();
    }
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ b(&success);
    GenerateRestoreName(miss, name);
    if (IC::ICUseVector(kind())) PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ Move(r3, value);
  __ Ret();
}


void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from the
  // holder and it is needed should the interceptor return without any result.
  // The ACCESSOR case needs the receiver to be passed into C++ code, the FIELD
  // case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameAndConstantPoolScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    InterceptorVectorSlotPush(holder_reg);
    // Invoke an interceptor. Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method.)
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        Runtime::kLoadPropertyWithInterceptorOnly);

    // Check if interceptor provided a value for property. If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r3, scratch1());
    __ beq(&interceptor_failed);
    frame_scope.GenerateLeaveFrame();
    __ Ret();

    __ bind(&interceptor_failed);
    InterceptorVectorSlotPop(holder_reg);
    __ pop(this->name());
    __ pop(holder_reg);
    if (must_preserve_receiver_reg) {
      __ pop(receiver());
    }
    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}


void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
                           holder());

  __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
}

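// Compiles a store handler that passes the receiver, holder, callback (or its
// weak cell), name, value, and language mode to the runtime, which performs
// the store through the AccessorInfo callback.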
Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
    LanguageMode language_mode) {
  Register holder_reg = Frontend(name);

  __ Push(receiver(), holder_reg);  // receiver

  // If the callback cannot leak, then push the callback directly,
  // otherwise wrap it in a weak cell.
  if (callback->data()->IsUndefined() || callback->data()->IsSmi()) {
    __ mov(ip, Operand(callback));
  } else {
    Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
    __ mov(ip, Operand(cell));
  }
  __ push(ip);
  __ mov(ip, Operand(name));
  __ Push(ip, value());
  __ Push(Smi::FromInt(language_mode));

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStoreCallbackProperty);

  // Return the generated code.
  return GetCode(kind(), name);
}


Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}

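// Compiles a load handler for a global property cell. The cell is referenced
// through a weak cell, its value is loaded directly, and configurable
// (deletable) properties get an extra hole check that falls through to the
// miss handler.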
Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;
  if (IC::ICUseVector(kind())) {
    PushVectorAndSlot();
  }
  FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);

  // Get the value from the cell.
  Register result = StoreDescriptor::ValueRegister();
  Handle<WeakCell> weak_cell = factory()->NewWeakCell(cell);
  __ LoadWeakValue(result, weak_cell, &miss);
  __ LoadP(result, FieldMemOperand(result, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (is_configurable) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(result, ip);
    __ beq(&miss);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->ic_named_load_global_stub(), 1, r4, r6);
  if (IC::ICUseVector(kind())) {
    DiscardVectorAndSlot();
  }
  __ Ret();

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), name);
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC