// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/codegen.h"
#include "src/ic/ic.h"
#include "src/ic/ic-compiler.h"
#include "src/ic/stub-cache.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


19// "type" holds an instance type on entry and is not clobbered.
20// Generated code branch on "global_object" if type is any kind of global
21// JS object.
22static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type,
23 Label* global_object) {
24 __ Cmp(type, JS_GLOBAL_OBJECT_TYPE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000025 __ Ccmp(type, JS_GLOBAL_PROXY_TYPE, ZFlag, ne);
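  // Ccmp performs the second comparison only if the first one set 'ne';
  // otherwise it forces the flags to ZFlag (Z set, i.e. 'eq'). Either way the
  // following B(eq) is taken exactly when 'type' matched one of the two types.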
  __ B(eq, global_object);
}


// Helper function used from LoadIC::GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label
//           is done.
// result:   Register for the result. It is only updated if a jump to the
//           miss label is not done.
// The scratch registers need to be different from elements, name and result.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss,
                                   Register elements, Register name,
                                   Register result, Register scratch1,
                                   Register scratch2) {
  DCHECK(!AreAliased(elements, name, scratch1, scratch2));
  DCHECK(!AreAliased(result, scratch1, scratch2));

  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss, &done, elements,
                                                   name, scratch1, scratch2);

  // If probing finds an entry, check that the value is a normal property.
  __ Bind(&done);

  static const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  static const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
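  // Each NameDictionary entry is a (key, value, details) triple, so the value
  // sits one pointer slot past the entry's key and the details smi two slots
  // past it. A normal property encodes as zero in TypeField, which is what
  // the Tst below relies on.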
  __ Ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ Tst(scratch1, Smi::FromInt(PropertyDetails::TypeField::kMask));
  __ B(ne, miss);

  // Get the value at the masked, scaled index and return.
  __ Ldr(result,
         FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
}


// Helper function used from StoreIC::GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label
//           is done.
// value:    The value to store (never clobbered).
//
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm, Label* miss,
                                    Register elements, Register name,
                                    Register value, Register scratch1,
                                    Register scratch2) {
  DCHECK(!AreAliased(elements, name, value, scratch1, scratch2));

  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss, &done, elements,
                                                   name, scratch1, scratch2);

  // If probing finds an entry in the dictionary, check that the value
  // is a normal property that is not read only.
  __ Bind(&done);

  static const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  static const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  static const int kTypeAndReadOnlyMask =
      PropertyDetails::TypeField::kMask |
      PropertyDetails::AttributesField::encode(READ_ONLY);
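  // One mask, two rejections: any non-normal property (non-zero TypeField)
  // and any normal property whose READ_ONLY attribute bit is set both send
  // the store to the miss label.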
  __ Ldrsw(scratch1, UntagSmiFieldMemOperand(scratch2, kDetailsOffset));
  __ Tst(scratch1, kTypeAndReadOnlyMask);
  __ B(ne, miss);

  // Store the value at the masked, scaled index and return.
  static const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ Add(scratch2, scratch2, kValueOffset - kHeapObjectTag);
  __ Str(value, MemOperand(scratch2));

  // Update the write barrier. Make sure not to clobber the value.
  __ Mov(scratch1, value);
  __ RecordWrite(elements, scratch2, scratch1, kLRHasNotBeenSaved,
                 kDontSaveFPRegs);
}


// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS objects and returns the map of the
// receiver in 'map_scratch' if the receiver is not a smi.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver,
                                           Register map_scratch,
                                           Register scratch,
                                           int interceptor_bit, Label* slow) {
  DCHECK(!AreAliased(map_scratch, scratch));

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);
  // Get the map of the receiver.
  __ Ldr(map_scratch, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check bit field.
  __ Ldrb(scratch, FieldMemOperand(map_scratch, Map::kBitFieldOffset));
  __ Tbnz(scratch, Map::kIsAccessCheckNeeded, slow);
  __ Tbnz(scratch, interceptor_bit, slow);
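  // Tbnz tests a single named bit, so each bit-field check costs one
  // instruction. 'interceptor_bit' lets callers select between
  // Map::kHasIndexedInterceptor and Map::kHasNamedInterceptor.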

  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object, we enter the
  // runtime system to make sure that indexing into string objects works
  // as intended.
  STATIC_ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ Ldrb(scratch, FieldMemOperand(map_scratch, Map::kInstanceTypeOffset));
  __ Cmp(scratch, JS_OBJECT_TYPE);
  __ B(lt, slow);
}


// Loads an indexed element from a fast case array.
//
// receiver - holds the receiver on entry.
//            Unchanged unless 'result' is the same register.
//
// key      - holds the smi key on entry.
//            Unchanged unless 'result' is the same register.
//
// elements - holds the elements of the receiver and its prototypes. Clobbered.
//
// result   - holds the result on exit if the load succeeded.
//            Allowed to be the same as 'receiver' or 'key'.
//            Unchanged on bailout so 'receiver' and 'key' can be safely
//            used by further computation.
static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
                                  Register key, Register elements,
                                  Register scratch1, Register scratch2,
                                  Register result, Label* slow) {
  DCHECK(!AreAliased(receiver, key, elements, scratch1, scratch2));

  Label check_prototypes, check_next_prototype;
  Label done, in_bounds, absent;

  // Check for fast array.
  __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ AssertFastElements(elements);

  // Check that the key (index) is within bounds.
  __ Ldr(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ Cmp(key, scratch1);
  __ B(lo, &in_bounds);
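  // Both 'key' and the length are tagged smis and smi tagging preserves
  // order for non-negative values, so the unsigned compare above also sends
  // negative keys (top bit set) down the out-of-bounds path.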

  // Out of bounds. Check the prototype chain to see if we can just return
  // 'undefined'.
  __ Cmp(key, Operand(Smi::FromInt(0)));
  __ B(lt, slow);  // Negative keys can't take the fast OOB path.
  __ Bind(&check_prototypes);
  __ Ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Bind(&check_next_prototype);
  __ Ldr(scratch2, FieldMemOperand(scratch2, Map::kPrototypeOffset));
  // scratch2: current prototype
  __ JumpIfRoot(scratch2, Heap::kNullValueRootIndex, &absent);
  __ Ldr(elements, FieldMemOperand(scratch2, JSObject::kElementsOffset));
  __ Ldr(scratch2, FieldMemOperand(scratch2, HeapObject::kMapOffset));
  // elements: elements of current prototype
  // scratch2: map of current prototype
  __ CompareInstanceType(scratch2, scratch1, JS_OBJECT_TYPE);
  __ B(lo, slow);
  __ Ldrb(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset));
  __ Tbnz(scratch1, Map::kIsAccessCheckNeeded, slow);
  __ Tbnz(scratch1, Map::kHasIndexedInterceptor, slow);
  __ JumpIfNotRoot(elements, Heap::kEmptyFixedArrayRootIndex, slow);
  __ B(&check_next_prototype);

  __ Bind(&absent);
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ B(&done);

  __ Bind(&in_bounds);
  // Fast case: Do the load.
  __ Add(scratch1, elements, FixedArray::kHeaderSize - kHeapObjectTag);
  __ SmiUntag(scratch2, key);
  __ Ldr(scratch2, MemOperand(scratch1, scratch2, LSL, kPointerSizeLog2));

  // In case the loaded value is the_hole we have to check the prototype chain.
  __ JumpIfRoot(scratch2, Heap::kTheHoleValueRootIndex, &check_prototypes);

  // Move the value to the result register.
  // 'result' can alias with 'receiver' or 'key' but these two must be
  // preserved if we jump to 'slow'.
  __ Mov(result, scratch2);
  __ Bind(&done);
}


// Checks whether a key is an array index string or a unique name.
// Falls through if the key is a unique name.
// The map of the key is returned in 'map_scratch'.
// If the jump to 'index_string' is done, the hash of the key is left
// in 'hash_scratch'.
static void GenerateKeyNameCheck(MacroAssembler* masm, Register key,
                                 Register map_scratch, Register hash_scratch,
                                 Label* index_string, Label* not_unique) {
  DCHECK(!AreAliased(key, map_scratch, hash_scratch));

  // Is the key a name?
  Label unique;
  __ JumpIfObjectType(key, map_scratch, hash_scratch, LAST_UNIQUE_NAME_TYPE,
                      not_unique, hi);
  STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
  __ B(eq, &unique);
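  // Symbols sit exactly at LAST_UNIQUE_NAME_TYPE (== FIRST_NONSTRING_TYPE):
  // 'hi' above means the key is not a name at all, 'eq' means it is a symbol
  // (always unique), and anything lower is a string that still needs the
  // checks below.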

  // Is the string an array index with cached numeric value?
  __ Ldr(hash_scratch.W(), FieldMemOperand(key, Name::kHashFieldOffset));
  __ TestAndBranchIfAllClear(hash_scratch, Name::kContainsCachedArrayIndexMask,
                             index_string);

  // Is the string internalized? We know it's a string, so a single bit test is
  // enough.
  __ Ldrb(hash_scratch, FieldMemOperand(map_scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0);
  __ TestAndBranchIfAnySet(hash_scratch, kIsNotInternalizedMask, not_unique);

  __ Bind(&unique);
  // Fall through if the key is a unique name.
}

void LoadIC::GenerateNormal(MacroAssembler* masm) {
  Register dictionary = x0;
  DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister()));
  DCHECK(!dictionary.is(LoadDescriptor::NameRegister()));
  Label slow;

  __ Ldr(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(),
                                     JSObject::kPropertiesOffset));
  GenerateDictionaryLoad(masm, &slow, dictionary,
                         LoadDescriptor::NameRegister(), x0, x3, x4);
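  // 'result' (x0) above deliberately aliases 'dictionary'; this is fine
  // because GenerateDictionaryLoad only writes the result register when the
  // lookup succeeds.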
  __ Ret();

  // Dictionary load failed, go slow (but don't miss).
  __ Bind(&slow);
  GenerateRuntimeGetProperty(masm);
}


void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is in lr.
  Isolate* isolate = masm->isolate();
  ASM_LOCATION("LoadIC::GenerateMiss");

  DCHECK(!AreAliased(x4, x5, LoadWithVectorDescriptor::SlotRegister(),
                     LoadWithVectorDescriptor::VectorRegister()));
  __ IncrementCounter(isolate->counters()->ic_load_miss(), 1, x4, x5);

  // Perform tail call to the entry.
  __ Push(LoadWithVectorDescriptor::ReceiverRegister(),
          LoadWithVectorDescriptor::NameRegister(),
          LoadWithVectorDescriptor::SlotRegister(),
          LoadWithVectorDescriptor::VectorRegister());
  __ TailCallRuntime(Runtime::kLoadIC_Miss);
}

void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // The return address is in lr.
  __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister());

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kGetProperty);
}


void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is in lr.
  Isolate* isolate = masm->isolate();

  DCHECK(!AreAliased(x10, x11, LoadWithVectorDescriptor::SlotRegister(),
                     LoadWithVectorDescriptor::VectorRegister()));
  __ IncrementCounter(isolate->counters()->ic_keyed_load_miss(), 1, x10, x11);

  __ Push(LoadWithVectorDescriptor::ReceiverRegister(),
          LoadWithVectorDescriptor::NameRegister(),
          LoadWithVectorDescriptor::SlotRegister(),
          LoadWithVectorDescriptor::VectorRegister());

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedLoadIC_Miss);
}

void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // The return address is in lr.
  __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister());

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kKeyedGetProperty);
}

static void GenerateKeyedLoadWithSmiKey(MacroAssembler* masm, Register key,
                                        Register receiver, Register scratch1,
                                        Register scratch2, Register scratch3,
                                        Register scratch4, Register scratch5,
                                        Label* slow) {
  DCHECK(!AreAliased(key, receiver, scratch1, scratch2, scratch3, scratch4,
                     scratch5));

  Isolate* isolate = masm->isolate();
  Label check_number_dictionary;
  // If we can load the value, it should be returned in x0.
  Register result = x0;

  GenerateKeyedLoadReceiverCheck(masm, receiver, scratch1, scratch2,
                                 Map::kHasIndexedInterceptor, slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(scratch1, scratch2, &check_number_dictionary);
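  // CheckFastElements only falls through for fast smi and object elements;
  // receivers with double or dictionary elements take the branch and are
  // either handled as a number dictionary below or sent to the slow path.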

  GenerateFastArrayLoad(masm, receiver, key, scratch3, scratch2, scratch1,
                        result, slow);
  __ IncrementCounter(isolate->counters()->ic_keyed_load_generic_smi(), 1,
                      scratch1, scratch2);
  __ Ret();

  __ Bind(&check_number_dictionary);
  __ Ldr(scratch3, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ Ldr(scratch2, FieldMemOperand(scratch3, JSObject::kMapOffset));

  // Check whether we have a number dictionary.
  __ JumpIfNotRoot(scratch2, Heap::kHashTableMapRootIndex, slow);

  __ LoadFromNumberDictionary(slow, scratch3, key, result, scratch1, scratch2,
                              scratch4, scratch5);
  __ Ret();
}

static void GenerateKeyedLoadWithNameKey(MacroAssembler* masm, Register key,
                                         Register receiver, Register scratch1,
                                         Register scratch2, Register scratch3,
                                         Register scratch4, Register scratch5,
                                         Label* slow) {
  DCHECK(!AreAliased(key, receiver, scratch1, scratch2, scratch3, scratch4,
                     scratch5));

  Isolate* isolate = masm->isolate();
  Label probe_dictionary, property_array_property;
  // If we can load the value, it should be returned in x0.
  Register result = x0;

  GenerateKeyedLoadReceiverCheck(masm, receiver, scratch1, scratch2,
                                 Map::kHasNamedInterceptor, slow);

  // If the receiver is a fast-case object, check the stub cache. Otherwise
  // probe the dictionary.
  __ Ldr(scratch2, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  __ Ldr(scratch3, FieldMemOperand(scratch2, HeapObject::kMapOffset));
  __ JumpIfRoot(scratch3, Heap::kHashTableMapRootIndex, &probe_dictionary);

  // The handlers in the stub cache expect a vector and slot. Since we won't
  // change the IC from any downstream misses, a dummy vector can be used.
  Register vector = LoadWithVectorDescriptor::VectorRegister();
  Register slot = LoadWithVectorDescriptor::SlotRegister();
  DCHECK(!AreAliased(vector, slot, scratch1, scratch2, scratch3, scratch4));
  Handle<TypeFeedbackVector> dummy_vector =
      TypeFeedbackVector::DummyVector(masm->isolate());
  int slot_index = dummy_vector->GetIndex(
      FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedLoadICSlot));
  __ LoadRoot(vector, Heap::kDummyVectorRootIndex);
  __ Mov(slot, Operand(Smi::FromInt(slot_index)));

  Code::Flags flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::KEYED_LOAD_IC, flags,
                                               receiver, key, scratch1,
                                               scratch2, scratch3, scratch4);
  // Cache miss.
  KeyedLoadIC::GenerateMiss(masm);

  // Do a quick inline probe of the receiver's dictionary, if it exists.
  __ Bind(&probe_dictionary);
  __ Ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, scratch1, slow);
  // Load the property.
  GenerateDictionaryLoad(masm, slow, scratch2, key, result, scratch1, scratch3);
  __ IncrementCounter(isolate->counters()->ic_keyed_load_generic_symbol(), 1,
                      scratch1, scratch2);
  __ Ret();
}

void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // The return address is in lr.
  Label slow, check_name, index_smi, index_name;

  Register key = LoadDescriptor::NameRegister();
  Register receiver = LoadDescriptor::ReceiverRegister();
  DCHECK(key.is(x2));
  DCHECK(receiver.is(x1));

  __ JumpIfNotSmi(key, &check_name);
  __ Bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.
  GenerateKeyedLoadWithSmiKey(masm, key, receiver, x7, x3, x4, x5, x6, &slow);

  // Slow case.
  __ Bind(&slow);
  __ IncrementCounter(masm->isolate()->counters()->ic_keyed_load_generic_slow(),
                      1, x4, x3);
  GenerateRuntimeGetProperty(masm);

  __ Bind(&check_name);
  GenerateKeyNameCheck(masm, key, x0, x3, &index_name, &slow);

  GenerateKeyedLoadWithNameKey(masm, key, receiver, x4, x5, x6, x7, x3, &slow);

  __ Bind(&index_name);
  __ IndexFromHash(x3, key);
  // Now jump to the place where smi keys are handled.
  __ B(&index_smi);
}


static void StoreIC_PushArgs(MacroAssembler* masm) {
  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
          StoreDescriptor::ValueRegister(),
          VectorStoreICDescriptor::SlotRegister(),
          VectorStoreICDescriptor::VectorRegister());
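  // The push order here must match the argument order that the store-IC miss
  // runtime entries expect to find on the stack.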
}


void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  ASM_LOCATION("KeyedStoreIC::GenerateMiss");
  StoreIC_PushArgs(masm);
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss);
}


static void KeyedStoreGenerateMegamorphicHelper(
    MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
    KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length,
    Register value, Register key, Register receiver, Register receiver_map,
    Register elements_map, Register elements) {
  DCHECK(!AreAliased(value, key, receiver, receiver_map, elements_map, elements,
                     x10, x11));

  Label transition_smi_elements;
  Label transition_double_elements;
  Label fast_double_without_map_check;
  Label non_double_value;
  Label finish_store;

  __ Bind(fast_object);
  if (check_map == kCheckMap) {
    __ Ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
    __ Cmp(elements_map,
           Operand(masm->isolate()->factory()->fixed_array_map()));
    __ B(ne, fast_double);
  }

  // HOLECHECK: guards "A[i] = V"
  // We have to go to the runtime if the current value is the hole because there
  // may be a callback on the element.
  Label holecheck_passed;
  __ Add(x10, elements, FixedArray::kHeaderSize - kHeapObjectTag);
  __ Add(x10, x10, Operand::UntagSmiAndScale(key, kPointerSizeLog2));
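  // Operand::UntagSmiAndScale folds the smi untag and element scaling into
  // the address arithmetic, so the tagged key never needs a separate untag.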
  __ Ldr(x11, MemOperand(x10));
  __ JumpIfNotRoot(x11, Heap::kTheHoleValueRootIndex, &holecheck_passed);
  __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, x10, slow);
  __ bind(&holecheck_passed);

  // Smi stores don't require further checks.
  __ JumpIfSmi(value, &finish_store);

  // Escape to elements kind transition case.
  __ CheckFastObjectElements(receiver_map, x10, &transition_smi_elements);

  __ Bind(&finish_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ Add(x10, key, Smi::FromInt(1));
    __ Str(x10, FieldMemOperand(receiver, JSArray::kLengthOffset));
  }

  Register address = x11;
  __ Add(address, elements, FixedArray::kHeaderSize - kHeapObjectTag);
  __ Add(address, address, Operand::UntagSmiAndScale(key, kPointerSizeLog2));
  __ Str(value, MemOperand(address));

  Label dont_record_write;
  __ JumpIfSmi(value, &dont_record_write);
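  // Smis are immediates rather than heap pointers, so they never need a
  // write barrier; this explicit check is what makes OMIT_SMI_CHECK safe in
  // the RecordWrite below.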

  // Update write barrier for the elements array address.
  __ Mov(x10, value);  // Preserve the value which is returned.
  __ RecordWrite(elements, address, x10, kLRHasNotBeenSaved, kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

  __ Bind(&dont_record_write);
  __ Ret();


  __ Bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    // runtime.
    __ JumpIfNotRoot(elements_map, Heap::kFixedDoubleArrayMapRootIndex, slow);
  }

  // HOLECHECK: guards "A[i] double hole?"
  // We have to see if the double version of the hole is present. If so, go to
  // the runtime.
  __ Add(x10, elements, FixedDoubleArray::kHeaderSize - kHeapObjectTag);
  __ Add(x10, x10, Operand::UntagSmiAndScale(key, kPointerSizeLog2));
  __ Ldr(x11, MemOperand(x10));
  __ CompareAndBranch(x11, kHoleNanInt64, ne, &fast_double_without_map_check);
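  // The double hole is a specific NaN bit pattern (kHoleNanInt64), so
  // comparing the raw 64-bit load against it detects the hole exactly,
  // without any floating-point instructions.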
  __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, x10, slow);

  __ Bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value, key, elements, x10, d0,
                                 &transition_double_elements);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ Add(x10, key, Smi::FromInt(1));
    __ Str(x10, FieldMemOperand(receiver, JSArray::kLengthOffset));
  }
  __ Ret();


555 __ Bind(&transition_smi_elements);
556 // Transition the array appropriately depending on the value type.
557 __ Ldr(x10, FieldMemOperand(value, HeapObject::kMapOffset));
558 __ JumpIfNotRoot(x10, Heap::kHeapNumberMapRootIndex, &non_double_value);
559
560 // Value is a double. Transition FAST_SMI_ELEMENTS ->
561 // FAST_DOUBLE_ELEMENTS and complete the store.
562 __ LoadTransitionedArrayMapConditional(
563 FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS, receiver_map, x10, x11, slow);
564 AllocationSiteMode mode =
565 AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS);
566 ElementsTransitionGenerator::GenerateSmiToDouble(masm, receiver, key, value,
567 receiver_map, mode, slow);
568 __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
569 __ B(&fast_double_without_map_check);
570
571 __ Bind(&non_double_value);
572 // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS.
573 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, FAST_ELEMENTS,
574 receiver_map, x10, x11, slow);
575
576 mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
577 ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
578 masm, receiver, key, value, receiver_map, mode, slow);
579
580 __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
581 __ B(&finish_store);
582
583 __ Bind(&transition_double_elements);
584 // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
585 // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
586 // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
587 __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS,
588 receiver_map, x10, x11, slow);
589 mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
590 ElementsTransitionGenerator::GenerateDoubleToObject(
591 masm, receiver, key, value, receiver_map, mode, slow);
592 __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
593 __ B(&finish_store);
594}
595
596
void KeyedStoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                       LanguageMode language_mode) {
  ASM_LOCATION("KeyedStoreIC::GenerateMegamorphic");
  Label slow;
  Label array;
  Label fast_object;
  Label extra;
  Label fast_object_grow;
  Label fast_double_grow;
  Label fast_double;
  Label maybe_name_key;
  Label miss;

  Register value = StoreDescriptor::ValueRegister();
  Register key = StoreDescriptor::NameRegister();
  Register receiver = StoreDescriptor::ReceiverRegister();
  DCHECK(receiver.is(x1));
  DCHECK(key.is(x2));
  DCHECK(value.is(x0));

  Register receiver_map = x3;
  Register elements = x4;
  Register elements_map = x5;

  __ JumpIfNotSmi(key, &maybe_name_key);
  __ JumpIfSmi(receiver, &slow);
  __ Ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));

  // Check that the receiver does not require access checks.
  // The generic stub does not perform map checks.
  __ Ldrb(x10, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
  __ TestAndBranchIfAnySet(x10, (1 << Map::kIsAccessCheckNeeded), &slow);

  // Check if the object is a JS array or not.
  Register instance_type = x10;
  __ CompareInstanceType(receiver_map, instance_type, JS_ARRAY_TYPE);
  __ B(eq, &array);
  // Check that the object is some kind of JS object EXCEPT JS Value type. In
  // the case that the object is a value-wrapper object, we enter the runtime
  // system to make sure that indexing into string objects works as intended.
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  __ Cmp(instance_type, JS_OBJECT_TYPE);
  __ B(lo, &slow);

  // Object case: Check key against length in the elements array.
  __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  // Check array bounds. Both the key and the length of FixedArray are smis.
  __ Ldrsw(x10, UntagSmiFieldMemOperand(elements, FixedArray::kLengthOffset));
  __ Cmp(x10, Operand::UntagSmi(key));
  __ B(hi, &fast_object);


649 __ Bind(&slow);
650 // Slow case, handle jump to runtime.
651 // Live values:
652 // x0: value
653 // x1: key
654 // x2: receiver
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000655 PropertyICCompiler::GenerateRuntimeSetProperty(masm, language_mode);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400656 // Never returns to here.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000657
  __ bind(&maybe_name_key);
  __ Ldr(x10, FieldMemOperand(key, HeapObject::kMapOffset));
  __ Ldrb(x10, FieldMemOperand(x10, Map::kInstanceTypeOffset));
  __ JumpIfNotUniqueNameInstanceType(x10, &slow);

  // The handlers in the stub cache expect a vector and slot. Since we won't
  // change the IC from any downstream misses, a dummy vector can be used.
  Register vector = VectorStoreICDescriptor::VectorRegister();
  Register slot = VectorStoreICDescriptor::SlotRegister();
  DCHECK(!AreAliased(vector, slot, x5, x6, x7, x8));
  Handle<TypeFeedbackVector> dummy_vector =
      TypeFeedbackVector::DummyVector(masm->isolate());
  int slot_index = dummy_vector->GetIndex(
      FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot));
  __ LoadRoot(vector, Heap::kDummyVectorRootIndex);
  __ Mov(slot, Operand(Smi::FromInt(slot_index)));

  Code::Flags flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, Code::KEYED_STORE_IC, flags, receiver, key, x5, x6, x7, x8);
  // Cache miss.
  __ B(&miss);

682 __ Bind(&extra);
683 // Extra capacity case: Check if there is extra capacity to
684 // perform the store and update the length. Used for adding one
685 // element to the array by writing to array[array.length].
686
687 // Check for room in the elements backing store.
688 // Both the key and the length of FixedArray are smis.
689 __ Ldrsw(x10, UntagSmiFieldMemOperand(elements, FixedArray::kLengthOffset));
690 __ Cmp(x10, Operand::UntagSmi(key));
691 __ B(ls, &slow);
692
693 __ Ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
694 __ Cmp(elements_map, Operand(masm->isolate()->factory()->fixed_array_map()));
695 __ B(eq, &fast_object_grow);
696 __ Cmp(elements_map,
697 Operand(masm->isolate()->factory()->fixed_double_array_map()));
698 __ B(eq, &fast_double_grow);
699 __ B(&slow);
700
701
702 __ Bind(&array);
703 // Array case: Get the length and the elements array from the JS
704 // array. Check that the array is in fast mode (and writable); if it
705 // is the length is always a smi.
706
707 __ Ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
708
709 // Check the key against the length in the array.
710 __ Ldrsw(x10, UntagSmiFieldMemOperand(receiver, JSArray::kLengthOffset));
711 __ Cmp(x10, Operand::UntagSmi(key));
712 __ B(eq, &extra); // We can handle the case where we are appending 1 element.
713 __ B(lo, &slow);
714
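  // The helper below is generated twice: once for in-bounds stores (checking
  // the elements map) and once for the grow-by-one append path, which
  // increments the length and skips the map re-check.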
  KeyedStoreGenerateMegamorphicHelper(
      masm, &fast_object, &fast_double, &slow, kCheckMap, kDontIncrementLength,
      value, key, receiver, receiver_map, elements_map, elements);
  KeyedStoreGenerateMegamorphicHelper(masm, &fast_object_grow,
                                      &fast_double_grow, &slow, kDontCheckMap,
                                      kIncrementLength, value, key, receiver,
                                      receiver_map, elements_map, elements);

  __ bind(&miss);
  GenerateMiss(masm);
}

void StoreIC::GenerateMiss(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // Tail call to the entry.
  __ TailCallRuntime(Runtime::kStoreIC_Miss);
}


void StoreIC::GenerateNormal(MacroAssembler* masm) {
  Label miss;
  Register value = StoreDescriptor::ValueRegister();
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register name = StoreDescriptor::NameRegister();
  Register dictionary = x5;
  DCHECK(!AreAliased(value, receiver, name,
                     VectorStoreICDescriptor::SlotRegister(),
                     VectorStoreICDescriptor::VectorRegister(), x5, x6, x7));

  __ Ldr(dictionary, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  GenerateDictionaryStore(masm, &miss, dictionary, name, value, x6, x7);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->ic_store_normal_hit(), 1, x6, x7);
  __ Ret();

  // Cache miss: Jump to runtime.
  __ Bind(&miss);
  __ IncrementCounter(counters->ic_store_normal_miss(), 1, x6, x7);
  GenerateMiss(masm);
}


Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return al;
  }
}


bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address info_address = Assembler::return_address_from_call_start(address);

  InstructionSequence* patch_info = InstructionSequence::At(info_address);
  return patch_info->IsInlineData();
}


// Activate a SMI fast-path by patching the instructions generated by
// JumpPatchSite::EmitJumpIf(Not)Smi(), using the information encoded by
// JumpPatchSite::EmitPatchInfo().
void PatchInlinedSmiCode(Isolate* isolate, Address address,
                         InlinedSmiCheck check) {
  // The patch information is encoded in the instruction stream using
  // instructions which have no side effects, so we can safely execute them.
  // The patch information is encoded directly after the call to the helper
  // function which is requesting this patch operation.
  Address info_address = Assembler::return_address_from_call_start(address);
  InlineSmiCheckInfo info(info_address);

  // Check and decode the patch information instruction.
  if (!info.HasSmiCheck()) {
    return;
  }

  if (FLAG_trace_ic) {
    PrintF("[ Patching ic at %p, marker=%p, SMI check=%p\n",
           static_cast<void*>(address), static_cast<void*>(info_address),
           static_cast<void*>(info.SmiCheck()));
  }

  // Patch and activate code generated by JumpPatchSite::EmitJumpIfNotSmi()
  // and JumpPatchSite::EmitJumpIfSmi().
  // Changing
  //   tb(n)z xzr, #0, <target>
  // to
  //   tb(!n)z test_reg, #0, <target>
  Instruction* to_patch = info.SmiCheck();
  PatchingAssembler patcher(isolate, to_patch, 1);
  DCHECK(to_patch->IsTestBranch());
  DCHECK(to_patch->ImmTestBranchBit5() == 0);
  DCHECK(to_patch->ImmTestBranchBit40() == 0);

  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagMask == 1);
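  // kSmiTag == 0 with a one-bit mask means bit 0 is the tag bit, so a single
  // tbz/tbnz on bit 0 is enough to distinguish smis from heap objects.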

  int branch_imm = to_patch->ImmTestBranch();
  Register smi_reg;
  if (check == ENABLE_INLINED_SMI_CHECK) {
    DCHECK(to_patch->Rt() == xzr.code());
    smi_reg = info.SmiRegister();
  } else {
    DCHECK(check == DISABLE_INLINED_SMI_CHECK);
    DCHECK(to_patch->Rt() != xzr.code());
    smi_reg = xzr;
  }

  if (to_patch->Mask(TestBranchMask) == TBZ) {
    // This is JumpIfNotSmi(smi_reg, branch_imm).
    patcher.tbnz(smi_reg, 0, branch_imm);
  } else {
    DCHECK(to_patch->Mask(TestBranchMask) == TBNZ);
    // This is JumpIfSmi(smi_reg, branch_imm).
    patcher.tbz(smi_reg, 0, branch_imm);
  }
}
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM64