blob: da74582182fb8c4d799a2951970282a81ff15e9c [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
Ben Murdochb8a8cc12014-11-26 15:28:44 +00005#if V8_TARGET_ARCH_X64
6
7#include "src/codegen.h"
8#include "src/ic/ic.h"
9#include "src/ic/ic-compiler.h"
10#include "src/ic/stub-cache.h"
11
12namespace v8 {
13namespace internal {
14
15// ----------------------------------------------------------------------------
16// Static IC stub generators.
17//
18
19#define __ ACCESS_MASM(masm)
20
21
// Emits a branch to |global_object| when |type| holds the instance type of a
// global object or global proxy; falls through for all other instance types.
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry and is unchanged.
  __ cmpb(type, Immediate(JS_GLOBAL_OBJECT_TYPE));
  __ j(equal, global_object);
  __ cmpb(type, Immediate(JS_GLOBAL_PROXY_TYPE));
  __ j(equal, global_object);
}
31
32
// Helper function used to load a property from a dictionary backing storage.
// This function may return false negatives, so miss_label
// must always call a backup property load that is complete.
// This function is safe to call if name is not an internalized string,
// and will jump to the miss_label in that case.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label,
                                   Register elements, Register name,
                                   Register r0, Register r1, Register result) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is unchanged.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // r0 - used to hold the capacity of the property dictionary.
  //
  // r1 - used to hold the index into the property dictionary.
  //
  // result - holds the result on exit if the load succeeded.

  Label done;

  // Probe the dictionary.  On a failed probe this jumps straight to
  // miss_label; on success it jumps to &done with r1 holding the entry index.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss_label, &done,
                                                   elements, name, r0, r1);

  // If probing finds an entry in the dictionary, r1 contains the
  // index into the dictionary. Check that the value is a normal
  // property.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // A non-zero type field means this is not a plain data property (the miss
  // path handles those).  The -kHeapObjectTag compensates for the tagged
  // 'elements' pointer.
  __ Test(Operand(elements, r1, times_pointer_size,
                  kDetailsOffset - kHeapObjectTag),
          Smi::FromInt(PropertyDetails::TypeField::kMask));
  __ j(not_zero, miss_label);

  // Get the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ movp(result, Operand(elements, r1, times_pointer_size,
                          kValueOffset - kHeapObjectTag));
}
79
80
// Helper function used to store a property to a dictionary backing
// storage. This function may fail to store a property even though it
// is in the dictionary, so code at miss_label must always call a
// backup property store that is complete. This function is safe to
// call if name is not an internalized string, and will jump to the miss_label
// in that case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm, Label* miss_label,
                                    Register elements, Register name,
                                    Register value, Register scratch0,
                                    Register scratch1) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is clobbered.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // value - holds the value to store and is unchanged.
  //
  // scratch0 - used during the positive dictionary lookup and is clobbered.
  //
  // scratch1 - used for index into the property dictionary and is clobbered.
  Label done;

  // Probe the dictionary.  On a failed probe this jumps straight to
  // miss_label; on success it jumps to &done.
  NameDictionaryLookupStub::GeneratePositiveLookup(
      masm, miss_label, &done, elements, name, scratch0, scratch1);

  // If probing finds an entry in the dictionary, scratch1 contains the
  // index into the dictionary. Check that the value is a normal
  // property that is not read only.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // Reject non-data properties and read-only data properties in one test.
  const int kTypeAndReadOnlyMask =
      PropertyDetails::TypeField::kMask |
      PropertyDetails::AttributesField::encode(READ_ONLY);
  __ Test(Operand(elements, scratch1, times_pointer_size,
                  kDetailsOffset - kHeapObjectTag),
          Smi::FromInt(kTypeAndReadOnlyMask));
  __ j(not_zero, miss_label);

  // Store the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ leap(scratch1, Operand(elements, scratch1, times_pointer_size,
                            kValueOffset - kHeapObjectTag));
  __ movp(Operand(scratch1, 0), value);

  // Update write barrier. Make sure not to clobber the value.
  __ movp(scratch0, value);
  __ RecordWrite(elements, scratch1, scratch0, kDontSaveFPRegs);
}
135
136
// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS object; jumps to |slow| for smis, value
// wrappers, access-checked objects, and objects with the given interceptor
// bit set.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver, Register map,
                                           int interceptor_bit, Label* slow) {
  // Register use:
  // receiver - holds the receiver and is unchanged.
  // Scratch registers:
  // map - used to hold the map of the receiver.

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);

  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing
  // into string objects work as intended.
  DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ CmpObjectType(receiver, JS_OBJECT_TYPE, map);
  __ j(below, slow);

  // Check bit field: bail out if an access check is required or if the
  // caller-specified interceptor bit is set on the map.
  __ testb(
      FieldOperand(map, Map::kBitFieldOffset),
      Immediate((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
  __ j(not_zero, slow);
}
164
165
// Loads an indexed element from a fast case array.  Out-of-bounds and
// the-hole loads walk the prototype chain and return 'undefined' when every
// prototype is a plain fast-elements object with empty elements; anything
// else bails to |slow|.
static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
                                  Register key, Register elements,
                                  Register scratch, Register result,
                                  Label* slow) {
  // Register use:
  //
  // receiver - holds the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key - holds the smi key on entry.
  //       Unchanged unless 'result' is the same register.
  //
  // result - holds the result on exit if the load succeeded.
  //          Allowed to be the same as 'receiver' or 'key'.
  //          Unchanged on bailout so 'receiver' and 'key' can be safely
  //          used by further computation.
  //
  // Scratch registers:
  //
  // elements - holds the elements of the receiver and its prototypes.
  //
  // scratch - used to hold maps, prototypes, and the loaded value.
  Label check_prototypes, check_next_prototype;
  Label done, in_bounds, absent;

  __ movp(elements, FieldOperand(receiver, JSObject::kElementsOffset));
  __ AssertFastElements(elements);
  // Check that the key (index) is within bounds.
  __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset));
  // Unsigned comparison rejects negative indices.
  __ j(below, &in_bounds);

  // Out-of-bounds. Check the prototype chain to see if we can just return
  // 'undefined'.
  __ SmiCompare(key, Smi::FromInt(0));
  __ j(less, slow);  // Negative keys can't take the fast OOB path.
  __ bind(&check_prototypes);
  __ movp(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ bind(&check_next_prototype);
  __ movp(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
  // scratch: current prototype
  // Reaching null terminates the walk: the element is absent everywhere.
  __ CompareRoot(scratch, Heap::kNullValueRootIndex);
  __ j(equal, &absent);
  __ movp(elements, FieldOperand(scratch, JSObject::kElementsOffset));
  __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
  // elements: elements of current prototype
  // scratch: map of current prototype
  // Any non-plain prototype (special type, access check, indexed
  // interceptor, or non-empty elements) forces the slow path.
  __ CmpInstanceType(scratch, JS_OBJECT_TYPE);
  __ j(below, slow);
  __ testb(FieldOperand(scratch, Map::kBitFieldOffset),
           Immediate((1 << Map::kIsAccessCheckNeeded) |
                     (1 << Map::kHasIndexedInterceptor)));
  __ j(not_zero, slow);
  __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex);
  __ j(not_equal, slow);
  __ jmp(&check_next_prototype);

  __ bind(&absent);
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  __ bind(&in_bounds);
  // Fast case: Do the load.
  SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2);
  __ movp(scratch, FieldOperand(elements, index.reg, index.scale,
                                FixedArray::kHeaderSize));
  __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
  // In case the loaded value is the_hole we have to check the prototype chain.
  __ j(equal, &check_prototypes);
  __ Move(result, scratch);
  __ bind(&done);
}
239
240
// Checks whether a key is an array index string or a unique name.
// Falls through if the key is a unique name.  Jumps to |index_string| with
// the cached array index in |hash|, or to |not_unique| for non-unique keys.
static void GenerateKeyNameCheck(MacroAssembler* masm, Register key,
                                 Register map, Register hash,
                                 Label* index_string, Label* not_unique) {
  // Register use:
  // key - holds the key and is unchanged. Assumed to be non-smi.
  // Scratch registers:
  // map - used to hold the map of the key.
  // hash - used to hold the hash of the key.
  Label unique;
  // Symbols (above LAST_UNIQUE_NAME_TYPE) are not unique names; exactly
  // LAST_UNIQUE_NAME_TYPE is, and everything below is some string type.
  __ CmpObjectType(key, LAST_UNIQUE_NAME_TYPE, map);
  __ j(above, not_unique);
  STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
  __ j(equal, &unique);

  // Is the string an array index, with cached numeric value?
  __ movl(hash, FieldOperand(key, Name::kHashFieldOffset));
  __ testl(hash, Immediate(Name::kContainsCachedArrayIndexMask));
  __ j(zero, index_string);  // The value in hash is used at jump target.

  // Is the string internalized? We already know it's a string so a single
  // bit test is enough.
  STATIC_ASSERT(kNotInternalizedTag != 0);
  __ testb(FieldOperand(map, Map::kInstanceTypeOffset),
           Immediate(kIsNotInternalizedMask));
  __ j(not_zero, not_unique);

  __ bind(&unique);
}
271
// Generates the megamorphic (generic) keyed-load stub: dispatches on the key
// (smi index vs. unique name vs. numeric string) and the receiver's storage
// (fast elements, number dictionary, stub cache, property dictionary),
// falling back to the runtime on any mismatch.
void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // The return address is on the stack.
  Label slow, check_name, index_smi, index_name, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  DCHECK(receiver.is(rdx));
  DCHECK(key.is(rcx));

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(masm, receiver, rax,
                                 Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(rax, &check_number_dictionary);

  GenerateFastArrayLoad(masm, receiver, key, rax, rbx, rax, &slow);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->ic_keyed_load_generic_smi(), 1);
  __ ret(0);

  __ bind(&check_number_dictionary);
  __ SmiToInteger32(rbx, key);
  __ movp(rax, FieldOperand(receiver, JSObject::kElementsOffset));

  // Check whether the elements is a number dictionary.
  // rbx: key as untagged int32
  // rax: elements
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &slow);
  __ LoadFromNumberDictionary(&slow, rax, key, rbx, r9, rdi, rax);
  __ ret(0);

  __ bind(&slow);
  // Slow case: Jump to runtime.
  __ IncrementCounter(counters->ic_keyed_load_generic_slow(), 1);
  KeyedLoadIC::GenerateRuntimeGetProperty(masm);

  __ bind(&check_name);
  GenerateKeyNameCheck(masm, key, rax, rbx, &index_name, &slow);

  GenerateKeyedLoadReceiverCheck(masm, receiver, rax, Map::kHasNamedInterceptor,
                                 &slow);

  // If the receiver is a fast-case object, check the stub cache. Otherwise
  // probe the dictionary.
  __ movp(rbx, FieldOperand(receiver, JSObject::kPropertiesOffset));
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &probe_dictionary);

  Register megamorphic_scratch = rdi;
  // The handlers in the stub cache expect a vector and slot. Since we won't
  // change the IC from any downstream misses, a dummy vector can be used.
  Register vector = LoadWithVectorDescriptor::VectorRegister();
  Register slot = LoadDescriptor::SlotRegister();
  DCHECK(!AreAliased(megamorphic_scratch, vector, slot));
  Handle<TypeFeedbackVector> dummy_vector =
      TypeFeedbackVector::DummyVector(masm->isolate());
  int slot_index = dummy_vector->GetIndex(
      FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedLoadICSlot));
  __ Move(vector, dummy_vector);
  __ Move(slot, Smi::FromInt(slot_index));

  Code::Flags flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::KEYED_LOAD_IC, flags,
                                               receiver, key,
                                               megamorphic_scratch, no_reg);
  // Cache miss.
  GenerateMiss(masm);

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);
  // rbx: elements

  // Global objects/proxies must not take the inline dictionary path.
  __ movp(rax, FieldOperand(receiver, JSObject::kMapOffset));
  __ movb(rax, FieldOperand(rax, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, rax, &slow);

  GenerateDictionaryLoad(masm, &slow, rbx, key, rax, rdi, rax);
  __ IncrementCounter(counters->ic_keyed_load_generic_symbol(), 1);
  __ ret(0);

  __ bind(&index_name);
  // The key is a string with a cached array index: extract the index and
  // re-enter the smi-key path above.
  __ IndexFromHash(rbx, key);
  __ jmp(&index_smi);
}
368
369
// Emits the fast-path stores for the megamorphic keyed-store stub: one
// entry for FixedArray (object/smi) elements and one for FixedDoubleArray
// elements, including the smi->double, smi->object and double->object
// elements-kind transitions.  |check_map| selects whether the elements map
// is re-validated; |increment_length| selects the array-grow variant that
// bumps JSArray::length by one.
static void KeyedStoreGenerateMegamorphicHelper(
    MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
    KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register key = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  DCHECK(receiver.is(rdx));
  DCHECK(key.is(rcx));
  DCHECK(value.is(rax));
  // Fast case: Do the store, could be either Object or double.
  __ bind(fast_object);
  // rbx: receiver's elements array (a FixedArray)
  // receiver is a JSArray.
  // r9: map of receiver
  if (check_map == kCheckMap) {
    __ movp(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
    __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
    __ j(not_equal, fast_double);
  }

  // HOLECHECK: guards "A[i] = V"
  // We have to go to the runtime if the current value is the hole because
  // there may be a callback on the element
  Label holecheck_passed1;
  __ movp(kScratchRegister,
          FieldOperand(rbx, key, times_pointer_size, FixedArray::kHeaderSize));
  __ CompareRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
  __ j(not_equal, &holecheck_passed1);
  __ JumpIfDictionaryInPrototypeChain(receiver, rdi, kScratchRegister, slow);

  __ bind(&holecheck_passed1);

  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(key, 1));
    __ Integer32ToSmiField(FieldOperand(receiver, JSArray::kLengthOffset), rdi);
  }
  // It's irrelevant whether array is smi-only or not when writing a smi.
  __ movp(FieldOperand(rbx, key, times_pointer_size, FixedArray::kHeaderSize),
          value);
  __ ret(0);

  __ bind(&non_smi_value);
  // Writing a non-smi, check whether array allows non-smi elements.
  // r9: receiver's map
  __ CheckFastObjectElements(r9, &transition_smi_elements);

  __ bind(&finish_object_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(key, 1));
    __ Integer32ToSmiField(FieldOperand(receiver, JSArray::kLengthOffset), rdi);
  }
  __ movp(FieldOperand(rbx, key, times_pointer_size, FixedArray::kHeaderSize),
          value);
  __ movp(rdx, value);  // Preserve the value which is returned.
  __ RecordWriteArray(rbx, rdx, key, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ ret(0);

  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    // runtime.
    // rdi: elements array's map
    __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
    __ j(not_equal, slow);
  }

  // HOLECHECK: guards "A[i] double hole?"
  // We have to see if the double version of the hole is present. If so
  // go to the runtime.
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ cmpl(FieldOperand(rbx, key, times_8, offset), Immediate(kHoleNanUpper32));
  __ j(not_equal, &fast_double_without_map_check);
  __ JumpIfDictionaryInPrototypeChain(receiver, rdi, kScratchRegister, slow);

  __ bind(&fast_double_without_map_check);
  // Bails out to transition_double_elements when the value is a non-number
  // heap object.
  __ StoreNumberToDoubleElements(value, rbx, key, kScratchDoubleReg,
                                 &transition_double_elements);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(key, 1));
    __ Integer32ToSmiField(FieldOperand(receiver, JSArray::kLengthOffset), rdi);
  }
  __ ret(0);

  __ bind(&transition_smi_elements);
  __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));

  // Transition the array appropriately depending on the value type.
  __ movp(r9, FieldOperand(value, HeapObject::kMapOffset));
  __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &non_double_value);

  // Value is a double. Transition FAST_SMI_ELEMENTS ->
  // FAST_DOUBLE_ELEMENTS and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS, rbx, rdi, slow);
  AllocationSiteMode mode =
      AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, receiver, key, value,
                                                   rbx, mode, slow);
  // The transition may have moved the elements; reload before storing.
  __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, FAST_ELEMENTS, rbx,
                                         rdi, slow);
  mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
      masm, receiver, key, value, rbx, mode, slow);
  __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
  // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
  __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS,
                                         rbx, rdi, slow);
  mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, receiver, key,
                                                      value, rbx, mode, slow);
  __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}
505
506
// Generates the megamorphic (generic) keyed-store stub: validates the
// receiver and key, dispatches to the fast in-bounds / grow-by-one element
// store paths (emitted by KeyedStoreGenerateMegamorphicHelper), probes the
// stub cache for unique-name keys, and otherwise falls back to the runtime.
void KeyedStoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                       LanguageMode language_mode) {
  // Return address is on the stack.
  Label slow, slow_with_tagged_index, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array, maybe_name_key, miss;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register key = StoreDescriptor::NameRegister();
  DCHECK(receiver.is(rdx));
  DCHECK(key.is(rcx));

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow_with_tagged_index);
  // Get the map from the receiver.
  __ movp(r9, FieldOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks.
  // The generic stub does not perform map checks.
  __ testb(FieldOperand(r9, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_zero, &slow_with_tagged_index);
  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &maybe_name_key);
  // From here on the key is an untagged int32; the slow path below re-tags
  // it before entering &slow_with_tagged_index.
  __ SmiToInteger32(key, key);

  __ CmpInstanceType(r9, JS_ARRAY_TYPE);
  __ j(equal, &array);
  // Check that the object is some kind of JS object EXCEPT JS Value type. In
  // the case that the object is a value-wrapper object, we enter the runtime
  // system to make sure that indexing into string objects works as intended.
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  __ CmpInstanceType(r9, JS_OBJECT_TYPE);
  __ j(below, &slow);

  // Object case: Check key against length in the elements array.
  __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
  // Check array bounds.
  __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), key);
  // rbx: FixedArray
  __ j(above, &fast_object);

  // Slow case: call runtime.
  __ bind(&slow);
  __ Integer32ToSmi(key, key);
  __ bind(&slow_with_tagged_index);
  PropertyICCompiler::GenerateRuntimeSetProperty(masm, language_mode);
  // Never returns to here.

  __ bind(&maybe_name_key);
  __ movp(r9, FieldOperand(key, HeapObject::kMapOffset));
  __ movzxbp(r9, FieldOperand(r9, Map::kInstanceTypeOffset));
  __ JumpIfNotUniqueNameInstanceType(r9, &slow_with_tagged_index);

  Register vector = VectorStoreICDescriptor::VectorRegister();
  Register slot = VectorStoreICDescriptor::SlotRegister();
  // The handlers in the stub cache expect a vector and slot. Since we won't
  // change the IC from any downstream misses, a dummy vector can be used.
  Handle<TypeFeedbackVector> dummy_vector =
      TypeFeedbackVector::DummyVector(masm->isolate());
  int slot_index = dummy_vector->GetIndex(
      FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot));
  __ Move(vector, dummy_vector);
  __ Move(slot, Smi::FromInt(slot_index));

  Code::Flags flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, Code::KEYED_STORE_IC, flags, receiver, key, r9, no_reg);
  // Cache miss.
  __ jmp(&miss);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // receiver is a JSArray.
  // rbx: receiver's elements array (a FixedArray)
  // flags: smicompare (receiver.length(), key)
  __ j(not_equal, &slow);  // do not leave holes in the array
  __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), key);
  __ j(below_equal, &slow);
  // Increment index to get new length.
  __ movp(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
  __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &check_if_double_array);
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  // rdi: elements array's map
  __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ j(not_equal, &slow);
  __ jmp(&fast_double_grow);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is the length is always a smi.
  __ bind(&array);
  // receiver is a JSArray.
  __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));

  // Check the key against the length in the array, compute the
  // address to store into and fall through to fast case.
  __ SmiCompareInteger32(FieldOperand(receiver, JSArray::kLengthOffset), key);
  __ j(below_equal, &extra);

  KeyedStoreGenerateMegamorphicHelper(masm, &fast_object, &fast_double, &slow,
                                      kCheckMap, kDontIncrementLength);
  KeyedStoreGenerateMegamorphicHelper(masm, &fast_object_grow,
                                      &fast_double_grow, &slow, kDontCheckMap,
                                      kIncrementLength);

  __ bind(&miss);
  GenerateMiss(masm);
}
620
// Generates the load stub for receivers with dictionary (normal) properties:
// tries an inline dictionary lookup and falls back to the runtime property
// load (not a miss) when the probe fails.
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  Register dictionary = rax;
  DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister()));
  DCHECK(!dictionary.is(LoadDescriptor::NameRegister()));

  Label slow;

  __ movp(dictionary, FieldOperand(LoadDescriptor::ReceiverRegister(),
                                   JSObject::kPropertiesOffset));
  // Result is produced in rax, which doubles as the dictionary register.
  GenerateDictionaryLoad(masm, &slow, dictionary,
                         LoadDescriptor::NameRegister(), rbx, rdi, rax);
  __ ret(0);

  // Dictionary load failed, go slow (but don't miss).
  __ bind(&slow);
  LoadIC::GenerateRuntimeGetProperty(masm);
}
638
639
// Pushes the four load-IC arguments (receiver, name, slot, vector) beneath
// the return address, in that order, for a tail call into the runtime.
// Clobbers rdi (used to hold the return address while pushing).
static void LoadIC_PushArgs(MacroAssembler* masm) {
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register name = LoadDescriptor::NameRegister();
  Register slot = LoadDescriptor::SlotRegister();
  Register vector = LoadWithVectorDescriptor::VectorRegister();
  DCHECK(!rdi.is(receiver) && !rdi.is(name) && !rdi.is(slot) &&
         !rdi.is(vector));

  __ PopReturnAddressTo(rdi);
  __ Push(receiver);
  __ Push(name);
  __ Push(slot);
  __ Push(vector);
  __ PushReturnAddressFrom(rdi);
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000655
656
// Generates the load-IC miss handler: counts the miss, pushes the IC
// arguments and tail-calls the Runtime::kLoadIC_Miss entry.
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is on the stack.

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->ic_load_miss(), 1);

  LoadIC_PushArgs(masm);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kLoadIC_Miss);
}
668
// Tail-calls Runtime::kGetProperty with (receiver, name) pushed beneath the
// return address.  Unlike GenerateMiss this does not push the slot/vector,
// so the IC state is left unchanged.
void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // The return address is on the stack.
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register name = LoadDescriptor::NameRegister();

  DCHECK(!rbx.is(receiver) && !rbx.is(name));

  __ PopReturnAddressTo(rbx);
  __ Push(receiver);
  __ Push(name);
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kGetProperty);
}
684
685
// Generates the keyed-load-IC miss handler: counts the miss, pushes the IC
// arguments and tail-calls the Runtime::kKeyedLoadIC_Miss entry.
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is on the stack.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->ic_keyed_load_miss(), 1);

  LoadIC_PushArgs(masm);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedLoadIC_Miss);
}
696
// Tail-calls Runtime::kKeyedGetProperty with (receiver, name) pushed beneath
// the return address.  Does not push slot/vector, so the IC state is left
// unchanged.
void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // The return address is on the stack.
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register name = LoadDescriptor::NameRegister();

  DCHECK(!rbx.is(receiver) && !rbx.is(name));

  __ PopReturnAddressTo(rbx);
  __ Push(receiver);
  __ Push(name);
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kKeyedGetProperty);
}
712
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000713static void StoreIC_PushArgs(MacroAssembler* masm) {
714 Register receiver = StoreDescriptor::ReceiverRegister();
715 Register name = StoreDescriptor::NameRegister();
716 Register value = StoreDescriptor::ValueRegister();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000717 Register temp = r11;
718 DCHECK(!temp.is(receiver) && !temp.is(name) && !temp.is(value));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000719
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000720 __ PopReturnAddressTo(temp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000721 __ Push(receiver);
722 __ Push(name);
723 __ Push(value);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000724 Register slot = VectorStoreICDescriptor::SlotRegister();
725 Register vector = VectorStoreICDescriptor::VectorRegister();
726 DCHECK(!temp.is(slot) && !temp.is(vector));
727 __ Push(slot);
728 __ Push(vector);
729 __ PushReturnAddressFrom(temp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000730}
731
732
733void StoreIC::GenerateMiss(MacroAssembler* masm) {
734 // Return address is on the stack.
735 StoreIC_PushArgs(masm);
736
737 // Perform tail call to the entry.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000738 __ TailCallRuntime(Runtime::kStoreIC_Miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000739}
740
741
742void StoreIC::GenerateNormal(MacroAssembler* masm) {
743 Register receiver = StoreDescriptor::ReceiverRegister();
744 Register name = StoreDescriptor::NameRegister();
745 Register value = StoreDescriptor::ValueRegister();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000746 Register dictionary = r11;
747 DCHECK(!AreAliased(dictionary, VectorStoreICDescriptor::VectorRegister(),
748 VectorStoreICDescriptor::SlotRegister()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000749
750 Label miss;
751
752 __ movp(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset));
753 GenerateDictionaryStore(masm, &miss, dictionary, name, value, r8, r9);
754 Counters* counters = masm->isolate()->counters();
Ben Murdoch097c5b22016-05-18 11:27:45 +0100755 __ IncrementCounter(counters->ic_store_normal_hit(), 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000756 __ ret(0);
757
758 __ bind(&miss);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100759 __ IncrementCounter(counters->ic_store_normal_miss(), 1);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000760 GenerateMiss(masm);
761}
762
763
764void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
765 // Return address is on the stack.
766 StoreIC_PushArgs(masm);
767
768 // Do tail-call to runtime routine.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000769 __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000770}
771
772
773#undef __
774
775
776Condition CompareIC::ComputeCondition(Token::Value op) {
777 switch (op) {
778 case Token::EQ_STRICT:
779 case Token::EQ:
780 return equal;
781 case Token::LT:
782 return less;
783 case Token::GT:
784 return greater;
785 case Token::LTE:
786 return less_equal;
787 case Token::GTE:
788 return greater_equal;
789 default:
790 UNREACHABLE();
791 return no_condition;
792 }
793}
794
795
796bool CompareIC::HasInlinedSmiCode(Address address) {
797 // The address of the instruction following the call.
798 Address test_instruction_address =
799 address + Assembler::kCallTargetAddressOffset;
800
801 // If the instruction following the call is not a test al, nothing
802 // was inlined.
803 return *test_instruction_address == Assembler::kTestAlByte;
804}
805
806
// Toggles the inlined smi check at a CompareIC call site between enabled and
// disabled by rewriting the short conditional jump that guards it.  The call
// site layout is: call target, then a one-byte test-al marker, then a delta
// byte locating the jump to patch.  No-ops when no smi code was inlined.
void PatchInlinedSmiCode(Isolate* isolate, Address address,
                         InlinedSmiCheck check) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.  (The only other byte emitted there is a nop.)
  if (*test_instruction_address != Assembler::kTestAlByte) {
    DCHECK(*test_instruction_address == Assembler::kNopByte);
    return;
  }

  // The byte after the test-al marker encodes the backward distance to the
  // jump instruction that must be patched.
  Address delta_address = test_instruction_address + 1;
  // The delta to the start of the map check instruction and the
  // condition code uses at the patched jump.
  uint8_t delta = *reinterpret_cast<uint8_t*>(delta_address);
  if (FLAG_trace_ic) {
    PrintF("[ patching ic at %p, test=%p, delta=%d\n",
           static_cast<void*>(address),
           static_cast<void*>(test_instruction_address), delta);
  }

  // Patch with a short conditional jump. Enabling means switching from a short
  // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
  // reverse operation of that.
  Address jmp_address = test_instruction_address - delta;
  // Sanity-check that the byte being patched really is one of the expected
  // short-jump opcodes for the current enable/disable direction.
  DCHECK((check == ENABLE_INLINED_SMI_CHECK)
             ? (*jmp_address == Assembler::kJncShortOpcode ||
                *jmp_address == Assembler::kJcShortOpcode)
             : (*jmp_address == Assembler::kJnzShortOpcode ||
                *jmp_address == Assembler::kJzShortOpcode));
  // Preserve the jump's polarity (taken vs. not-taken) while swapping the
  // carry-based condition for a zero-based one, or vice versa.
  Condition cc =
      (check == ENABLE_INLINED_SMI_CHECK)
          ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
          : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
  // Rewrite the opcode in place: short-jcc prefix plus the new condition.
  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000845} // namespace internal
846} // namespace v8
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000847
848#endif // V8_TARGET_ARCH_X64