// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/codegen.h"
#include "src/ic/ic.h"
#include "src/ic/ic-compiler.h"
#include "src/ic/stub-cache.h"

namespace v8 {
namespace internal {


// ----------------------------------------------------------------------------
// Static IC stub generators.
//

#define __ ACCESS_MASM(masm)


static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry.
  __ cmpi(type, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ beq(global_object);
  __ cmpi(type, Operand(JS_GLOBAL_PROXY_TYPE));
  __ beq(global_object);
}


// Helper function used from LoadIC::GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label is
//           done.
// result:   Register for the result. It is only updated if a jump to the miss
//           label is not done. It may be the same register as 'elements' or
//           'name', in which case that register is clobbered on success.
// The two scratch registers need to be different from elements, name and
// result.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss,
                                   Register elements, Register name,
                                   Register result, Register scratch1,
                                   Register scratch2) {
  // Main use of the scratch registers.
  // scratch1: Used as temporary and to hold the capacity of the property
  //           dictionary.
  // scratch2: Used as temporary.
  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss, &done, elements,
                                                   name, scratch1, scratch2);

  // If probing finds an entry, check that the value is a normal
  // property.
  __ bind(&done);  // scratch2 == elements + kPointerSize * index
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
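  // Each dictionary entry is a (key, value, details) triple, which is why the
  // details word sits two pointers past the entry start and the value (loaded
  // at the end of this function) one pointer past it.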
  __ LoadP(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
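  // scratch2 (the entry address) is about to be reused to hold the mask, so
  // park it in r0 and restore it once the details check is done.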
  __ mr(r0, scratch2);
  __ LoadSmiLiteral(scratch2, Smi::FromInt(PropertyDetails::TypeField::kMask));
  __ and_(scratch2, scratch1, scratch2, SetRC);
  __ bne(miss, cr0);
  __ mr(scratch2, r0);

  // Get the value at the masked, scaled index and return.
  __ LoadP(result,
           FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
}


// Helper function used from StoreIC::GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label is
//           done.
// value:    The value to store.
// The two scratch registers need to be different from elements, name and
// value.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
92static void GenerateDictionaryStore(MacroAssembler* masm, Label* miss,
93 Register elements, Register name,
94 Register value, Register scratch1,
95 Register scratch2) {
96 // Main use of the scratch registers.
97 // scratch1: Used as temporary and to hold the capacity of the property
98 // dictionary.
99 // scratch2: Used as temporary.
100 Label done;
101
102 // Probe the dictionary.
103 NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss, &done, elements,
104 name, scratch1, scratch2);
105
106 // If probing finds an entry in the dictionary check that the value
107 // is a normal property that is not read only.
108 __ bind(&done); // scratch2 == elements + 4 * index
109 const int kElementsStartOffset =
110 NameDictionary::kHeaderSize +
111 NameDictionary::kElementsStartIndex * kPointerSize;
112 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
113 int kTypeAndReadOnlyMask =
114 PropertyDetails::TypeField::kMask |
115 PropertyDetails::AttributesField::encode(READ_ONLY);
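  // Any type bits or the READ_ONLY attribute in the details word mean this is
  // not a plain writable data property, so the store must go to the miss
  // label.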
  __ LoadP(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ mr(r0, scratch2);
  __ LoadSmiLiteral(scratch2, Smi::FromInt(kTypeAndReadOnlyMask));
  __ and_(scratch2, scratch1, scratch2, SetRC);
  __ bne(miss, cr0);
  __ mr(scratch2, r0);

  // Store the value at the masked, scaled index and return.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
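  // Subtracting kHeapObjectTag leaves scratch2 holding the raw (untagged)
  // address of the value slot, which both the store and the write barrier
  // below expect.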
  __ addi(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
  __ StoreP(value, MemOperand(scratch2));

  // Update the write barrier. Make sure not to clobber the value.
  __ mr(scratch1, value);
  __ RecordWrite(elements, scratch2, scratch1, kLRHasNotBeenSaved,
                 kDontSaveFPRegs);
}


// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS objects.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver, Register map,
                                           Register scratch,
                                           int interceptor_bit, Label* slow) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);
  // Get the map of the receiver.
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check bit field.
  __ lbz(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
  DCHECK(((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)) < 0x8000);
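  // The combined mask must fit in andi's 16-bit unsigned immediate field;
  // the DCHECK above guarantees that it does.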
  __ andi(r0, scratch,
          Operand((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
  __ bne(slow, cr0);
  // Check that the object is some kind of JS object EXCEPT JS Value type. In
  // the case that the object is a value-wrapper object, we enter the runtime
  // system to make sure that indexing into string objects works as intended.
  DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ lbz(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmpi(scratch, Operand(JS_OBJECT_TYPE));
  __ blt(slow);
}


// Loads an indexed element from a fast case array.
static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
                                  Register key, Register elements,
                                  Register scratch1, Register scratch2,
                                  Register result, Label* slow) {
  // Register use:
  //
  // receiver - holds the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'receiver' or 'key'.
  //            Unchanged on bailout so 'receiver' and 'key' can be safely
  //            used by further computation.
  //
  // Scratch registers:
  //
  // elements - holds the elements of the receiver and its prototypes.
  //
  // scratch1 - used to hold elements length, bit fields, base addresses.
  //
  // scratch2 - used to hold maps, prototypes, and the loaded value.
  Label check_prototypes, check_next_prototype;
  Label done, in_bounds, absent;

  __ LoadP(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ AssertFastElements(elements);

  // Check that the key (index) is within bounds.
  __ LoadP(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmpl(key, scratch1);
  __ blt(&in_bounds);
  // Out-of-bounds. Check the prototype chain to see if we can just return
  // 'undefined'.
  __ cmpi(key, Operand::Zero());
  __ blt(slow);  // Negative keys can't take the fast OOB path.
  __ bind(&check_prototypes);
  __ LoadP(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ bind(&check_next_prototype);
  __ LoadP(scratch2, FieldMemOperand(scratch2, Map::kPrototypeOffset));
  // scratch2: current prototype
  __ CompareRoot(scratch2, Heap::kNullValueRootIndex);
  __ beq(&absent);
  __ LoadP(elements, FieldMemOperand(scratch2, JSObject::kElementsOffset));
  __ LoadP(scratch2, FieldMemOperand(scratch2, HeapObject::kMapOffset));
  // elements: elements of current prototype
  // scratch2: map of current prototype
  __ CompareInstanceType(scratch2, scratch1, JS_OBJECT_TYPE);
  __ blt(slow);
  __ lbz(scratch1, FieldMemOperand(scratch2, Map::kBitFieldOffset));
  __ andi(r0, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) |
                                (1 << Map::kHasIndexedInterceptor)));
  __ bne(slow, cr0);
  __ CompareRoot(elements, Heap::kEmptyFixedArrayRootIndex);
  __ bne(slow);
  __ jmp(&check_next_prototype);
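  // The walk above stays on the fast path only while every prototype is a
  // plain JS object with no access checks, no indexed interceptor, and empty
  // elements; anything else bails out to the runtime.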

  __ bind(&absent);
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  __ bind(&in_bounds);
  // Fast case: Do the load.
  __ addi(scratch1, elements,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // The key is a smi.
  __ SmiToPtrArrayOffset(scratch2, key);
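  // SmiToPtrArrayOffset turns the smi key directly into a byte offset of
  // key * kPointerSize, folding the untag and the scale together.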
  __ LoadPX(scratch2, MemOperand(scratch2, scratch1));
  __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex);
  // In case the loaded value is the_hole we have to check the prototype chain.
  __ beq(&check_prototypes);
  __ mr(result, scratch2);
  __ bind(&done);
}


// Checks whether a key is an array index string or a unique name.
// Falls through if a key is a unique name.
static void GenerateKeyNameCheck(MacroAssembler* masm, Register key,
                                 Register map, Register hash,
                                 Label* index_string, Label* not_unique) {
  // The key is not a smi.
  Label unique;
  // Is it a name?
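  // CompareObjectType loads the key's map into 'map' and its instance type
  // into 'hash'; 'hash' is reloaded with the real hash field further down.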
  __ CompareObjectType(key, map, hash, LAST_UNIQUE_NAME_TYPE);
  __ bgt(not_unique);
  STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
  __ beq(&unique);

  // Is the string an array index, with cached numeric value?
  __ lwz(hash, FieldMemOperand(key, Name::kHashFieldOffset));
  __ mov(r8, Operand(Name::kContainsCachedArrayIndexMask));
  __ and_(r0, hash, r8, SetRC);
  __ beq(index_string, cr0);

  // Is the string internalized? We know it's a string, so a single
  // bit test is enough.
  // map: key map
  __ lbz(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0);
  __ andi(r0, hash, Operand(kIsNotInternalizedMask));
  __ bne(not_unique, cr0);

  __ bind(&unique);
}

void LoadIC::GenerateNormal(MacroAssembler* masm) {
  Register dictionary = r3;
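  // r3 can double as the dictionary register because GenerateDictionaryLoad
  // allows 'result' to alias 'elements', and r3 is also the register the
  // result is returned in.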
  DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister()));
  DCHECK(!dictionary.is(LoadDescriptor::NameRegister()));

  Label slow;

  __ LoadP(dictionary, FieldMemOperand(LoadDescriptor::ReceiverRegister(),
                                       JSObject::kPropertiesOffset));
  GenerateDictionaryLoad(masm, &slow, dictionary,
                         LoadDescriptor::NameRegister(), r3, r6, r7);
  __ Ret();

  // Dictionary load failed, go slow (but don't miss).
  __ bind(&slow);
  GenerateRuntimeGetProperty(masm);
}


// A register that isn't one of the parameters to the load ic.
static const Register LoadIC_TempRegister() { return r6; }


static void LoadIC_PushArgs(MacroAssembler* masm) {
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register name = LoadDescriptor::NameRegister();
  Register slot = LoadDescriptor::SlotRegister();
  Register vector = LoadWithVectorDescriptor::VectorRegister();

  __ Push(receiver, name, slot, vector);
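  // The push order must match the order in which the IC miss handlers read
  // their arguments off the stack.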
}


void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is in lr.
  Isolate* isolate = masm->isolate();

  DCHECK(!AreAliased(r7, r8, LoadWithVectorDescriptor::SlotRegister(),
                     LoadWithVectorDescriptor::VectorRegister()));
  __ IncrementCounter(isolate->counters()->ic_load_miss(), 1, r7, r8);

  LoadIC_PushArgs(masm);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kLoadIC_Miss);
}

void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // The return address is in lr.

  __ mr(LoadIC_TempRegister(), LoadDescriptor::ReceiverRegister());
  __ Push(LoadIC_TempRegister(), LoadDescriptor::NameRegister());

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kGetProperty);
}


void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is in lr.
  Isolate* isolate = masm->isolate();

  DCHECK(!AreAliased(r7, r8, LoadWithVectorDescriptor::SlotRegister(),
                     LoadWithVectorDescriptor::VectorRegister()));
  __ IncrementCounter(isolate->counters()->ic_keyed_load_miss(), 1, r7, r8);

  LoadIC_PushArgs(masm);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedLoadIC_Miss);
}

void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // The return address is in lr.

  __ Push(LoadDescriptor::ReceiverRegister(), LoadDescriptor::NameRegister());

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kKeyedGetProperty);
}

void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // The return address is in lr.
  Label slow, check_name, index_smi, index_name, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register key = LoadDescriptor::NameRegister();
  Register receiver = LoadDescriptor::ReceiverRegister();
  DCHECK(key.is(r5));
  DCHECK(receiver.is(r4));

  Isolate* isolate = masm->isolate();

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(masm, receiver, r3, r6,
                                 Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(r3, r6, &check_number_dictionary);

  GenerateFastArrayLoad(masm, receiver, key, r3, r6, r7, r3, &slow);
  __ IncrementCounter(isolate->counters()->ic_keyed_load_generic_smi(), 1, r7,
                      r6);
  __ Ret();

  __ bind(&check_number_dictionary);
  __ LoadP(r7, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ LoadP(r6, FieldMemOperand(r7, JSObject::kMapOffset));

  // Check whether the elements object is a number dictionary.
  // r6: elements map
  // r7: elements
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r6, ip);
  __ bne(&slow);
  __ SmiUntag(r3, key);
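  // LoadFromNumberDictionary needs the untagged key (r3 here, hence the
  // SmiUntag above) in addition to the smi key; r3 also receives the loaded
  // value.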
  __ LoadFromNumberDictionary(&slow, r7, key, r3, r3, r6, r8);
  __ Ret();

  // Slow case, key and receiver still in r5 and r4.
  __ bind(&slow);
  __ IncrementCounter(isolate->counters()->ic_keyed_load_generic_slow(), 1, r7,
                      r6);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_name);
  GenerateKeyNameCheck(masm, key, r3, r6, &index_name, &slow);

  GenerateKeyedLoadReceiverCheck(masm, receiver, r3, r6,
                                 Map::kHasNamedInterceptor, &slow);

  // If the receiver is a fast-case object, check the stub cache. Otherwise
  // probe the dictionary.
  __ LoadP(r6, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r7, ip);
  __ beq(&probe_dictionary);

  // The handlers in the stub cache expect a vector and slot. Since we won't
  // change the IC from any downstream misses, a dummy vector can be used.
  Register vector = LoadWithVectorDescriptor::VectorRegister();
  Register slot = LoadWithVectorDescriptor::SlotRegister();
  DCHECK(!AreAliased(vector, slot, r7, r8, r9, r10));
  Handle<TypeFeedbackVector> dummy_vector =
      TypeFeedbackVector::DummyVector(masm->isolate());
  int slot_index = dummy_vector->GetIndex(
      FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedLoadICSlot));
  __ LoadRoot(vector, Heap::kDummyVectorRootIndex);
  __ LoadSmiLiteral(slot, Smi::FromInt(slot_index));

  Code::Flags flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::KEYED_LOAD_IC, flags,
                                               receiver, key, r7, r8, r9, r10);
  // Cache miss.
  GenerateMiss(masm);

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);
  // r6: elements
  __ LoadP(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbz(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, r3, &slow);
  // Load the property to r3.
  GenerateDictionaryLoad(masm, &slow, r6, key, r3, r8, r7);
  __ IncrementCounter(isolate->counters()->ic_keyed_load_generic_symbol(), 1,
                      r7, r6);
  __ Ret();

  __ bind(&index_name);
  __ IndexFromHash(r6, key);
  // Now jump to the place where smi keys are handled.
  __ b(&index_smi);
}


static void StoreIC_PushArgs(MacroAssembler* masm) {
  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
          StoreDescriptor::ValueRegister(),
          VectorStoreICDescriptor::SlotRegister(),
          VectorStoreICDescriptor::VectorRegister());
}


void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss);
}


static void KeyedStoreGenerateMegamorphicHelper(
    MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
    KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length,
    Register value, Register key, Register receiver, Register receiver_map,
    Register elements_map, Register elements) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;

  // Fast case: Do the store, could be either Object or double.
  __ bind(fast_object);
  Register scratch = r7;
  Register address = r8;
  DCHECK(!AreAliased(value, key, receiver, receiver_map, elements_map, elements,
                     scratch, address));

  if (check_map == kCheckMap) {
    __ LoadP(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
    __ mov(scratch, Operand(masm->isolate()->factory()->fixed_array_map()));
    __ cmp(elements_map, scratch);
    __ bne(fast_double);
  }

  // HOLECHECK: guards "A[i] = V"
  // We have to go to the runtime if the current value is the hole because
  // there may be a callback on the element.
  Label holecheck_passed1;
  __ addi(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiToPtrArrayOffset(scratch, key);
  __ LoadPX(scratch, MemOperand(address, scratch));
  __ Cmpi(scratch, Operand(masm->isolate()->factory()->the_hole_value()), r0);
  __ bne(&holecheck_passed1);
  __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, scratch, slow);

  __ bind(&holecheck_passed1);

  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);

  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ AddSmiLiteral(scratch, key, Smi::FromInt(1), r0);
    __ StoreP(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset), r0);
  }
  // It's irrelevant whether array is smi-only or not when writing a smi.
  __ addi(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiToPtrArrayOffset(scratch, key);
  __ StorePX(value, MemOperand(address, scratch));
  __ Ret();

  __ bind(&non_smi_value);
  // Escape to elements kind transition case.
  __ CheckFastObjectElements(receiver_map, scratch, &transition_smi_elements);

  // Fast elements array, store the value to the elements backing store.
  __ bind(&finish_object_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ AddSmiLiteral(scratch, key, Smi::FromInt(1), r0);
    __ StoreP(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset), r0);
  }
  __ addi(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiToPtrArrayOffset(scratch, key);
  __ StorePUX(value, MemOperand(address, scratch));
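  // StorePUX updates 'address' to the element's effective address, which is
  // exactly the slot address RecordWrite needs below.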
  // Update write barrier for the elements array address.
  __ mr(scratch, value);  // Preserve the value which is returned.
  __ RecordWrite(elements, address, scratch, kLRHasNotBeenSaved,
                 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ Ret();

  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    // runtime.
    __ CompareRoot(elements_map, Heap::kFixedDoubleArrayMapRootIndex);
    __ bne(slow);
  }

  // HOLECHECK: guards "A[i] double hole?"
  // We have to see if the double version of the hole is present. If so,
  // go to the runtime.
  __ addi(address, elements,
          Operand((FixedDoubleArray::kHeaderSize + Register::kExponentOffset -
                   kHeapObjectTag)));
  __ SmiToDoubleArrayOffset(scratch, key);
  __ lwzx(scratch, MemOperand(address, scratch));
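  // Only the upper (exponent) word of the double is loaded, since
  // kHoleNanUpper32 alone identifies the hole NaN.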
  __ Cmpi(scratch, Operand(kHoleNanUpper32), r0);
  __ bne(&fast_double_without_map_check);
  __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, scratch, slow);

  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value, key, elements, scratch, d0,
                                 &transition_double_elements);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ AddSmiLiteral(scratch, key, Smi::FromInt(1), r0);
    __ StoreP(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset), r0);
  }
  __ Ret();

  __ bind(&transition_smi_elements);
  // Transition the array appropriately depending on the value type.
  __ LoadP(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
  __ CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
  __ bne(&non_double_value);

  // Value is a double. Transition FAST_SMI_ELEMENTS ->
  // FAST_DOUBLE_ELEMENTS and complete the store.
  __ LoadTransitionedArrayMapConditional(
      FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS, receiver_map, scratch, slow);
  AllocationSiteMode mode =
      AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, receiver, key, value,
                                                   receiver_map, mode, slow);
  __ LoadP(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ b(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, FAST_ELEMENTS,
                                         receiver_map, scratch, slow);
  mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
      masm, receiver, key, value, receiver_map, mode, slow);
  __ LoadP(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ b(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS and
  // transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS.
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS,
                                         receiver_map, scratch, slow);
  mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateDoubleToObject(
      masm, receiver, key, value, receiver_map, mode, slow);
  __ LoadP(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ b(&finish_object_store);
}


void KeyedStoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                       LanguageMode language_mode) {
  // ---------- S t a t e --------------
  //  -- r3     : value
  //  -- r4     : receiver
  //  -- r5     : key
  //  -- lr     : return address
  // -----------------------------------
  Label slow, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array, maybe_name_key, miss;

  // Register usage.
  Register value = StoreDescriptor::ValueRegister();
  Register key = StoreDescriptor::NameRegister();
  Register receiver = StoreDescriptor::ReceiverRegister();
  DCHECK(receiver.is(r4));
  DCHECK(key.is(r5));
  DCHECK(value.is(r3));
  Register receiver_map = r6;
  Register elements_map = r9;
  Register elements = r10;  // Elements array of the receiver.
  // r7 and r8 are used as general scratch registers.

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &maybe_name_key);
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);
  // Get the map of the object.
  __ LoadP(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks.
  // The generic stub does not perform map checks.
  __ lbz(ip, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
  __ andi(r0, ip, Operand(1 << Map::kIsAccessCheckNeeded));
  __ bne(&slow, cr0);
  // Check if the object is a JS array or not.
  __ lbz(r7, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
  __ cmpi(r7, Operand(JS_ARRAY_TYPE));
  __ beq(&array);
  // Check that the object is some kind of JSObject.
  __ cmpi(r7, Operand(FIRST_JS_OBJECT_TYPE));
  __ blt(&slow);

  // Object case: Check key against length in the elements array.
  __ LoadP(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  // Check array bounds. Both the key and the length of FixedArray are smis.
  __ LoadP(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmpl(key, ip);
  __ blt(&fast_object);

  // Slow case, handle jump to runtime.
  __ bind(&slow);
  // Entry registers are intact.
  // r3: value.
  // r4: receiver.
  // r5: key.
  PropertyICCompiler::GenerateRuntimeSetProperty(masm, language_mode);
  // Never returns to here.

  __ bind(&maybe_name_key);
  __ LoadP(r7, FieldMemOperand(key, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kInstanceTypeOffset));
  __ JumpIfNotUniqueNameInstanceType(r7, &slow);

  // The handlers in the stub cache expect a vector and slot. Since we won't
  // change the IC from any downstream misses, a dummy vector can be used.
  Register vector = VectorStoreICDescriptor::VectorRegister();
  Register slot = VectorStoreICDescriptor::SlotRegister();
  DCHECK(!AreAliased(vector, slot, r8, r9, r10, r11));
  Handle<TypeFeedbackVector> dummy_vector =
      TypeFeedbackVector::DummyVector(masm->isolate());
  int slot_index = dummy_vector->GetIndex(
      FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot));
  __ LoadRoot(vector, Heap::kDummyVectorRootIndex);
  __ LoadSmiLiteral(slot, Smi::FromInt(slot_index));

  Code::Flags flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, Code::KEYED_STORE_IC, flags, receiver, key, r8, r9, r10, r11);
  // Cache miss.
  __ b(&miss);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // Condition code from comparing key and array length is still available.
  __ bne(&slow);  // Only support writing to array[array.length].
  // Check for room in the elements backing store.
  // Both the key and the length of FixedArray are smis.
  __ LoadP(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmpl(key, ip);
  __ bge(&slow);
  __ LoadP(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ mov(ip, Operand(masm->isolate()->factory()->fixed_array_map()));
  __ cmp(elements_map, ip);  // ip is free for re-use here.
  __ bne(&check_if_double_array);
  __ b(&fast_object_grow);

  __ bind(&check_if_double_array);
  __ mov(ip, Operand(masm->isolate()->factory()->fixed_double_array_map()));
  __ cmp(elements_map, ip);  // ip re-used again.
  __ bne(&slow);
  __ b(&fast_double_grow);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is the length is always a smi.
  __ bind(&array);
  __ LoadP(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));

  // Check the key against the length in the array.
  __ LoadP(ip, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ cmpl(key, ip);
  __ bge(&extra);
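  // Falling through here means key < length: an in-bounds store into an
  // existing element, handled by the fast_object code that the first helper
  // call below emits.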

  KeyedStoreGenerateMegamorphicHelper(
      masm, &fast_object, &fast_double, &slow, kCheckMap, kDontIncrementLength,
      value, key, receiver, receiver_map, elements_map, elements);
  KeyedStoreGenerateMegamorphicHelper(masm, &fast_object_grow,
                                      &fast_double_grow, &slow, kDontCheckMap,
                                      kIncrementLength, value, key, receiver,
                                      receiver_map, elements_map, elements);
  __ bind(&miss);
  GenerateMiss(masm);
}

void StoreIC::GenerateMiss(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kStoreIC_Miss);
}


void StoreIC::GenerateNormal(MacroAssembler* masm) {
  Label miss;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register name = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  Register dictionary = r8;
  DCHECK(receiver.is(r4));
  DCHECK(name.is(r5));
  DCHECK(value.is(r3));
  DCHECK(VectorStoreICDescriptor::VectorRegister().is(r6));
  DCHECK(VectorStoreICDescriptor::SlotRegister().is(r7));

  __ LoadP(dictionary, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  GenerateDictionaryStore(masm, &miss, dictionary, name, value, r9, r10);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->ic_store_normal_hit(), 1, r9, r10);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(counters->ic_store_normal_miss(), 1, r9, r10);
  GenerateMiss(masm);
}


#undef __


Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return kNoCondition;
  }
}


bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address cmp_instruction_address =
      Assembler::return_address_from_call_start(address);

  // If the instruction following the call is not a cmp rx, #yyy, nothing
  // was inlined.
  Instr instr = Assembler::instr_at(cmp_instruction_address);
  return Assembler::IsCmpImmediate(instr);
}


//
// This code is paired with the JumpPatchSite class in full-codegen-ppc.cc
//
void PatchInlinedSmiCode(Isolate* isolate, Address address,
                         InlinedSmiCheck check) {
  Address cmp_instruction_address =
      Assembler::return_address_from_call_start(address);

  // If the instruction following the call is not a cmp rx, #yyy, nothing
  // was inlined.
  Instr instr = Assembler::instr_at(cmp_instruction_address);
  if (!Assembler::IsCmpImmediate(instr)) {
    return;
  }

  // The delta to the start of the map check instruction and the
  // condition code used at the patched jump.
  int delta = Assembler::GetCmpImmediateRawImmediate(instr);
  delta += Assembler::GetCmpImmediateRegister(instr).code() * kOff16Mask;
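  // Deltas too large for the 16-bit immediate appear to be split by
  // JumpPatchSite across the immediate and register fields of the cmp;
  // recombine them here (see full-codegen-ppc.cc).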
  // If the delta is 0 the instruction is cmp r0, #0 which also signals that
  // nothing was inlined.
  if (delta == 0) {
    return;
  }

  if (FLAG_trace_ic) {
    PrintF("[ patching ic at %p, cmp=%p, delta=%d\n",
           static_cast<void*>(address),
           static_cast<void*>(cmp_instruction_address), delta);
  }

  Address patch_address =
      cmp_instruction_address - delta * Instruction::kInstrSize;
  Instr instr_at_patch = Assembler::instr_at(patch_address);
  Instr branch_instr =
      Assembler::instr_at(patch_address + Instruction::kInstrSize);
  // This is patching a conditional "jump if not smi/jump if smi" site.
  // Enabling by changing from
  //   cmp cr0, rx, rx
  // to
  //   rlwinm(r0, value, 0, 31, 31, SetRC);
  //   bc(label, BT/BF, 2)
  // and vice-versa to be disabled again.
  CodePatcher patcher(isolate, patch_address, 2);
  Register reg = Assembler::GetRA(instr_at_patch);
  if (check == ENABLE_INLINED_SMI_CHECK) {
    DCHECK(Assembler::IsCmpRegister(instr_at_patch));
    DCHECK_EQ(Assembler::GetRA(instr_at_patch).code(),
              Assembler::GetRB(instr_at_patch).code());
    patcher.masm()->TestIfSmi(reg, r0);
  } else {
    DCHECK(check == DISABLE_INLINED_SMI_CHECK);
    DCHECK(Assembler::IsAndi(instr_at_patch));
    patcher.masm()->cmp(reg, reg, cr0);
  }
  DCHECK(Assembler::IsBranch(branch_instr));

  // Invert the logic of the branch.
  if (Assembler::GetCondition(branch_instr) == eq) {
    patcher.EmitCondition(ne);
  } else {
    DCHECK(Assembler::GetCondition(branch_instr) == ne);
    patcher.EmitCondition(eq);
  }
}
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC