// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04005#if V8_TARGET_ARCH_PPC
6
7#include "src/codegen.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00008#include "src/ic/ic.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -04009#include "src/ic/stub-cache.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010#include "src/interface-descriptors.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040011
12namespace v8 {
13namespace internal {
14
15#define __ ACCESS_MASM(masm)
16
Emily Bernierd0a1eb72015-03-24 16:35:39 -040017static void ProbeTable(Isolate* isolate, MacroAssembler* masm,
Ben Murdochc5610432016-08-08 18:44:38 +010018 Code::Flags flags, StubCache::Table table,
19 Register receiver, Register name,
Emily Bernierd0a1eb72015-03-24 16:35:39 -040020 // Number of the cache entry, not scaled.
21 Register offset, Register scratch, Register scratch2,
22 Register offset_scratch) {
23 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
24 ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
25 ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
26
27 uintptr_t key_off_addr = reinterpret_cast<uintptr_t>(key_offset.address());
28 uintptr_t value_off_addr =
29 reinterpret_cast<uintptr_t>(value_offset.address());
30 uintptr_t map_off_addr = reinterpret_cast<uintptr_t>(map_offset.address());
31
32 // Check the relative positions of the address fields.
33 DCHECK(value_off_addr > key_off_addr);
34 DCHECK((value_off_addr - key_off_addr) % 4 == 0);
35 DCHECK((value_off_addr - key_off_addr) < (256 * 4));
36 DCHECK(map_off_addr > key_off_addr);
37 DCHECK((map_off_addr - key_off_addr) % 4 == 0);
38 DCHECK((map_off_addr - key_off_addr) < (256 * 4));
39
40 Label miss;
41 Register base_addr = scratch;
42 scratch = no_reg;
43
44 // Multiply by 3 because there are 3 fields per entry (name, code, map).
45 __ ShiftLeftImm(offset_scratch, offset, Operand(1));
46 __ add(offset_scratch, offset, offset_scratch);
47
48 // Calculate the base address of the entry.
49 __ mov(base_addr, Operand(key_offset));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000050#if V8_TARGET_ARCH_PPC64
51 DCHECK(kPointerSizeLog2 > StubCache::kCacheIndexShift);
52 __ ShiftLeftImm(offset_scratch, offset_scratch,
53 Operand(kPointerSizeLog2 - StubCache::kCacheIndexShift));
54#else
55 DCHECK(kPointerSizeLog2 == StubCache::kCacheIndexShift);
56#endif
57 __ add(base_addr, base_addr, offset_scratch);
Emily Bernierd0a1eb72015-03-24 16:35:39 -040058
59 // Check that the key in the entry matches the name.
60 __ LoadP(ip, MemOperand(base_addr, 0));
61 __ cmp(name, ip);
62 __ bne(&miss);
63
64 // Check the map matches.
65 __ LoadP(ip, MemOperand(base_addr, map_off_addr - key_off_addr));
66 __ LoadP(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
67 __ cmp(ip, scratch2);
68 __ bne(&miss);
69
70 // Get the code entry from the cache.
71 Register code = scratch2;
72 scratch2 = no_reg;
73 __ LoadP(code, MemOperand(base_addr, value_off_addr - key_off_addr));
74
75 // Check that the flags match what we're looking for.
76 Register flags_reg = base_addr;
77 base_addr = no_reg;
78 __ lwz(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
79
80 DCHECK(!r0.is(flags_reg));
81 __ li(r0, Operand(Code::kFlagsNotUsedInLookup));
82 __ andc(flags_reg, flags_reg, r0);
83 __ mov(r0, Operand(flags));
84 __ cmpl(flags_reg, r0);
85 __ bne(&miss);
86
87#ifdef DEBUG
88 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
89 __ b(&miss);
90 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
91 __ b(&miss);
92 }
93#endif
94
Emily Bernierd0a1eb72015-03-24 16:35:39 -040095 // Jump to the first instruction in the code stub.
96 __ addi(r0, code, Operand(Code::kHeaderSize - kHeapObjectTag));
97 __ mtctr(r0);
98 __ bctr();
99
100 // Miss: fall through.
101 __ bind(&miss);
102}
103
104
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000105void StubCache::GenerateProbe(MacroAssembler* masm, Code::Kind ic_kind,
106 Code::Flags flags, Register receiver,
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400107 Register name, Register scratch, Register extra,
108 Register extra2, Register extra3) {
109 Isolate* isolate = masm->isolate();
110 Label miss;
111
112#if V8_TARGET_ARCH_PPC64
113 // Make sure that code is valid. The multiplying code relies on the
114 // entry size being 24.
115 DCHECK(sizeof(Entry) == 24);
116#else
117 // Make sure that code is valid. The multiplying code relies on the
118 // entry size being 12.
119 DCHECK(sizeof(Entry) == 12);
120#endif
121
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400122 // Make sure that there are no register conflicts.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000123 DCHECK(!AreAliased(receiver, name, scratch, extra, extra2, extra3));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400124
125 // Check scratch, extra and extra2 registers are valid.
126 DCHECK(!scratch.is(no_reg));
127 DCHECK(!extra.is(no_reg));
128 DCHECK(!extra2.is(no_reg));
129 DCHECK(!extra3.is(no_reg));
130
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000131#ifdef DEBUG
132 // If vector-based ics are in use, ensure that scratch, extra, extra2 and
133 // extra3 don't conflict with the vector and slot registers, which need
134 // to be preserved for a handler call or miss.
135 if (IC::ICUseVector(ic_kind)) {
136 Register vector, slot;
137 if (ic_kind == Code::STORE_IC || ic_kind == Code::KEYED_STORE_IC) {
138 vector = VectorStoreICDescriptor::VectorRegister();
139 slot = VectorStoreICDescriptor::SlotRegister();
140 } else {
141 vector = LoadWithVectorDescriptor::VectorRegister();
142 slot = LoadWithVectorDescriptor::SlotRegister();
143 }
144 DCHECK(!AreAliased(vector, slot, scratch, extra, extra2, extra3));
145 }
146#endif
147
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400148 Counters* counters = masm->isolate()->counters();
149 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, extra2,
150 extra3);
151
152 // Check that the receiver isn't a smi.
153 __ JumpIfSmi(receiver, &miss);
154
155 // Get the map of the receiver and compute the hash.
156 __ lwz(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
157 __ LoadP(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
158 __ add(scratch, scratch, ip);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000159 __ xori(scratch, scratch, Operand(flags));
160 // The mask omits the last two bits because they are not part of the hash.
161 __ andi(scratch, scratch,
162 Operand((kPrimaryTableSize - 1) << kCacheIndexShift));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400163
164 // Probe the primary table.
Ben Murdochc5610432016-08-08 18:44:38 +0100165 ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch, extra,
166 extra2, extra3);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400167
168 // Primary miss: Compute hash for secondary probe.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000169 __ sub(scratch, scratch, name);
170 __ addi(scratch, scratch, Operand(flags));
171 __ andi(scratch, scratch,
172 Operand((kSecondaryTableSize - 1) << kCacheIndexShift));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400173
174 // Probe the secondary table.
Ben Murdochc5610432016-08-08 18:44:38 +0100175 ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch, extra,
176 extra2, extra3);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400177
178 // Cache miss: Fall-through and let caller handle the miss by
179 // entering the runtime system.
180 __ bind(&miss);
181 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1, extra2,
182 extra3);
183}
184
185
186#undef __
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000187} // namespace internal
188} // namespace v8
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400189
190#endif // V8_TARGET_ARCH_PPC