// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_X64

#include "src/codegen.h"
#include "src/ic/stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


static void ProbeTable(Isolate* isolate, MacroAssembler* masm,
                       Code::Flags flags, bool leave_frame,
                       StubCache::Table table, Register receiver, Register name,
                       // The offset is scaled by 4, based on
                       // kCacheIndexShift, which is two bits
                       Register offset) {
  // We need to scale up the pointer by 2 when the offset is scaled by less
  // than the pointer size.
  DCHECK(kPointerSize == kInt64Size
             ? kPointerSizeLog2 == StubCache::kCacheIndexShift + 1
             : kPointerSizeLog2 == StubCache::kCacheIndexShift);
  ScaleFactor scale_factor = kPointerSize == kInt64Size ? times_2 : times_1;
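  // A worked check of the arithmetic on x64, where kPointerSize == kInt64Size
  // and scale_factor is therefore times_2: the incoming offset is the entry
  // index times 4 (kCacheIndexShift is two bits), entries are 3 * 8 = 24
  // bytes, and 4 * 3 (from the lea below) * 2 covers that stride exactly.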

  DCHECK_EQ(3 * kPointerSize, sizeof(StubCache::Entry));
  // The offset register holds the entry offset times four (due to masking
  // and shifting optimizations).
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  Label miss;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ leap(offset, Operand(offset, offset, times_2, 0));
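  // (lea computes offset + offset * 2, i.e. offset * 3, in a single
  // instruction and leaves the CPU flags untouched.)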

  __ LoadAddress(kScratchRegister, key_offset);

  // Check that the key in the entry matches the name. At this point the
  // offset register holds the entry index times 12 (times 4 from
  // kCacheIndexShift, times 3 from the lea above); scale_factor (times_2 on
  // x64) multiplies it by a further 2, giving the 24-byte entry stride.
  __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0));
  __ j(not_equal, &miss);

  // Get the map entry from the cache.
  // Use key_offset + kPointerSize * 2, rather than loading map_offset.
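  // (The map is the third field of a StubCache::Entry, so it lives two
  // pointers past the key; reusing key_offset avoids loading a second
  // external reference.)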
  __ movp(kScratchRegister,
          Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2));
  __ cmpp(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
  __ j(not_equal, &miss);

  // Get the code entry from the cache.
  __ LoadAddress(kScratchRegister, value_offset);
  __ movp(kScratchRegister, Operand(kScratchRegister, offset, scale_factor, 0));

  // Check that the flags match what we're looking for.
  __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
  __ andp(offset, Immediate(~Code::kFlagsNotUsedInLookup));
  __ cmpl(offset, Immediate(flags));
  __ j(not_equal, &miss);

#ifdef DEBUG
  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    __ jmp(&miss);
  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    __ jmp(&miss);
  }
#endif

  if (leave_frame) __ leave();

  // Jump to the first instruction in the code stub.
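  // (kScratchRegister holds a tagged Code object pointer; adding
  // Code::kHeaderSize - kHeapObjectTag converts it to the address of the
  // first instruction.)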
  __ addp(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(kScratchRegister);

  __ bind(&miss);
}


void StubCache::GenerateProbe(MacroAssembler* masm, Code::Flags flags,
                              bool leave_frame, Register receiver,
                              Register name, Register scratch, Register extra,
                              Register extra2, Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;
  USE(extra);   // The register extra is not used on the X64 platform.
  USE(extra2);  // The register extra2 is not used on the X64 platform.
  USE(extra3);  // The register extra3 is not used on the X64 platform.
  // Make sure that code is valid. The multiplying code relies on the
  // entry size being 3 * kPointerSize.
  DCHECK(sizeof(Entry) == 3 * kPointerSize);

  // Make sure the flags do not name a specific type.
  DCHECK(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  DCHECK(!scratch.is(receiver));
  DCHECK(!scratch.is(name));

  // Check that the scratch register is valid and that extra2 and extra3 are
  // unused.
  DCHECK(!scratch.is(no_reg));
  DCHECK(extra2.is(no_reg));
  DCHECK(extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
  // Use only the low 32 bits of the map pointer.
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xorp(scratch, Immediate(flags));
  // We mask out the last two bits because they are not part of the hash and
  // they are always 01 for maps. The same applies to the two 'and'
  // instructions below.
  __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift));
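  // In scalar terms, scratch now holds roughly
  //   ((hash_field + low32(map)) ^ flags)
  //       & ((kPrimaryTableSize - 1) << kCacheIndexShift),
  // which should mirror the C++ computation in StubCache::PrimaryOffset.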

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, leave_frame, kPrimary, receiver, name,
             scratch);

  // Primary miss: Compute hash for secondary probe.
  __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xorp(scratch, Immediate(flags));
  __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift));
  __ subl(scratch, name);
  __ addl(scratch, Immediate(flags));
  __ andp(scratch, Immediate((kSecondaryTableSize - 1) << kCacheIndexShift));
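  // I.e. roughly (primary_offset - low32(name) + flags)
  //       & ((kSecondaryTableSize - 1) << kCacheIndexShift),
  // the intended counterpart of StubCache::SecondaryOffset.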

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, leave_frame, kSecondary, receiver, name,
             scratch);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}


#undef __
}
}  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64