// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/instruction-scheduler.h"

namespace v8 {
namespace internal {
namespace compiler {

11bool InstructionScheduler::SchedulerSupported() { return true; }
12
13
14int InstructionScheduler::GetTargetInstructionFlags(
15 const Instruction* instr) const {
16 switch (instr->arch_opcode()) {
17 case kArm64Add:
18 case kArm64Add32:
19 case kArm64And:
20 case kArm64And32:
21 case kArm64Bic:
22 case kArm64Bic32:
23 case kArm64Clz:
24 case kArm64Clz32:
25 case kArm64Cmp:
26 case kArm64Cmp32:
27 case kArm64Cmn:
28 case kArm64Cmn32:
29 case kArm64Tst:
30 case kArm64Tst32:
31 case kArm64Or:
32 case kArm64Or32:
33 case kArm64Orn:
34 case kArm64Orn32:
35 case kArm64Eor:
36 case kArm64Eor32:
37 case kArm64Eon:
38 case kArm64Eon32:
39 case kArm64Sub:
40 case kArm64Sub32:
41 case kArm64Mul:
42 case kArm64Mul32:
43 case kArm64Smull:
44 case kArm64Umull:
45 case kArm64Madd:
46 case kArm64Madd32:
47 case kArm64Msub:
48 case kArm64Msub32:
49 case kArm64Mneg:
50 case kArm64Mneg32:
51 case kArm64Idiv:
52 case kArm64Idiv32:
53 case kArm64Udiv:
54 case kArm64Udiv32:
55 case kArm64Imod:
56 case kArm64Imod32:
57 case kArm64Umod:
58 case kArm64Umod32:
59 case kArm64Not:
60 case kArm64Not32:
61 case kArm64Lsl:
62 case kArm64Lsl32:
63 case kArm64Lsr:
64 case kArm64Lsr32:
65 case kArm64Asr:
66 case kArm64Asr32:
67 case kArm64Ror:
68 case kArm64Ror32:
69 case kArm64Mov32:
70 case kArm64Sxtb32:
71 case kArm64Sxth32:
72 case kArm64Sxtw:
73 case kArm64Sbfx32:
74 case kArm64Ubfx:
75 case kArm64Ubfx32:
76 case kArm64Ubfiz32:
77 case kArm64Bfi:
78 case kArm64Float32Cmp:
79 case kArm64Float32Add:
80 case kArm64Float32Sub:
81 case kArm64Float32Mul:
82 case kArm64Float32Div:
83 case kArm64Float32Max:
84 case kArm64Float32Min:
85 case kArm64Float32Abs:
86 case kArm64Float32Sqrt:
87 case kArm64Float32RoundDown:
88 case kArm64Float64Cmp:
89 case kArm64Float64Add:
90 case kArm64Float64Sub:
91 case kArm64Float64Mul:
92 case kArm64Float64Div:
93 case kArm64Float64Mod:
94 case kArm64Float64Max:
95 case kArm64Float64Min:
96 case kArm64Float64Abs:
97 case kArm64Float64Neg:
98 case kArm64Float64Sqrt:
99 case kArm64Float64RoundDown:
100 case kArm64Float64RoundTiesAway:
101 case kArm64Float64RoundTruncate:
102 case kArm64Float64RoundTiesEven:
103 case kArm64Float64RoundUp:
104 case kArm64Float32RoundTiesEven:
105 case kArm64Float32RoundTruncate:
106 case kArm64Float32RoundUp:
107 case kArm64Float32ToFloat64:
108 case kArm64Float64ToFloat32:
109 case kArm64Float64ToInt32:
110 case kArm64Float64ToUint32:
111 case kArm64Float32ToInt64:
112 case kArm64Float64ToInt64:
113 case kArm64Float32ToUint64:
114 case kArm64Float64ToUint64:
115 case kArm64Int32ToFloat64:
116 case kArm64Int64ToFloat32:
117 case kArm64Int64ToFloat64:
118 case kArm64Uint32ToFloat64:
119 case kArm64Uint64ToFloat32:
120 case kArm64Uint64ToFloat64:
121 case kArm64Float64ExtractLowWord32:
122 case kArm64Float64ExtractHighWord32:
123 case kArm64Float64InsertLowWord32:
124 case kArm64Float64InsertHighWord32:
125 case kArm64Float64MoveU64:
126 case kArm64U64MoveFloat64:
127 return kNoOpcodeFlags;
128
129 case kArm64TestAndBranch32:
130 case kArm64TestAndBranch:
131 case kArm64CompareAndBranch32:
132 return kIsBlockTerminator;
133
134 case kArm64LdrS:
135 case kArm64LdrD:
136 case kArm64Ldrb:
137 case kArm64Ldrsb:
138 case kArm64Ldrh:
139 case kArm64Ldrsh:
140 case kArm64LdrW:
141 case kArm64Ldr:
142 return kIsLoadOperation;
143
144 case kArm64ClaimForCallArguments:
145 case kArm64Poke:
146 case kArm64PokePair:
147 case kArm64StrS:
148 case kArm64StrD:
149 case kArm64Strb:
150 case kArm64Strh:
151 case kArm64StrW:
152 case kArm64Str:
153 return kHasSideEffect;
154
155#define CASE(Name) case k##Name:
156 COMMON_ARCH_OPCODE_LIST(CASE)
157#undef CASE
158 // Already covered in architecture independent code.
159 UNREACHABLE();
160 }
161
162 UNREACHABLE();
163 return kNoOpcodeFlags;
164}
165
166
167int InstructionScheduler::GetInstructionLatency(const Instruction* instr) {
168 // Basic latency modeling for arm64 instructions. They have been determined
169 // in an empirical way.
170 switch (instr->arch_opcode()) {
171 case kArm64Float32ToFloat64:
172 case kArm64Float64ToFloat32:
173 case kArm64Float64ToInt32:
174 case kArm64Float64ToUint32:
175 case kArm64Int32ToFloat64:
176 case kArm64Uint32ToFloat64:
177 return 3;
178
179 case kArm64Float64Add:
180 case kArm64Float64Sub:
181 return 2;
182
183 case kArm64Float64Mul:
184 return 3;
185
186 case kArm64Float64Div:
187 return 6;
188
189 case kArm64Lsl:
190 case kArm64Lsl32:
191 case kArm64Lsr:
192 case kArm64Lsr32:
193 case kArm64Asr:
194 case kArm64Asr32:
195 case kArm64Ror:
196 case kArm64Ror32:
197 return 3;
198
199 case kCheckedLoadInt8:
200 case kCheckedLoadUint8:
201 case kCheckedLoadInt16:
202 case kCheckedLoadUint16:
203 case kCheckedLoadWord32:
204 case kCheckedLoadWord64:
205 case kCheckedLoadFloat32:
206 case kCheckedLoadFloat64:
207 case kArm64LdrS:
208 case kArm64LdrD:
209 case kArm64Ldrb:
210 case kArm64Ldrsb:
211 case kArm64Ldrh:
212 case kArm64Ldrsh:
213 case kArm64LdrW:
214 case kArm64Ldr:
215 return 5;
216
217 default:
218 return 1;
219 }
220}

}  // namespace compiler
}  // namespace internal
}  // namespace v8