/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "scheduler_arm64.h"
#include "code_generator_utils.h"

namespace art {
namespace arm64 {

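// Each `Visit...` method below models the cost of one HInstruction:
// `last_visited_latency_` is the latency from the instruction to a user of
// its result, and `last_visited_internal_latency_` is the time spent inside
// the instruction's own generated code before that result becomes available
// (see scheduler_arm64.h for the field definitions).
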
void SchedulingLatencyVisitorARM64::VisitBinaryOperation(HBinaryOperation* instr) {
  last_visited_latency_ = Primitive::IsFloatingPointType(instr->GetResultType())
      ? kArm64FloatingPointOpLatency
      : kArm64IntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitBitwiseNegatedRight(
    HBitwiseNegatedRight* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64IntegerOpLatency;
}

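// HDataProcWithShifterOp is a data-processing instruction with a shift or
// extend folded into one of its operands, e.g. `add w0, w1, w2, lsl #2`.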
void SchedulingLatencyVisitorARM64::VisitDataProcWithShifterOp(
    HDataProcWithShifterOp* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64DataProcWithShifterOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitIntermediateAddress(
    HIntermediateAddress* ATTRIBUTE_UNUSED) {
  // Although the code generated is a simple `add` instruction, we found through empirical results
  // that spacing it from its use in memory accesses was beneficial.
  last_visited_latency_ = kArm64IntegerOpLatency + 2;
}

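// HMultiplyAccumulate fuses a multiply with an add or subtract into a single
// `madd`/`msub`, so it is charged the same latency as a plain multiply.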
void SchedulingLatencyVisitorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MulIntegerLatency;
}

void SchedulingLatencyVisitorARM64::VisitArrayGet(HArrayGet* instruction) {
  if (!instruction->GetArray()->IsIntermediateAddress()) {
    // Take the intermediate address computation into account.
    last_visited_internal_latency_ = kArm64IntegerOpLatency;
  }
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitArrayLength(HArrayLength* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitArraySet(HArraySet* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryStoreLatency;
}

void SchedulingLatencyVisitorARM64::VisitBoundsCheck(HBoundsCheck* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64IntegerOpLatency;
  // Users do not use any data results.
  last_visited_latency_ = 0;
}

void SchedulingLatencyVisitorARM64::VisitDiv(HDiv* instr) {
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimFloat:
      last_visited_latency_ = kArm64DivFloatLatency;
      break;
    case Primitive::kPrimDouble:
      last_visited_latency_ = kArm64DivDoubleLatency;
      break;
    default:
      // Follow the code path used by code generation.
      if (instr->GetRight()->IsConstant()) {
        int64_t imm = Int64FromConstant(instr->GetRight()->AsConstant());
        if (imm == 0) {
          last_visited_internal_latency_ = 0;
          last_visited_latency_ = 0;
        } else if (imm == 1 || imm == -1) {
          last_visited_internal_latency_ = 0;
          last_visited_latency_ = kArm64IntegerOpLatency;
        } else if (IsPowerOfTwo(AbsOrMin(imm))) {
          last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
          last_visited_latency_ = kArm64IntegerOpLatency;
        } else {
          DCHECK(imm <= -2 || imm >= 2);
          last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
          last_visited_latency_ = kArm64MulIntegerLatency;
        }
      } else {
        last_visited_latency_ = kArm64DivIntegerLatency;
      }
      break;
  }
}
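
// The constant-divisor cases above mirror the lowering done by code
// generation. As an illustrative sketch (not the exact emitted sequence),
// `x / 8` becomes roughly:
//   add  temp, x, #7        // bias negative dividends towards zero
//   cmp  x, #0
//   csel out, temp, x, lt   // use the biased value only when x < 0
//   asr  out, out, #3
// Other non-trivial constants are handled with a magic-number multiply, which
// is why that path finishes with kArm64MulIntegerLatency.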

void SchedulingLatencyVisitorARM64::VisitInstanceFieldGet(HInstanceFieldGet* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitInstanceOf(HInstanceOf* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64CallInternalLatency;
  last_visited_latency_ = kArm64IntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitInvoke(HInvoke* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64CallInternalLatency;
  last_visited_latency_ = kArm64CallLatency;
}

void SchedulingLatencyVisitorARM64::VisitLoadString(HLoadString* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64LoadStringInternalLatency;
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitMul(HMul* instr) {
  last_visited_latency_ = Primitive::IsFloatingPointType(instr->GetResultType())
      ? kArm64MulFloatingPointLatency
      : kArm64MulIntegerLatency;
}

void SchedulingLatencyVisitorARM64::VisitNewArray(HNewArray* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64IntegerOpLatency + kArm64CallInternalLatency;
  last_visited_latency_ = kArm64CallLatency;
}

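// The string-allocation fast path below presumably loads the allocation
// entrypoint from the current thread and calls through it, which the extra
// `2 + kArm64MemoryLoadLatency` would model (an assumption; see the code
// generator's string-allocation path).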
void SchedulingLatencyVisitorARM64::VisitNewInstance(HNewInstance* instruction) {
  if (instruction->IsStringAlloc()) {
    last_visited_internal_latency_ = 2 + kArm64MemoryLoadLatency + kArm64CallInternalLatency;
  } else {
    last_visited_internal_latency_ = kArm64CallInternalLatency;
  }
  last_visited_latency_ = kArm64CallLatency;
}

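// For a non-constant divisor, integer remainder is generated as an `sdiv`
// followed by an `msub` (rem = dividend - quotient * divisor), hence the
// divide latency internally and the multiply latency on the result. The
// floating-point case is a call into the runtime (fmod/fmodf).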
void SchedulingLatencyVisitorARM64::VisitRem(HRem* instruction) {
  if (Primitive::IsFloatingPointType(instruction->GetResultType())) {
    last_visited_internal_latency_ = kArm64CallInternalLatency;
    last_visited_latency_ = kArm64CallLatency;
  } else {
    // Follow the code path used by code generation.
    if (instruction->GetRight()->IsConstant()) {
      int64_t imm = Int64FromConstant(instruction->GetRight()->AsConstant());
      if (imm == 0) {
        last_visited_internal_latency_ = 0;
        last_visited_latency_ = 0;
      } else if (imm == 1 || imm == -1) {
        last_visited_internal_latency_ = 0;
        last_visited_latency_ = kArm64IntegerOpLatency;
      } else if (IsPowerOfTwo(AbsOrMin(imm))) {
        last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
        last_visited_latency_ = kArm64IntegerOpLatency;
      } else {
        DCHECK(imm <= -2 || imm >= 2);
        last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
        last_visited_latency_ = kArm64MulIntegerLatency;
      }
    } else {
      last_visited_internal_latency_ = kArm64DivIntegerLatency;
      last_visited_latency_ = kArm64MulIntegerLatency;
    }
  }
}

void SchedulingLatencyVisitorARM64::VisitStaticFieldGet(HStaticFieldGet* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  DCHECK((block->GetLoopInformation() != nullptr) ||
         (block->IsEntryBlock() && instruction->GetNext()->IsGoto()));
  // Users do not use any data results.
  last_visited_latency_ = 0;
}

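// Conversions involving a floating-point type map to instructions such as
// `scvtf`/`fcvtzs`/`fcvt`, while integer-to-integer conversions are at most a
// single extend or move, so only the former pay the higher latency.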
void SchedulingLatencyVisitorARM64::VisitTypeConversion(HTypeConversion* instr) {
  if (Primitive::IsFloatingPointType(instr->GetResultType()) ||
      Primitive::IsFloatingPointType(instr->GetInputType())) {
    last_visited_latency_ = kArm64TypeConversionFloatingPointIntegerLatency;
  } else {
    last_visited_latency_ = kArm64IntegerOpLatency;
  }
}

}  // namespace arm64
}  // namespace art