/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "scheduler_arm64.h"

#include "code_generator_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"

namespace art {
namespace arm64 {

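// Note: these visitors report their results through two fields inherited from
// SchedulingLatencyVisitor. Roughly, `last_visited_latency_` is the latency of the final
// instruction generated for the visited HInstruction, i.e. the delay before dependent
// instructions can consume its result, while `last_visited_internal_latency_` accounts for
// any preparatory instructions emitted before it.
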
void SchedulingLatencyVisitorARM64::VisitBinaryOperation(HBinaryOperation* instr) {
  last_visited_latency_ = DataType::IsFloatingPointType(instr->GetResultType())
      ? kArm64FloatingPointOpLatency
      : kArm64IntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitBitwiseNegatedRight(
    HBitwiseNegatedRight* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64IntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitDataProcWithShifterOp(
    HDataProcWithShifterOp* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64DataProcWithShifterOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitIntermediateAddress(
    HIntermediateAddress* ATTRIBUTE_UNUSED) {
  // Although the code generated is a simple `add` instruction, we found through empirical results
  // that spacing it from its use in memory accesses was beneficial.
  last_visited_latency_ = kArm64IntegerOpLatency + 2;
}

void SchedulingLatencyVisitorARM64::VisitIntermediateAddressIndex(
    HIntermediateAddressIndex* instr ATTRIBUTE_UNUSED) {
  // Although the code generated is a simple `add` instruction, we found through empirical results
  // that spacing it from its use in memory accesses was beneficial.
  last_visited_latency_ = kArm64DataProcWithShifterOpLatency + 2;
}

void SchedulingLatencyVisitorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MulIntegerLatency;
}

void SchedulingLatencyVisitorARM64::VisitArrayGet(HArrayGet* instruction) {
  if (!instruction->GetArray()->IsIntermediateAddress()) {
    // Take the intermediate address computation into account.
    last_visited_internal_latency_ = kArm64IntegerOpLatency;
  }
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitArrayLength(HArrayLength* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitArraySet(HArraySet* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryStoreLatency;
}

void SchedulingLatencyVisitorARM64::VisitBoundsCheck(HBoundsCheck* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64IntegerOpLatency;
  // Users do not use any data results.
  last_visited_latency_ = 0;
}

void SchedulingLatencyVisitorARM64::VisitDiv(HDiv* instr) {
  DataType::Type type = instr->GetResultType();
  switch (type) {
    case DataType::Type::kFloat32:
      last_visited_latency_ = kArm64DivFloatLatency;
      break;
    case DataType::Type::kFloat64:
      last_visited_latency_ = kArm64DivDoubleLatency;
      break;
    default:
      // Follow the code path used by code generation.
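      // The cases below mirror the code generator's lowering of integer division by a constant:
      // a zero divisor generates no code (a preceding HDivZeroCheck throws first), +/-1 becomes a
      // single mov/neg, a power of two becomes a short compare/select/shift sequence, and any
      // other constant is strength-reduced to a magic-number multiplication.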
      if (instr->GetRight()->IsConstant()) {
        int64_t imm = Int64FromConstant(instr->GetRight()->AsConstant());
        if (imm == 0) {
          last_visited_internal_latency_ = 0;
          last_visited_latency_ = 0;
        } else if (imm == 1 || imm == -1) {
          last_visited_internal_latency_ = 0;
          last_visited_latency_ = kArm64IntegerOpLatency;
        } else if (IsPowerOfTwo(AbsOrMin(imm))) {
          last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
          last_visited_latency_ = kArm64IntegerOpLatency;
        } else {
          DCHECK(imm <= -2 || imm >= 2);
          last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
          last_visited_latency_ = kArm64MulIntegerLatency;
        }
      } else {
        last_visited_latency_ = kArm64DivIntegerLatency;
      }
      break;
  }
}

void SchedulingLatencyVisitorARM64::VisitInstanceFieldGet(HInstanceFieldGet* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitInstanceOf(HInstanceOf* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64CallInternalLatency;
  last_visited_latency_ = kArm64IntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitInvoke(HInvoke* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64CallInternalLatency;
  last_visited_latency_ = kArm64CallLatency;
}

void SchedulingLatencyVisitorARM64::VisitLoadString(HLoadString* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64LoadStringInternalLatency;
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitMul(HMul* instr) {
  last_visited_latency_ = DataType::IsFloatingPointType(instr->GetResultType())
      ? kArm64MulFloatingPointLatency
      : kArm64MulIntegerLatency;
}

void SchedulingLatencyVisitorARM64::VisitNewArray(HNewArray* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64IntegerOpLatency + kArm64CallInternalLatency;
  last_visited_latency_ = kArm64CallLatency;
}

void SchedulingLatencyVisitorARM64::VisitNewInstance(HNewInstance* instruction) {
  if (instruction->IsStringAlloc()) {
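    // String allocation goes through a runtime entrypoint that the generated code first loads
    // from the current thread, hence the extra memory load before the call.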
    last_visited_internal_latency_ = 2 + kArm64MemoryLoadLatency + kArm64CallInternalLatency;
  } else {
    last_visited_internal_latency_ = kArm64CallInternalLatency;
  }
  last_visited_latency_ = kArm64CallLatency;
}

void SchedulingLatencyVisitorARM64::VisitRem(HRem* instruction) {
  if (DataType::IsFloatingPointType(instruction->GetResultType())) {
    last_visited_internal_latency_ = kArm64CallInternalLatency;
    last_visited_latency_ = kArm64CallLatency;
  } else {
    // Follow the code path used by code generation.
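    // Constant divisors use the same strategy selection as VisitDiv() above; for a non-constant
    // divisor, the code is an sdiv followed by a multiply-subtract (msub) to reconstruct the
    // remainder, hence the multiply latency on the final instruction.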
    if (instruction->GetRight()->IsConstant()) {
      int64_t imm = Int64FromConstant(instruction->GetRight()->AsConstant());
      if (imm == 0) {
        last_visited_internal_latency_ = 0;
        last_visited_latency_ = 0;
      } else if (imm == 1 || imm == -1) {
        last_visited_internal_latency_ = 0;
        last_visited_latency_ = kArm64IntegerOpLatency;
      } else if (IsPowerOfTwo(AbsOrMin(imm))) {
        last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
        last_visited_latency_ = kArm64IntegerOpLatency;
      } else {
        DCHECK(imm <= -2 || imm >= 2);
        last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
        last_visited_latency_ = kArm64MulIntegerLatency;
      }
    } else {
      last_visited_internal_latency_ = kArm64DivIntegerLatency;
      last_visited_latency_ = kArm64MulIntegerLatency;
    }
  }
}

void SchedulingLatencyVisitorARM64::VisitStaticFieldGet(HStaticFieldGet* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
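  // A suspend check is expected either inside a loop, or at the method entry where it is
  // immediately followed by the goto into the method body.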
  DCHECK((block->GetLoopInformation() != nullptr) ||
         (block->IsEntryBlock() && instruction->GetNext()->IsGoto()));
  // Users do not use any data results.
  last_visited_latency_ = 0;
}

void SchedulingLatencyVisitorARM64::VisitTypeConversion(HTypeConversion* instr) {
  if (DataType::IsFloatingPointType(instr->GetResultType()) ||
      DataType::IsFloatingPointType(instr->GetInputType())) {
    last_visited_latency_ = kArm64TypeConversionFloatingPointIntegerLatency;
  } else {
    last_visited_latency_ = kArm64IntegerOpLatency;
  }
}

void SchedulingLatencyVisitorARM64::HandleSimpleArithmeticSIMD(HVecOperation* instr) {
  if (DataType::IsFloatingPointType(instr->GetPackedType())) {
    last_visited_latency_ = kArm64SIMDFloatingPointOpLatency;
  } else {
    last_visited_latency_ = kArm64SIMDIntegerOpLatency;
  }
}

void SchedulingLatencyVisitorARM64::VisitVecReplicateScalar(
    HVecReplicateScalar* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDReplicateOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecExtractScalar(HVecExtractScalar* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecReduce(HVecReduce* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecCnv(HVecCnv* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDTypeConversionInt2FPLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecNeg(HVecNeg* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecAbs(HVecAbs* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecNot(HVecNot* instr) {
  if (instr->GetPackedType() == DataType::Type::kBool) {
    last_visited_internal_latency_ = kArm64SIMDIntegerOpLatency;
  }
  last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecAdd(HVecAdd* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecHalvingAdd(HVecHalvingAdd* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecSub(HVecSub* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecMul(HVecMul* instr) {
  if (DataType::IsFloatingPointType(instr->GetPackedType())) {
    last_visited_latency_ = kArm64SIMDMulFloatingPointLatency;
  } else {
    last_visited_latency_ = kArm64SIMDMulIntegerLatency;
  }
}

void SchedulingLatencyVisitorARM64::VisitVecDiv(HVecDiv* instr) {
  if (instr->GetPackedType() == DataType::Type::kFloat32) {
    last_visited_latency_ = kArm64SIMDDivFloatLatency;
  } else {
    DCHECK(instr->GetPackedType() == DataType::Type::kFloat64);
    last_visited_latency_ = kArm64SIMDDivDoubleLatency;
  }
}

void SchedulingLatencyVisitorARM64::VisitVecMin(HVecMin* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecMax(HVecMax* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecAnd(HVecAnd* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecAndNot(HVecAndNot* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecOr(HVecOr* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecXor(HVecXor* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecShl(HVecShl* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecShr(HVecShr* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecUShr(HVecUShr* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecSetScalars(HVecSetScalars* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecMultiplyAccumulate(
    HVecMultiplyAccumulate* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDMulIntegerLatency;
}

void SchedulingLatencyVisitorARM64::HandleVecAddress(
    HVecMemoryOperation* instruction,
    size_t size ATTRIBUTE_UNUSED) {
  HInstruction* index = instruction->InputAt(1);
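  // A constant index is folded into the addressing mode; a variable index requires an extra
  // shifted-register add to compute the address.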
  if (!index->IsConstant()) {
    last_visited_internal_latency_ += kArm64DataProcWithShifterOpLatency;
  }
}

void SchedulingLatencyVisitorARM64::VisitVecLoad(HVecLoad* instr) {
  last_visited_internal_latency_ = 0;
  size_t size = DataType::Size(instr->GetPackedType());

  if (instr->GetPackedType() == DataType::Type::kUint16
      && mirror::kUseStringCompression
      && instr->IsStringCharAt()) {
    // Set latencies for the uncompressed case.
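    // Reading chars from a potentially compressed string first loads the count field and
    // branches on the compression flag to pick the load width, hence the extra load and branch.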
    last_visited_internal_latency_ += kArm64MemoryLoadLatency + kArm64BranchLatency;
    HandleVecAddress(instr, size);
    last_visited_latency_ = kArm64SIMDMemoryLoadLatency;
  } else {
    HandleVecAddress(instr, size);
    last_visited_latency_ = kArm64SIMDMemoryLoadLatency;
  }
}

void SchedulingLatencyVisitorARM64::VisitVecStore(HVecStore* instr) {
  last_visited_internal_latency_ = 0;
  size_t size = DataType::Size(instr->GetPackedType());
  HandleVecAddress(instr, size);
  last_visited_latency_ = kArm64SIMDMemoryStoreLatency;
}

}  // namespace arm64
}  // namespace art