/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "codegen_x86.h"
#include "dex/quick/mir_to_lir-inl.h"
#include "dex/reg_storage_eq.h"
#include "x86_lir.h"

namespace art {

void X86Mir2Lir::GenArithOpFloat(Instruction::Code opcode,
                                 RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2) {
  X86OpCode op = kX86Nop;
  RegLocation rl_result;

  /*
   * Don't attempt to optimize register usage since these opcodes call out to
   * the handlers.
   */
  switch (opcode) {
    case Instruction::ADD_FLOAT_2ADDR:
    case Instruction::ADD_FLOAT:
      op = kX86AddssRR;
      break;
    case Instruction::SUB_FLOAT_2ADDR:
    case Instruction::SUB_FLOAT:
      op = kX86SubssRR;
      break;
    case Instruction::DIV_FLOAT_2ADDR:
    case Instruction::DIV_FLOAT:
      op = kX86DivssRR;
      break;
    case Instruction::MUL_FLOAT_2ADDR:
    case Instruction::MUL_FLOAT:
      op = kX86MulssRR;
      break;
    case Instruction::REM_FLOAT_2ADDR:
    case Instruction::REM_FLOAT:
      GenRemFP(rl_dest, rl_src1, rl_src2, false /* is_double */);
      return;
    case Instruction::NEG_FLOAT:
      GenNegFloat(rl_dest, rl_src1);
      return;
    default:
      LOG(FATAL) << "Unexpected opcode: " << opcode;
  }
  rl_src1 = LoadValue(rl_src1, kFPReg);
  rl_src2 = LoadValue(rl_src2, kFPReg);
  rl_result = EvalLoc(rl_dest, kFPReg, true);
  RegStorage r_dest = rl_result.reg;
  RegStorage r_src1 = rl_src1.reg;
  RegStorage r_src2 = rl_src2.reg;
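  // The SSE arithmetic instructions are two-operand (dest op= src), so src1 is copied into the
  // result register first; if that copy would clobber src2, src2 is moved to a temp beforehand.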
  if (r_dest == r_src2) {
    r_src2 = AllocTempSingle();
    OpRegCopy(r_src2, r_dest);
  }
  OpRegCopy(r_dest, r_src1);
  NewLIR2(op, r_dest.GetReg(), r_src2.GetReg());
  StoreValue(rl_dest, rl_result);
}

void X86Mir2Lir::GenArithOpDouble(Instruction::Code opcode,
                                  RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2) {
  DCHECK(rl_dest.wide);
  DCHECK(rl_dest.fp);
  DCHECK(rl_src1.wide);
  DCHECK(rl_src1.fp);
  DCHECK(rl_src2.wide);
  DCHECK(rl_src2.fp);
  X86OpCode op = kX86Nop;
  RegLocation rl_result;

  switch (opcode) {
    case Instruction::ADD_DOUBLE_2ADDR:
    case Instruction::ADD_DOUBLE:
      op = kX86AddsdRR;
      break;
    case Instruction::SUB_DOUBLE_2ADDR:
    case Instruction::SUB_DOUBLE:
      op = kX86SubsdRR;
      break;
    case Instruction::DIV_DOUBLE_2ADDR:
    case Instruction::DIV_DOUBLE:
      op = kX86DivsdRR;
      break;
    case Instruction::MUL_DOUBLE_2ADDR:
    case Instruction::MUL_DOUBLE:
      op = kX86MulsdRR;
      break;
    case Instruction::REM_DOUBLE_2ADDR:
    case Instruction::REM_DOUBLE:
      GenRemFP(rl_dest, rl_src1, rl_src2, true /* is_double */);
      return;
    case Instruction::NEG_DOUBLE:
      GenNegDouble(rl_dest, rl_src1);
      return;
    default:
      LOG(FATAL) << "Unexpected opcode: " << opcode;
  }
  rl_src1 = LoadValueWide(rl_src1, kFPReg);
  rl_src2 = LoadValueWide(rl_src2, kFPReg);
  rl_result = EvalLoc(rl_dest, kFPReg, true);
  if (rl_result.reg == rl_src2.reg) {
    rl_src2.reg = AllocTempDouble();
    OpRegCopy(rl_src2.reg, rl_result.reg);
  }
  OpRegCopy(rl_result.reg, rl_src1.reg);
  NewLIR2(op, rl_result.reg.GetReg(), rl_src2.reg.GetReg());
  StoreValueWide(rl_dest, rl_result);
}

void X86Mir2Lir::GenMultiplyByConstantFloat(RegLocation rl_dest, RegLocation rl_src1,
                                            int32_t constant) {
  // TODO: need x86 implementation.
  UNUSED(rl_dest, rl_src1, constant);
  LOG(FATAL) << "Unimplemented GenMultiplyByConstantFloat in x86";
}

void X86Mir2Lir::GenMultiplyByConstantDouble(RegLocation rl_dest, RegLocation rl_src1,
                                             int64_t constant) {
  // TODO: need x86 implementation.
  UNUSED(rl_dest, rl_src1, constant);
  LOG(FATAL) << "Unimplemented GenMultiplyByConstantDouble in x86";
}

void X86Mir2Lir::GenLongToFP(RegLocation rl_dest, RegLocation rl_src, bool is_double) {
  // Compute offsets to the source and destination VRs on stack
  int src_v_reg_offset = SRegOffset(rl_src.s_reg_low);
  int dest_v_reg_offset = SRegOffset(rl_dest.s_reg_low);

  // Update the in-register state of source.
  rl_src = UpdateLocWide(rl_src);

  // All memory accesses below reference dalvik regs.
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);

  // If the source is in physical register, then put it in its location on stack.
  if (rl_src.location == kLocPhysReg) {
    RegisterInfo* reg_info = GetRegInfo(rl_src.reg);

    if (reg_info != nullptr && reg_info->IsTemp()) {
      // Calling FlushSpecificReg because it will only write back VR if it is dirty.
      FlushSpecificReg(reg_info);
      // ResetDef to prevent NullifyRange from removing stores.
      ResetDef(rl_src.reg);
    } else {
      // It must have been register promoted if it is not a temp but is still in physical
      // register. Since we need it to be in memory to convert, we place it there now.
      const RegStorage rs_rSP = cu_->target64 ? rs_rX86_SP_64 : rs_rX86_SP_32;
      StoreBaseDisp(rs_rSP, src_v_reg_offset, rl_src.reg, k64, kNotVolatile);
    }
  }

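  // 32-bit x86 has no SSE instruction that converts a 64-bit integer directly, so the conversion
  // goes through the x87 unit: fild reads the long from its VR home location and fstp writes the
  // float/double result back to memory.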
  // Push the source virtual register onto the x87 stack.
  LIR *fild64 = NewLIR2NoDest(kX86Fild64M, rs_rX86_SP_32.GetReg(),
                              src_v_reg_offset + LOWORD_OFFSET);
  AnnotateDalvikRegAccess(fild64, (src_v_reg_offset + LOWORD_OFFSET) >> 2,
                          true /* is_load */, true /* is64bit */);

  // Now pop off x87 stack and store it in the destination VR's stack location.
  int opcode = is_double ? kX86Fstp64M : kX86Fstp32M;
  int displacement = is_double ? dest_v_reg_offset + LOWORD_OFFSET : dest_v_reg_offset;
  LIR *fstp = NewLIR2NoDest(opcode, rs_rX86_SP_32.GetReg(), displacement);
  AnnotateDalvikRegAccess(fstp, displacement >> 2, false /* is_load */, is_double);

  /*
   * The result is in a physical register if it was in a temp or was register
   * promoted. For that reason it is enough to check if it is in physical
   * register. If it is, then we must do all of the bookkeeping necessary to
   * invalidate temp (if needed) and load in promoted register (if needed).
   * If the result's location is in memory, then we do not need to do anything
   * more since the fstp has already placed the correct value in memory.
   */
  RegLocation rl_result = is_double ? UpdateLocWideTyped(rl_dest) : UpdateLocTyped(rl_dest);
  if (rl_result.location == kLocPhysReg) {
    /*
     * We already know that the result is in a physical register but do not know if it is the
     * right class. So we call EvalLoc(Wide) first which will ensure that it will get moved to the
     * correct register class.
     */
    rl_result = EvalLoc(rl_dest, kFPReg, true);
    const RegStorage rs_rSP = cu_->target64 ? rs_rX86_SP_64 : rs_rX86_SP_32;
    if (is_double) {
      LoadBaseDisp(rs_rSP, dest_v_reg_offset, rl_result.reg, k64, kNotVolatile);

      StoreFinalValueWide(rl_dest, rl_result);
    } else {
      Load32Disp(rs_rSP, dest_v_reg_offset, rl_result.reg);

      StoreFinalValue(rl_dest, rl_result);
    }
  }
}

void X86Mir2Lir::GenConversion(Instruction::Code opcode, RegLocation rl_dest,
                               RegLocation rl_src) {
  RegisterClass rcSrc = kFPReg;
  X86OpCode op = kX86Nop;
  RegLocation rl_result;
  switch (opcode) {
    case Instruction::INT_TO_FLOAT:
      rcSrc = kCoreReg;
      op = kX86Cvtsi2ssRR;
      break;
    case Instruction::DOUBLE_TO_FLOAT:
      rcSrc = kFPReg;
      op = kX86Cvtsd2ssRR;
      break;
    case Instruction::FLOAT_TO_DOUBLE:
      rcSrc = kFPReg;
      op = kX86Cvtss2sdRR;
      break;
    case Instruction::INT_TO_DOUBLE:
      rcSrc = kCoreReg;
      op = kX86Cvtsi2sdRR;
      break;
    case Instruction::FLOAT_TO_INT: {
      rl_src = LoadValue(rl_src, kFPReg);
      // In case result vreg is also src vreg, break association to avoid useless copy by EvalLoc()
      ClobberSReg(rl_dest.s_reg_low);
      rl_result = EvalLoc(rl_dest, kCoreReg, true);
      RegStorage temp_reg = AllocTempSingle();

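      // Java-style narrowing: if the source compares >= the converted INT_MAX bound (or is
      // +infinity) the result stays INT_MAX; a NaN (unordered, PF set) becomes 0; everything
      // else, including negative overflow which cvttss2si saturates to 0x80000000 == INT_MIN,
      // takes the plain truncating conversion.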
      LoadConstant(rl_result.reg, 0x7fffffff);
      NewLIR2(kX86Cvtsi2ssRR, temp_reg.GetReg(), rl_result.reg.GetReg());
      NewLIR2(kX86ComissRR, rl_src.reg.GetReg(), temp_reg.GetReg());
      LIR* branch_pos_overflow = NewLIR2(kX86Jcc8, 0, kX86CondAe);
      LIR* branch_na_n = NewLIR2(kX86Jcc8, 0, kX86CondP);
      NewLIR2(kX86Cvttss2siRR, rl_result.reg.GetReg(), rl_src.reg.GetReg());
      LIR* branch_normal = NewLIR1(kX86Jmp8, 0);
      branch_na_n->target = NewLIR0(kPseudoTargetLabel);
      NewLIR2(kX86Xor32RR, rl_result.reg.GetReg(), rl_result.reg.GetReg());
      branch_pos_overflow->target = NewLIR0(kPseudoTargetLabel);
      branch_normal->target = NewLIR0(kPseudoTargetLabel);
      StoreValue(rl_dest, rl_result);
      return;
    }
    case Instruction::DOUBLE_TO_INT: {
      rl_src = LoadValueWide(rl_src, kFPReg);
      // In case result vreg is also src vreg, break association to avoid useless copy by EvalLoc()
      ClobberSReg(rl_dest.s_reg_low);
      rl_result = EvalLoc(rl_dest, kCoreReg, true);
      RegStorage temp_reg = AllocTempDouble();

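      // Same clamping scheme as FLOAT_TO_INT above, using the double-precision compare and
      // truncate.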
      LoadConstant(rl_result.reg, 0x7fffffff);
      NewLIR2(kX86Cvtsi2sdRR, temp_reg.GetReg(), rl_result.reg.GetReg());
      NewLIR2(kX86ComisdRR, rl_src.reg.GetReg(), temp_reg.GetReg());
      LIR* branch_pos_overflow = NewLIR2(kX86Jcc8, 0, kX86CondAe);
      LIR* branch_na_n = NewLIR2(kX86Jcc8, 0, kX86CondP);
      NewLIR2(kX86Cvttsd2siRR, rl_result.reg.GetReg(), rl_src.reg.GetReg());
      LIR* branch_normal = NewLIR1(kX86Jmp8, 0);
      branch_na_n->target = NewLIR0(kPseudoTargetLabel);
      NewLIR2(kX86Xor32RR, rl_result.reg.GetReg(), rl_result.reg.GetReg());
      branch_pos_overflow->target = NewLIR0(kPseudoTargetLabel);
      branch_normal->target = NewLIR0(kPseudoTargetLabel);
      StoreValue(rl_dest, rl_result);
      return;
    }
    case Instruction::LONG_TO_DOUBLE:
      if (cu_->target64) {
        rcSrc = kCoreReg;
        op = kX86Cvtsqi2sdRR;
        break;
      }
      GenLongToFP(rl_dest, rl_src, true /* is_double */);
      return;
    case Instruction::LONG_TO_FLOAT:
      if (cu_->target64) {
        rcSrc = kCoreReg;
        op = kX86Cvtsqi2ssRR;
        break;
      }
      GenLongToFP(rl_dest, rl_src, false /* is_double */);
      return;
    case Instruction::FLOAT_TO_LONG:
      if (cu_->target64) {
        rl_src = LoadValue(rl_src, kFPReg);
        // If result vreg is also src vreg, break association to avoid useless copy by EvalLoc()
        ClobberSReg(rl_dest.s_reg_low);
        rl_result = EvalLoc(rl_dest, kCoreReg, true);
        RegStorage temp_reg = AllocTempSingle();

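        // On 64-bit targets f2l is inlined with the same overflow/NaN scheme as the int cases
        // above, clamping to LONG_MAX; 32-bit targets fall back to the kQuickF2l runtime helper
        // below.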
        // Set 0x7fffffffffffffff to rl_result
        LoadConstantWide(rl_result.reg, 0x7fffffffffffffff);
        NewLIR2(kX86Cvtsqi2ssRR, temp_reg.GetReg(), rl_result.reg.GetReg());
        NewLIR2(kX86ComissRR, rl_src.reg.GetReg(), temp_reg.GetReg());
        LIR* branch_pos_overflow = NewLIR2(kX86Jcc8, 0, kX86CondAe);
        LIR* branch_na_n = NewLIR2(kX86Jcc8, 0, kX86CondP);
        NewLIR2(kX86Cvttss2sqiRR, rl_result.reg.GetReg(), rl_src.reg.GetReg());
        LIR* branch_normal = NewLIR1(kX86Jmp8, 0);
        branch_na_n->target = NewLIR0(kPseudoTargetLabel);
        NewLIR2(kX86Xor64RR, rl_result.reg.GetReg(), rl_result.reg.GetReg());
        branch_pos_overflow->target = NewLIR0(kPseudoTargetLabel);
        branch_normal->target = NewLIR0(kPseudoTargetLabel);
        StoreValueWide(rl_dest, rl_result);
      } else {
        GenConversionCall(kQuickF2l, rl_dest, rl_src);
      }
      return;
    case Instruction::DOUBLE_TO_LONG:
      if (cu_->target64) {
        rl_src = LoadValueWide(rl_src, kFPReg);
        // If result vreg is also src vreg, break association to avoid useless copy by EvalLoc()
        ClobberSReg(rl_dest.s_reg_low);
        rl_result = EvalLoc(rl_dest, kCoreReg, true);
        RegStorage temp_reg = AllocTempDouble();

        // Set 0x7fffffffffffffff to rl_result
        LoadConstantWide(rl_result.reg, 0x7fffffffffffffff);
        NewLIR2(kX86Cvtsqi2sdRR, temp_reg.GetReg(), rl_result.reg.GetReg());
        NewLIR2(kX86ComisdRR, rl_src.reg.GetReg(), temp_reg.GetReg());
        LIR* branch_pos_overflow = NewLIR2(kX86Jcc8, 0, kX86CondAe);
        LIR* branch_na_n = NewLIR2(kX86Jcc8, 0, kX86CondP);
        NewLIR2(kX86Cvttsd2sqiRR, rl_result.reg.GetReg(), rl_src.reg.GetReg());
        LIR* branch_normal = NewLIR1(kX86Jmp8, 0);
        branch_na_n->target = NewLIR0(kPseudoTargetLabel);
        NewLIR2(kX86Xor64RR, rl_result.reg.GetReg(), rl_result.reg.GetReg());
        branch_pos_overflow->target = NewLIR0(kPseudoTargetLabel);
        branch_normal->target = NewLIR0(kPseudoTargetLabel);
        StoreValueWide(rl_dest, rl_result);
      } else {
        GenConversionCall(kQuickD2l, rl_dest, rl_src);
      }
      return;
    default:
      LOG(INFO) << "Unexpected opcode: " << opcode;
  }
  // At this point, target will be either float or double.
  DCHECK(rl_dest.fp);
  if (rl_src.wide) {
    rl_src = LoadValueWide(rl_src, rcSrc);
  } else {
    rl_src = LoadValue(rl_src, rcSrc);
  }
  rl_result = EvalLoc(rl_dest, kFPReg, true);
  NewLIR2(op, rl_result.reg.GetReg(), rl_src.reg.GetReg());
  if (rl_dest.wide) {
    StoreValueWide(rl_dest, rl_result);
  } else {
    StoreValue(rl_dest, rl_result);
  }
}

void X86Mir2Lir::GenRemFP(RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2, bool is_double) {
  // Compute offsets to the source and destination VRs on stack.
  int src1_v_reg_offset = SRegOffset(rl_src1.s_reg_low);
  int src2_v_reg_offset = SRegOffset(rl_src2.s_reg_low);
  int dest_v_reg_offset = SRegOffset(rl_dest.s_reg_low);

  // Update the in-register state of sources.
  rl_src1 = is_double ? UpdateLocWide(rl_src1) : UpdateLoc(rl_src1);
  rl_src2 = is_double ? UpdateLocWide(rl_src2) : UpdateLoc(rl_src2);

  // All memory accesses below reference dalvik regs.
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);

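  // The x87 fprem sequence below reads its operands from memory, so any source still live in an
  // SSE register is written back to its Dalvik VR home location first.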
  // If the source is in physical register, then put it in its location on stack.
  const RegStorage rs_rSP = cu_->target64 ? rs_rX86_SP_64 : rs_rX86_SP_32;
  if (rl_src1.location == kLocPhysReg) {
    RegisterInfo* reg_info = GetRegInfo(rl_src1.reg);

    if (reg_info != nullptr && reg_info->IsTemp()) {
      // Calling FlushSpecificReg because it will only write back VR if it is dirty.
      FlushSpecificReg(reg_info);
      // ResetDef to prevent NullifyRange from removing stores.
      ResetDef(rl_src1.reg);
    } else {
      // It must have been register promoted if it is not a temp but is still in physical
      // register. Since we need it to be in memory to convert, we place it there now.
      StoreBaseDisp(rs_rSP, src1_v_reg_offset, rl_src1.reg, is_double ? k64 : k32,
                    kNotVolatile);
    }
  }

  if (rl_src2.location == kLocPhysReg) {
    RegisterInfo* reg_info = GetRegInfo(rl_src2.reg);
    if (reg_info != nullptr && reg_info->IsTemp()) {
      FlushSpecificReg(reg_info);
      ResetDef(rl_src2.reg);
    } else {
      StoreBaseDisp(rs_rSP, src2_v_reg_offset, rl_src2.reg, is_double ? k64 : k32,
                    kNotVolatile);
    }
  }

  int fld_opcode = is_double ? kX86Fld64M : kX86Fld32M;

  // Push the source virtual registers onto the x87 stack.
  LIR *fld_2 = NewLIR2NoDest(fld_opcode, rs_rSP.GetReg(),
                             src2_v_reg_offset + LOWORD_OFFSET);
  AnnotateDalvikRegAccess(fld_2, (src2_v_reg_offset + LOWORD_OFFSET) >> 2,
                          true /* is_load */, is_double /* is64bit */);

  LIR *fld_1 = NewLIR2NoDest(fld_opcode, rs_rSP.GetReg(),
                             src1_v_reg_offset + LOWORD_OFFSET);
  AnnotateDalvikRegAccess(fld_1, (src1_v_reg_offset + LOWORD_OFFSET) >> 2,
                          true /* is_load */, is_double /* is64bit */);

  FlushReg(rs_rAX);
  Clobber(rs_rAX);
  LockTemp(rs_rAX);

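  // fprem reduces the exponent of ST(0) by at most 63 bits per iteration; the C2 bit (0x400) of
  // the FPU status word stays set while the reduction is incomplete, so the sequence loops on it.
  // AX is reserved because the register form of fstsw can only store the status word to AX.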
  LIR* retry = NewLIR0(kPseudoTargetLabel);

  // Divide ST(0) by ST(1) and place result to ST(0).
  NewLIR0(kX86Fprem);

  // Move FPU status word to AX.
  NewLIR0(kX86Fstsw16R);

  // Check if reduction is complete.
  OpRegImm(kOpAnd, rs_rAX, 0x400);

  // If no then continue to compute remainder.
  LIR* branch = NewLIR2(kX86Jcc8, 0, kX86CondNe);
  branch->target = retry;

  FreeTemp(rs_rAX);

  // Now store result in the destination VR's stack location.
  int displacement = dest_v_reg_offset + LOWORD_OFFSET;
  int opcode = is_double ? kX86Fst64M : kX86Fst32M;
  LIR *fst = NewLIR2NoDest(opcode, rs_rSP.GetReg(), displacement);
  AnnotateDalvikRegAccess(fst, displacement >> 2, false /* is_load */, is_double /* is64bit */);

  // Pop ST(1) and ST(0).
  NewLIR0(kX86Fucompp);

  /*
   * The result is in a physical register if it was in a temp or was register
   * promoted. For that reason it is enough to check if it is in physical
   * register. If it is, then we must do all of the bookkeeping necessary to
   * invalidate temp (if needed) and load in promoted register (if needed).
   * If the result's location is in memory, then we do not need to do anything
   * more since the fstp has already placed the correct value in memory.
   */
  RegLocation rl_result = is_double ? UpdateLocWideTyped(rl_dest) : UpdateLocTyped(rl_dest);
  if (rl_result.location == kLocPhysReg) {
    rl_result = EvalLoc(rl_dest, kFPReg, true);
    if (is_double) {
      LoadBaseDisp(rs_rSP, dest_v_reg_offset, rl_result.reg, k64, kNotVolatile);
      StoreFinalValueWide(rl_dest, rl_result);
    } else {
      Load32Disp(rs_rSP, dest_v_reg_offset, rl_result.reg);
      StoreFinalValue(rl_dest, rl_result);
    }
  }
}

void X86Mir2Lir::GenCmpFP(Instruction::Code code, RegLocation rl_dest,
                          RegLocation rl_src1, RegLocation rl_src2) {
  bool single = (code == Instruction::CMPL_FLOAT) || (code == Instruction::CMPG_FLOAT);
  bool unordered_gt = (code == Instruction::CMPG_DOUBLE) || (code == Instruction::CMPG_FLOAT);
  if (single) {
    rl_src1 = LoadValue(rl_src1, kFPReg);
    rl_src2 = LoadValue(rl_src2, kFPReg);
  } else {
    rl_src1 = LoadValueWide(rl_src1, kFPReg);
    rl_src2 = LoadValueWide(rl_src2, kFPReg);
  }
  // In case result vreg is also src vreg, break association to avoid useless copy by EvalLoc()
  ClobberSReg(rl_dest.s_reg_low);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  LoadConstantNoClobber(rl_result.reg, unordered_gt ? 1 : 0);
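  // After ucomiss/ucomisd, CF is set when src1 < src2 or the operands are unordered, ZF when they
  // are equal, and PF when unordered. "Set if above" produces 1 for src1 > src2 and the trailing
  // sbb subtracts CF, giving 1, 0 or -1; a NaN either takes the early parity branch (gt bias,
  // result stays 1) or falls through to -1 (lt bias).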
  if (single) {
    NewLIR2(kX86UcomissRR, rl_src1.reg.GetReg(), rl_src2.reg.GetReg());
  } else {
    NewLIR2(kX86UcomisdRR, rl_src1.reg.GetReg(), rl_src2.reg.GetReg());
  }
  LIR* branch = NULL;
  if (unordered_gt) {
    branch = NewLIR2(kX86Jcc8, 0, kX86CondPE);
  }
  // If the result reg can't be byte accessed, use a jump and move instead of a set.
  if (!IsByteRegister(rl_result.reg)) {
    LIR* branch2 = NULL;
    if (unordered_gt) {
      branch2 = NewLIR2(kX86Jcc8, 0, kX86CondA);
      NewLIR2(kX86Mov32RI, rl_result.reg.GetReg(), 0x0);
    } else {
      branch2 = NewLIR2(kX86Jcc8, 0, kX86CondBe);
      NewLIR2(kX86Mov32RI, rl_result.reg.GetReg(), 0x1);
    }
    branch2->target = NewLIR0(kPseudoTargetLabel);
  } else {
    NewLIR2(kX86Set8R, rl_result.reg.GetReg(), kX86CondA /* above - unsigned > */);
  }
  NewLIR2(kX86Sbb32RI, rl_result.reg.GetReg(), 0);
  if (unordered_gt) {
    branch->target = NewLIR0(kPseudoTargetLabel);
  }
  StoreValue(rl_dest, rl_result);
}

void X86Mir2Lir::GenFusedFPCmpBranch(BasicBlock* bb, MIR* mir, bool gt_bias,
                                     bool is_double) {
  LIR* taken = &block_label_list_[bb->taken];
  LIR* not_taken = &block_label_list_[bb->fall_through];
  LIR* branch = NULL;
  RegLocation rl_src1;
  RegLocation rl_src2;
  if (is_double) {
    rl_src1 = mir_graph_->GetSrcWide(mir, 0);
    rl_src2 = mir_graph_->GetSrcWide(mir, 2);
    rl_src1 = LoadValueWide(rl_src1, kFPReg);
    rl_src2 = LoadValueWide(rl_src2, kFPReg);
    NewLIR2(kX86UcomisdRR, rl_src1.reg.GetReg(), rl_src2.reg.GetReg());
  } else {
    rl_src1 = mir_graph_->GetSrc(mir, 0);
    rl_src2 = mir_graph_->GetSrc(mir, 1);
    rl_src1 = LoadValue(rl_src1, kFPReg);
    rl_src2 = LoadValue(rl_src2, kFPReg);
    NewLIR2(kX86UcomissRR, rl_src1.reg.GetReg(), rl_src2.reg.GetReg());
  }
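  // ucomiss/ucomisd leave an unsigned-style flag result (CF/ZF) plus PF for unordered operands,
  // so the signed Dalvik conditions are rewritten to their unsigned x86 counterparts below, and
  // explicit parity branches route NaN operands according to the gt_bias of the fused cmpl/cmpg.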
  ConditionCode ccode = mir->meta.ccode;
  switch (ccode) {
    case kCondEq:
      if (!gt_bias) {
        branch = NewLIR2(kX86Jcc8, 0, kX86CondPE);
        branch->target = not_taken;
      }
      break;
    case kCondNe:
      if (!gt_bias) {
        branch = NewLIR2(kX86Jcc8, 0, kX86CondPE);
        branch->target = taken;
      }
      break;
    case kCondLt:
      if (gt_bias) {
        branch = NewLIR2(kX86Jcc8, 0, kX86CondPE);
        branch->target = not_taken;
      }
      ccode = kCondUlt;
      break;
    case kCondLe:
      if (gt_bias) {
        branch = NewLIR2(kX86Jcc8, 0, kX86CondPE);
        branch->target = not_taken;
      }
      ccode = kCondLs;
      break;
    case kCondGt:
      if (gt_bias) {
        branch = NewLIR2(kX86Jcc8, 0, kX86CondPE);
        branch->target = taken;
      }
      ccode = kCondHi;
      break;
    case kCondGe:
      if (gt_bias) {
        branch = NewLIR2(kX86Jcc8, 0, kX86CondPE);
        branch->target = taken;
      }
      ccode = kCondUge;
      break;
    default:
      LOG(FATAL) << "Unexpected ccode: " << ccode;
  }
  OpCondBranch(ccode, taken);
}

void X86Mir2Lir::GenNegFloat(RegLocation rl_dest, RegLocation rl_src) {
  RegLocation rl_result;
  rl_src = LoadValue(rl_src, kCoreReg);
  rl_result = EvalLoc(rl_dest, kCoreReg, true);
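  // Negate in a core register by toggling the IEEE sign bit: adding 0x80000000 flips bit 31 and
  // no lower bit can carry into it.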
  OpRegRegImm(kOpAdd, rl_result.reg, rl_src.reg, 0x80000000);
  StoreValue(rl_dest, rl_result);
}

void X86Mir2Lir::GenNegDouble(RegLocation rl_dest, RegLocation rl_src) {
  RegLocation rl_result;
  rl_src = LoadValueWide(rl_src, kCoreReg);
  if (cu_->target64) {
    rl_result = EvalLocWide(rl_dest, kCoreReg, true);
    OpRegCopy(rl_result.reg, rl_src.reg);
    // Flip sign bit.
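    // x86-64 xor only takes a sign-extended 32-bit immediate, so rather than materializing
    // 0x8000000000000000 in a temp, the sign bit is rotated down to bit 0, flipped, and
    // rotated back.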
    NewLIR2(kX86Rol64RI, rl_result.reg.GetReg(), 1);
    NewLIR2(kX86Xor64RI, rl_result.reg.GetReg(), 1);
    NewLIR2(kX86Ror64RI, rl_result.reg.GetReg(), 1);
  } else {
    rl_result = ForceTempWide(rl_src);
    OpRegRegImm(kOpAdd, rl_result.reg.GetHigh(), rl_result.reg.GetHigh(), 0x80000000);
  }
  StoreValueWide(rl_dest, rl_result);
}

bool X86Mir2Lir::GenInlinedSqrt(CallInfo* info) {
  RegLocation rl_src = info->args[0];
  RegLocation rl_dest = InlineTargetWide(info);  // double place for result
  rl_src = LoadValueWide(rl_src, kFPReg);
  RegLocation rl_result = EvalLoc(rl_dest, kFPReg, true);
  NewLIR2(kX86SqrtsdRR, rl_result.reg.GetReg(), rl_src.reg.GetReg());
  StoreValueWide(rl_dest, rl_result);
  return true;
}

bool X86Mir2Lir::GenInlinedAbsFloat(CallInfo* info) {
  // Get the argument
  RegLocation rl_src = info->args[0];

  // Get the inlined intrinsic target virtual register
  RegLocation rl_dest = InlineTarget(info);

  // Get the virtual register number
  DCHECK_NE(rl_src.s_reg_low, INVALID_SREG);
  if (rl_dest.s_reg_low == INVALID_SREG) {
    // Result is unused, the code is dead. Inlining successful, no code generated.
    return true;
  }
  int v_src_reg = mir_graph_->SRegToVReg(rl_src.s_reg_low);
  int v_dst_reg = mir_graph_->SRegToVReg(rl_dest.s_reg_low);

  // if argument is the same as inlined intrinsic target
  if (v_src_reg == v_dst_reg) {
    rl_src = UpdateLoc(rl_src);

    // if argument is in the physical register
    if (rl_src.location == kLocPhysReg) {
      rl_src = LoadValue(rl_src, kCoreReg);
      OpRegImm(kOpAnd, rl_src.reg, 0x7fffffff);
      StoreValue(rl_dest, rl_src);
      return true;
    }
    // the argument is in memory
    DCHECK((rl_src.location == kLocDalvikFrame) ||
           (rl_src.location == kLocCompilerTemp));

    // Operate directly into memory.
    int displacement = SRegOffset(rl_dest.s_reg_low);
    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
    LIR *lir = NewLIR3(kX86And32MI, rs_rX86_SP_32.GetReg(), displacement, 0x7fffffff);
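    // The memory-form and is a read-modify-write of the VR slot, so both the load and the store
    // are annotated.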
    AnnotateDalvikRegAccess(lir, displacement >> 2, false /*is_load */, false /* is_64bit */);
    AnnotateDalvikRegAccess(lir, displacement >> 2, true /* is_load */, false /* is_64bit*/);
    return true;
  } else {
    rl_src = LoadValue(rl_src, kCoreReg);
    RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
    OpRegRegImm(kOpAnd, rl_result.reg, rl_src.reg, 0x7fffffff);
    StoreValue(rl_dest, rl_result);
    return true;
  }
}

bool X86Mir2Lir::GenInlinedAbsDouble(CallInfo* info) {
  RegLocation rl_src = info->args[0];
  RegLocation rl_dest = InlineTargetWide(info);
  DCHECK_NE(rl_src.s_reg_low, INVALID_SREG);
  if (rl_dest.s_reg_low == INVALID_SREG) {
    // Result is unused, the code is dead. Inlining successful, no code generated.
    return true;
  }
  if (cu_->target64) {
    rl_src = LoadValueWide(rl_src, kCoreReg);
    RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
    OpRegCopyWide(rl_result.reg, rl_src.reg);
    OpRegImm(kOpLsl, rl_result.reg, 1);
    OpRegImm(kOpLsr, rl_result.reg, 1);
    StoreValueWide(rl_dest, rl_result);
    return true;
  }
  int v_src_reg = mir_graph_->SRegToVReg(rl_src.s_reg_low);
  int v_dst_reg = mir_graph_->SRegToVReg(rl_dest.s_reg_low);
  rl_src = UpdateLocWide(rl_src);

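  // When the value is already in an XMM register, clearing the sign bit with pand against
  // 0x7fffffffffffffff keeps it in the XMM file and avoids a round trip through core registers.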
  // if argument is in the physical XMM register
  if (rl_src.location == kLocPhysReg && rl_src.reg.IsFloat()) {
    RegLocation rl_result = EvalLoc(rl_dest, kFPReg, true);
    if (rl_result.reg != rl_src.reg) {
      LoadConstantWide(rl_result.reg, 0x7fffffffffffffff);
      NewLIR2(kX86PandRR, rl_result.reg.GetReg(), rl_src.reg.GetReg());
    } else {
      RegStorage sign_mask = AllocTempDouble();
      LoadConstantWide(sign_mask, 0x7fffffffffffffff);
      NewLIR2(kX86PandRR, rl_result.reg.GetReg(), sign_mask.GetReg());
      FreeTemp(sign_mask);
    }
    StoreValueWide(rl_dest, rl_result);
    return true;
  } else if (v_src_reg == v_dst_reg) {
    // if argument is the same as inlined intrinsic target
    // if argument is in the physical register
    if (rl_src.location == kLocPhysReg) {
      rl_src = LoadValueWide(rl_src, kCoreReg);
      OpRegImm(kOpAnd, rl_src.reg.GetHigh(), 0x7fffffff);
      StoreValueWide(rl_dest, rl_src);
      return true;
    }
    // the argument is in memory
    DCHECK((rl_src.location == kLocDalvikFrame) ||
           (rl_src.location == kLocCompilerTemp));

    // Operate directly into memory.
    int displacement = SRegOffset(rl_dest.s_reg_low);
    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
    LIR *lir = NewLIR3(kX86And32MI, rs_rX86_SP_32.GetReg(), displacement + HIWORD_OFFSET, 0x7fffffff);
    AnnotateDalvikRegAccess(lir, (displacement + HIWORD_OFFSET) >> 2, true /* is_load */, true /* is_64bit*/);
    AnnotateDalvikRegAccess(lir, (displacement + HIWORD_OFFSET) >> 2, false /*is_load */, true /* is_64bit */);
    return true;
  } else {
    rl_src = LoadValueWide(rl_src, kCoreReg);
    RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
    OpRegCopyWide(rl_result.reg, rl_src.reg);
    OpRegImm(kOpAnd, rl_result.reg.GetHigh(), 0x7fffffff);
    StoreValueWide(rl_dest, rl_result);
    return true;
  }
}

bool X86Mir2Lir::GenInlinedMinMaxFP(CallInfo* info, bool is_min, bool is_double) {
  if (is_double) {
    RegLocation rl_src1 = LoadValueWide(info->args[0], kFPReg);
    RegLocation rl_src2 = LoadValueWide(info->args[2], kFPReg);
    RegLocation rl_dest = InlineTargetWide(info);
    RegLocation rl_result = EvalLocWide(rl_dest, kFPReg, true);

    // Avoid src2 corruption by OpRegCopyWide.
    if (rl_result.reg == rl_src2.reg) {
      std::swap(rl_src2.reg, rl_src1.reg);
    }

    OpRegCopyWide(rl_result.reg, rl_src1.reg);
    NewLIR2(kX86UcomisdRR, rl_result.reg.GetReg(), rl_src2.reg.GetReg());
    // If either arg is NaN, return NaN.
    LIR* branch_nan = NewLIR2(kX86Jcc8, 0, kX86CondP);
    // Min/Max branches.
    LIR* branch_cond1 = NewLIR2(kX86Jcc8, 0, (is_min) ? kX86CondA : kX86CondB);
    LIR* branch_cond2 = NewLIR2(kX86Jcc8, 0, (is_min) ? kX86CondB : kX86CondA);
    // If equal, we need to resolve situations like min/max(0.0, -0.0) == -0.0/0.0.
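    // +0.0 and -0.0 differ only in the sign bit, so OR-ing the two bit patterns yields -0.0 (the
    // min) and AND-ing them yields +0.0 (the max).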
    NewLIR2((is_min) ? kX86OrpdRR : kX86AndpdRR, rl_result.reg.GetReg(), rl_src2.reg.GetReg());
    LIR* branch_exit_equal = NewLIR1(kX86Jmp8, 0);
    // Handle NaN.
    branch_nan->target = NewLIR0(kPseudoTargetLabel);
    LoadConstantWide(rl_result.reg, INT64_C(0x7ff8000000000000));

    // The base_of_code_ compiler temp is non-null when it is reserved
    // for being able to do data accesses relative to method start.
    if (base_of_code_ != nullptr) {
      // Loading from the constant pool may have used base of code register.
      // However, the code here generates logic in diamond shape and not all
      // paths load base of code register. Therefore, we ensure it is clobbered so
      // that the temp caching system does not believe it is live at merge point.
      RegLocation rl_method = mir_graph_->GetRegLocation(base_of_code_->s_reg_low);
      if (rl_method.wide) {
        rl_method = UpdateLocWide(rl_method);
      } else {
        rl_method = UpdateLoc(rl_method);
      }
      if (rl_method.location == kLocPhysReg) {
        Clobber(rl_method.reg);
      }
    }

    LIR* branch_exit_nan = NewLIR1(kX86Jmp8, 0);
    // Handle Min/Max. Copy greater/lesser value from src2.
    branch_cond1->target = NewLIR0(kPseudoTargetLabel);
    OpRegCopyWide(rl_result.reg, rl_src2.reg);
    // Right operand is already in result reg.
    branch_cond2->target = NewLIR0(kPseudoTargetLabel);
    // Exit.
    branch_exit_nan->target = NewLIR0(kPseudoTargetLabel);
    branch_exit_equal->target = NewLIR0(kPseudoTargetLabel);
    StoreValueWide(rl_dest, rl_result);
  } else {
    RegLocation rl_src1 = LoadValue(info->args[0], kFPReg);
    RegLocation rl_src2 = LoadValue(info->args[1], kFPReg);
    RegLocation rl_dest = InlineTarget(info);
    RegLocation rl_result = EvalLoc(rl_dest, kFPReg, true);

    // Avoid src2 corruption by OpRegCopyWide.
    if (rl_result.reg == rl_src2.reg) {
      std::swap(rl_src2.reg, rl_src1.reg);
    }

    OpRegCopy(rl_result.reg, rl_src1.reg);
    NewLIR2(kX86UcomissRR, rl_result.reg.GetReg(), rl_src2.reg.GetReg());
    // If either arg is NaN, return NaN.
    LIR* branch_nan = NewLIR2(kX86Jcc8, 0, kX86CondP);
    // Min/Max branches.
    LIR* branch_cond1 = NewLIR2(kX86Jcc8, 0, (is_min) ? kX86CondA : kX86CondB);
    LIR* branch_cond2 = NewLIR2(kX86Jcc8, 0, (is_min) ? kX86CondB : kX86CondA);
    // If equal, we need to resolve situations like min/max(0.0, -0.0) == -0.0/0.0.
    NewLIR2((is_min) ? kX86OrpsRR : kX86AndpsRR, rl_result.reg.GetReg(), rl_src2.reg.GetReg());
    LIR* branch_exit_equal = NewLIR1(kX86Jmp8, 0);
    // Handle NaN.
    branch_nan->target = NewLIR0(kPseudoTargetLabel);
    LoadConstantNoClobber(rl_result.reg, 0x7fc00000);
    LIR* branch_exit_nan = NewLIR1(kX86Jmp8, 0);
    // Handle Min/Max. Copy greater/lesser value from src2.
    branch_cond1->target = NewLIR0(kPseudoTargetLabel);
    OpRegCopy(rl_result.reg, rl_src2.reg);
    // Right operand is already in result reg.
    branch_cond2->target = NewLIR0(kPseudoTargetLabel);
    // Exit.
    branch_exit_nan->target = NewLIR0(kPseudoTargetLabel);
    branch_exit_equal->target = NewLIR0(kPseudoTargetLabel);
    StoreValue(rl_dest, rl_result);
  }
  return true;
}

}  // namespace art