/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "codegen_x86.h"

#include "base/logging.h"
#include "dex/quick/mir_to_lir-inl.h"
#include "dex/reg_storage_eq.h"
#include "x86_lir.h"

namespace art {

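// Generates scalar SSE code for float add/sub/mul/div; REM_FLOAT and NEG_FLOAT are routed to
// dedicated helpers. The SSE opcodes are two-operand, so when the destination register aliases
// src2, src2 is first copied to a temp before src1 is moved into the destination.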
void X86Mir2Lir::GenArithOpFloat(Instruction::Code opcode,
                                 RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2) {
  X86OpCode op = kX86Nop;
  RegLocation rl_result;

  /*
   * Don't attempt to optimize register usage since these opcodes call out to
   * the handlers.
   */
  switch (opcode) {
    case Instruction::ADD_FLOAT_2ADDR:
    case Instruction::ADD_FLOAT:
      op = kX86AddssRR;
      break;
    case Instruction::SUB_FLOAT_2ADDR:
    case Instruction::SUB_FLOAT:
      op = kX86SubssRR;
      break;
    case Instruction::DIV_FLOAT_2ADDR:
    case Instruction::DIV_FLOAT:
      op = kX86DivssRR;
      break;
    case Instruction::MUL_FLOAT_2ADDR:
    case Instruction::MUL_FLOAT:
      op = kX86MulssRR;
      break;
    case Instruction::REM_FLOAT_2ADDR:
    case Instruction::REM_FLOAT:
      GenRemFP(rl_dest, rl_src1, rl_src2, false /* is_double */);
      return;
    case Instruction::NEG_FLOAT:
      GenNegFloat(rl_dest, rl_src1);
      return;
    default:
      LOG(FATAL) << "Unexpected opcode: " << opcode;
  }
  rl_src1 = LoadValue(rl_src1, kFPReg);
  rl_src2 = LoadValue(rl_src2, kFPReg);
  rl_result = EvalLoc(rl_dest, kFPReg, true);
  RegStorage r_dest = rl_result.reg;
  RegStorage r_src1 = rl_src1.reg;
  RegStorage r_src2 = rl_src2.reg;
  if (r_dest == r_src2) {
    r_src2 = AllocTempSingle();
    OpRegCopy(r_src2, r_dest);
  }
  OpRegCopy(r_dest, r_src1);
  NewLIR2(op, r_dest.GetReg(), r_src2.GetReg());
  StoreValue(rl_dest, rl_result);
}

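// Double-precision counterpart of GenArithOpFloat, using the scalar-double (sd) SSE opcodes.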
void X86Mir2Lir::GenArithOpDouble(Instruction::Code opcode,
                                  RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2) {
  DCHECK(rl_dest.wide);
  DCHECK(rl_dest.fp);
  DCHECK(rl_src1.wide);
  DCHECK(rl_src1.fp);
  DCHECK(rl_src2.wide);
  DCHECK(rl_src2.fp);
  X86OpCode op = kX86Nop;
  RegLocation rl_result;

  switch (opcode) {
    case Instruction::ADD_DOUBLE_2ADDR:
    case Instruction::ADD_DOUBLE:
      op = kX86AddsdRR;
      break;
    case Instruction::SUB_DOUBLE_2ADDR:
    case Instruction::SUB_DOUBLE:
      op = kX86SubsdRR;
      break;
    case Instruction::DIV_DOUBLE_2ADDR:
    case Instruction::DIV_DOUBLE:
      op = kX86DivsdRR;
      break;
    case Instruction::MUL_DOUBLE_2ADDR:
    case Instruction::MUL_DOUBLE:
      op = kX86MulsdRR;
      break;
    case Instruction::REM_DOUBLE_2ADDR:
    case Instruction::REM_DOUBLE:
      GenRemFP(rl_dest, rl_src1, rl_src2, true /* is_double */);
      return;
    case Instruction::NEG_DOUBLE:
      GenNegDouble(rl_dest, rl_src1);
      return;
    default:
      LOG(FATAL) << "Unexpected opcode: " << opcode;
  }
  rl_src1 = LoadValueWide(rl_src1, kFPReg);
  rl_src2 = LoadValueWide(rl_src2, kFPReg);
  rl_result = EvalLoc(rl_dest, kFPReg, true);
  if (rl_result.reg == rl_src2.reg) {
    rl_src2.reg = AllocTempDouble();
    OpRegCopy(rl_src2.reg, rl_result.reg);
  }
  OpRegCopy(rl_result.reg, rl_src1.reg);
  NewLIR2(op, rl_result.reg.GetReg(), rl_src2.reg.GetReg());
  StoreValueWide(rl_dest, rl_result);
}

void X86Mir2Lir::GenMultiplyByConstantFloat(RegLocation rl_dest, RegLocation rl_src1,
                                            int32_t constant) {
  // TODO: need x86 implementation.
  UNUSED(rl_dest, rl_src1, constant);
  LOG(FATAL) << "Unimplemented GenMultiplyByConstantFloat in x86";
}

void X86Mir2Lir::GenMultiplyByConstantDouble(RegLocation rl_dest, RegLocation rl_src1,
                                             int64_t constant) {
  // TODO: need x86 implementation.
  UNUSED(rl_dest, rl_src1, constant);
  LOG(FATAL) << "Unimplemented GenMultiplyByConstantDouble in x86";
}

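// Converts a 64-bit integer to float/double on 32-bit targets, which have no SSE conversion
// from a 64-bit integer source: the long is flushed to its frame slot, pushed through the x87
// unit with fild, popped back to the destination slot with fstp, and then reloaded into an XMM
// register if the result lives in a physical register.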
void X86Mir2Lir::GenLongToFP(RegLocation rl_dest, RegLocation rl_src, bool is_double) {
  // Compute offsets to the source and destination VRs on the stack.
  int src_v_reg_offset = SRegOffset(rl_src.s_reg_low);
  int dest_v_reg_offset = SRegOffset(rl_dest.s_reg_low);

  // Update the in-register state of source.
  rl_src = UpdateLocWide(rl_src);

  // All memory accesses below reference dalvik regs.
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);

  // If the source is in a physical register, then put it in its location on the stack.
  if (rl_src.location == kLocPhysReg) {
    RegisterInfo* reg_info = GetRegInfo(rl_src.reg);

    if (reg_info != nullptr && reg_info->IsTemp()) {
      // Calling FlushSpecificReg because it will only write back VR if it is dirty.
      FlushSpecificReg(reg_info);
      // ResetDef to prevent NullifyRange from removing stores.
      ResetDef(rl_src.reg);
    } else {
      // It must have been register promoted if it is not a temp but is still in physical
      // register. Since we need it to be in memory to convert, we place it there now.
      const RegStorage rs_rSP = cu_->target64 ? rs_rX86_SP_64 : rs_rX86_SP_32;
      StoreBaseDisp(rs_rSP, src_v_reg_offset, rl_src.reg, k64, kNotVolatile);
    }
  }

  // Push the source virtual register onto the x87 stack.
  LIR *fild64 = NewLIR2NoDest(kX86Fild64M, rs_rX86_SP_32.GetReg(),
                              src_v_reg_offset + LOWORD_OFFSET);
  AnnotateDalvikRegAccess(fild64, (src_v_reg_offset + LOWORD_OFFSET) >> 2,
                          true /* is_load */, true /* is64bit */);

  // Now pop off the x87 stack and store it in the destination VR's stack location.
  int opcode = is_double ? kX86Fstp64M : kX86Fstp32M;
  int displacement = is_double ? dest_v_reg_offset + LOWORD_OFFSET : dest_v_reg_offset;
  LIR *fstp = NewLIR2NoDest(opcode, rs_rX86_SP_32.GetReg(), displacement);
  AnnotateDalvikRegAccess(fstp, displacement >> 2, false /* is_load */, is_double);

  /*
   * The result is in a physical register if it was in a temp or was register
   * promoted. For that reason it is enough to check if it is in physical
   * register. If it is, then we must do all of the bookkeeping necessary to
   * invalidate temp (if needed) and load in promoted register (if needed).
   * If the result's location is in memory, then we do not need to do anything
   * more since the fstp has already placed the correct value in memory.
   */
  RegLocation rl_result = is_double ? UpdateLocWideTyped(rl_dest) : UpdateLocTyped(rl_dest);
  if (rl_result.location == kLocPhysReg) {
    /*
     * We already know that the result is in a physical register but do not know if it is the
     * right class. So we call EvalLoc(Wide) first which will ensure that it will get moved to the
     * correct register class.
     */
    rl_result = EvalLoc(rl_dest, kFPReg, true);
    const RegStorage rs_rSP = cu_->target64 ? rs_rX86_SP_64 : rs_rX86_SP_32;
    if (is_double) {
      LoadBaseDisp(rs_rSP, dest_v_reg_offset, rl_result.reg, k64, kNotVolatile);

      StoreFinalValueWide(rl_dest, rl_result);
    } else {
      Load32Disp(rs_rSP, dest_v_reg_offset, rl_result.reg);

      StoreFinalValue(rl_dest, rl_result);
    }
  }
}

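// Handles the primitive-conversion opcodes. int/long <-> float/double map to single cvt*
// instructions (long sources only on 64-bit targets; 32-bit falls back to GenLongToFP or a
// runtime call for fp-to-long). The fp-to-int/long cases open-code the Java saturation rules:
// the result is preloaded with the maximum value, overly large inputs keep it, NaN is zeroed
// via the parity flag, and cvtt*'s integer-indefinite result covers negative overflow.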
void X86Mir2Lir::GenConversion(Instruction::Code opcode, RegLocation rl_dest,
                               RegLocation rl_src) {
  RegisterClass rcSrc = kFPReg;
  X86OpCode op = kX86Nop;
  RegLocation rl_result;
  switch (opcode) {
    case Instruction::INT_TO_FLOAT:
      rcSrc = kCoreReg;
      op = kX86Cvtsi2ssRR;
      break;
    case Instruction::DOUBLE_TO_FLOAT:
      rcSrc = kFPReg;
      op = kX86Cvtsd2ssRR;
      break;
    case Instruction::FLOAT_TO_DOUBLE:
      rcSrc = kFPReg;
      op = kX86Cvtss2sdRR;
      break;
    case Instruction::INT_TO_DOUBLE:
      rcSrc = kCoreReg;
      op = kX86Cvtsi2sdRR;
      break;
    case Instruction::FLOAT_TO_INT: {
      rl_src = LoadValue(rl_src, kFPReg);
      // In case result vreg is also src vreg, break association to avoid useless copy by EvalLoc()
      ClobberSReg(rl_dest.s_reg_low);
      rl_result = EvalLoc(rl_dest, kCoreReg, true);
      RegStorage temp_reg = AllocTempSingle();

      LoadConstant(rl_result.reg, 0x7fffffff);
      NewLIR2(kX86Cvtsi2ssRR, temp_reg.GetReg(), rl_result.reg.GetReg());
      NewLIR2(kX86ComissRR, rl_src.reg.GetReg(), temp_reg.GetReg());
      LIR* branch_pos_overflow = NewLIR2(kX86Jcc8, 0, kX86CondAe);
      LIR* branch_na_n = NewLIR2(kX86Jcc8, 0, kX86CondP);
      NewLIR2(kX86Cvttss2siRR, rl_result.reg.GetReg(), rl_src.reg.GetReg());
      LIR* branch_normal = NewLIR1(kX86Jmp8, 0);
      branch_na_n->target = NewLIR0(kPseudoTargetLabel);
      NewLIR2(kX86Xor32RR, rl_result.reg.GetReg(), rl_result.reg.GetReg());
      branch_pos_overflow->target = NewLIR0(kPseudoTargetLabel);
      branch_normal->target = NewLIR0(kPseudoTargetLabel);
      StoreValue(rl_dest, rl_result);
      return;
    }
    case Instruction::DOUBLE_TO_INT: {
      rl_src = LoadValueWide(rl_src, kFPReg);
      // In case result vreg is also src vreg, break association to avoid useless copy by EvalLoc()
      ClobberSReg(rl_dest.s_reg_low);
      rl_result = EvalLoc(rl_dest, kCoreReg, true);
      RegStorage temp_reg = AllocTempDouble();

      LoadConstant(rl_result.reg, 0x7fffffff);
      NewLIR2(kX86Cvtsi2sdRR, temp_reg.GetReg(), rl_result.reg.GetReg());
      NewLIR2(kX86ComisdRR, rl_src.reg.GetReg(), temp_reg.GetReg());
      LIR* branch_pos_overflow = NewLIR2(kX86Jcc8, 0, kX86CondAe);
      LIR* branch_na_n = NewLIR2(kX86Jcc8, 0, kX86CondP);
      NewLIR2(kX86Cvttsd2siRR, rl_result.reg.GetReg(), rl_src.reg.GetReg());
      LIR* branch_normal = NewLIR1(kX86Jmp8, 0);
      branch_na_n->target = NewLIR0(kPseudoTargetLabel);
      NewLIR2(kX86Xor32RR, rl_result.reg.GetReg(), rl_result.reg.GetReg());
      branch_pos_overflow->target = NewLIR0(kPseudoTargetLabel);
      branch_normal->target = NewLIR0(kPseudoTargetLabel);
      StoreValue(rl_dest, rl_result);
      return;
    }
    case Instruction::LONG_TO_DOUBLE:
      if (cu_->target64) {
        rcSrc = kCoreReg;
        op = kX86Cvtsqi2sdRR;
        break;
      }
      GenLongToFP(rl_dest, rl_src, true /* is_double */);
      return;
    case Instruction::LONG_TO_FLOAT:
      if (cu_->target64) {
        rcSrc = kCoreReg;
        op = kX86Cvtsqi2ssRR;
        break;
      }
      GenLongToFP(rl_dest, rl_src, false /* is_double */);
      return;
    case Instruction::FLOAT_TO_LONG:
      if (cu_->target64) {
        rl_src = LoadValue(rl_src, kFPReg);
        // If result vreg is also src vreg, break association to avoid useless copy by EvalLoc()
        ClobberSReg(rl_dest.s_reg_low);
        rl_result = EvalLoc(rl_dest, kCoreReg, true);
        RegStorage temp_reg = AllocTempSingle();

        // Set rl_result to 0x7fffffffffffffff.
        LoadConstantWide(rl_result.reg, 0x7fffffffffffffff);
        NewLIR2(kX86Cvtsqi2ssRR, temp_reg.GetReg(), rl_result.reg.GetReg());
        NewLIR2(kX86ComissRR, rl_src.reg.GetReg(), temp_reg.GetReg());
        LIR* branch_pos_overflow = NewLIR2(kX86Jcc8, 0, kX86CondAe);
        LIR* branch_na_n = NewLIR2(kX86Jcc8, 0, kX86CondP);
        NewLIR2(kX86Cvttss2sqiRR, rl_result.reg.GetReg(), rl_src.reg.GetReg());
        LIR* branch_normal = NewLIR1(kX86Jmp8, 0);
        branch_na_n->target = NewLIR0(kPseudoTargetLabel);
        NewLIR2(kX86Xor64RR, rl_result.reg.GetReg(), rl_result.reg.GetReg());
        branch_pos_overflow->target = NewLIR0(kPseudoTargetLabel);
        branch_normal->target = NewLIR0(kPseudoTargetLabel);
        StoreValueWide(rl_dest, rl_result);
      } else {
        GenConversionCall(kQuickF2l, rl_dest, rl_src);
      }
      return;
    case Instruction::DOUBLE_TO_LONG:
      if (cu_->target64) {
        rl_src = LoadValueWide(rl_src, kFPReg);
        // If result vreg is also src vreg, break association to avoid useless copy by EvalLoc()
        ClobberSReg(rl_dest.s_reg_low);
        rl_result = EvalLoc(rl_dest, kCoreReg, true);
        RegStorage temp_reg = AllocTempDouble();

        // Set rl_result to 0x7fffffffffffffff.
        LoadConstantWide(rl_result.reg, 0x7fffffffffffffff);
        NewLIR2(kX86Cvtsqi2sdRR, temp_reg.GetReg(), rl_result.reg.GetReg());
        NewLIR2(kX86ComisdRR, rl_src.reg.GetReg(), temp_reg.GetReg());
        LIR* branch_pos_overflow = NewLIR2(kX86Jcc8, 0, kX86CondAe);
        LIR* branch_na_n = NewLIR2(kX86Jcc8, 0, kX86CondP);
        NewLIR2(kX86Cvttsd2sqiRR, rl_result.reg.GetReg(), rl_src.reg.GetReg());
        LIR* branch_normal = NewLIR1(kX86Jmp8, 0);
        branch_na_n->target = NewLIR0(kPseudoTargetLabel);
        NewLIR2(kX86Xor64RR, rl_result.reg.GetReg(), rl_result.reg.GetReg());
        branch_pos_overflow->target = NewLIR0(kPseudoTargetLabel);
        branch_normal->target = NewLIR0(kPseudoTargetLabel);
        StoreValueWide(rl_dest, rl_result);
      } else {
        GenConversionCall(kQuickD2l, rl_dest, rl_src);
      }
      return;
    default:
      LOG(INFO) << "Unexpected opcode: " << opcode;
  }
  // At this point, target will be either float or double.
  DCHECK(rl_dest.fp);
  if (rl_src.wide) {
    rl_src = LoadValueWide(rl_src, rcSrc);
  } else {
    rl_src = LoadValue(rl_src, rcSrc);
  }
  rl_result = EvalLoc(rl_dest, kFPReg, true);
  NewLIR2(op, rl_result.reg.GetReg(), rl_src.reg.GetReg());
  if (rl_dest.wide) {
    StoreValueWide(rl_dest, rl_result);
  } else {
    StoreValue(rl_dest, rl_result);
  }
}

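// Implements float/double remainder without a runtime call: both operands are flushed to their
// frame slots and pushed onto the x87 stack, fprem is iterated until the status word's C2 bit
// (0x400) reports that reduction is complete, and the remainder is stored back to the
// destination slot (and reloaded if the result lives in a physical register).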
void X86Mir2Lir::GenRemFP(RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2, bool is_double) {
  // Compute offsets to the source and destination VRs on the stack.
  int src1_v_reg_offset = SRegOffset(rl_src1.s_reg_low);
  int src2_v_reg_offset = SRegOffset(rl_src2.s_reg_low);
  int dest_v_reg_offset = SRegOffset(rl_dest.s_reg_low);

  // Update the in-register state of sources.
  rl_src1 = is_double ? UpdateLocWide(rl_src1) : UpdateLoc(rl_src1);
  rl_src2 = is_double ? UpdateLocWide(rl_src2) : UpdateLoc(rl_src2);

  // All memory accesses below reference dalvik regs.
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);

  // If a source is in a physical register, then put it in its location on the stack.
  const RegStorage rs_rSP = cu_->target64 ? rs_rX86_SP_64 : rs_rX86_SP_32;
  if (rl_src1.location == kLocPhysReg) {
    RegisterInfo* reg_info = GetRegInfo(rl_src1.reg);

    if (reg_info != nullptr && reg_info->IsTemp()) {
      // Calling FlushSpecificReg because it will only write back VR if it is dirty.
      FlushSpecificReg(reg_info);
      // ResetDef to prevent NullifyRange from removing stores.
      ResetDef(rl_src1.reg);
    } else {
      // It must have been register promoted if it is not a temp but is still in physical
      // register. Since we need it to be in memory to convert, we place it there now.
      StoreBaseDisp(rs_rSP, src1_v_reg_offset, rl_src1.reg, is_double ? k64 : k32,
                    kNotVolatile);
    }
  }

  if (rl_src2.location == kLocPhysReg) {
    RegisterInfo* reg_info = GetRegInfo(rl_src2.reg);
    if (reg_info != nullptr && reg_info->IsTemp()) {
      FlushSpecificReg(reg_info);
      ResetDef(rl_src2.reg);
    } else {
      StoreBaseDisp(rs_rSP, src2_v_reg_offset, rl_src2.reg, is_double ? k64 : k32,
                    kNotVolatile);
    }
  }

  int fld_opcode = is_double ? kX86Fld64M : kX86Fld32M;

  // Push the source virtual registers onto the x87 stack.
  LIR *fld_2 = NewLIR2NoDest(fld_opcode, rs_rSP.GetReg(),
                             src2_v_reg_offset + LOWORD_OFFSET);
  AnnotateDalvikRegAccess(fld_2, (src2_v_reg_offset + LOWORD_OFFSET) >> 2,
                          true /* is_load */, is_double /* is64bit */);

  LIR *fld_1 = NewLIR2NoDest(fld_opcode, rs_rSP.GetReg(),
                             src1_v_reg_offset + LOWORD_OFFSET);
  AnnotateDalvikRegAccess(fld_1, (src1_v_reg_offset + LOWORD_OFFSET) >> 2,
                          true /* is_load */, is_double /* is64bit */);

  FlushReg(rs_rAX);
  Clobber(rs_rAX);
  LockTemp(rs_rAX);

  LIR* retry = NewLIR0(kPseudoTargetLabel);

  // Compute the partial remainder of ST(0) / ST(1) and leave the result in ST(0).
  NewLIR0(kX86Fprem);

  // Move the FPU status word to AX.
  NewLIR0(kX86Fstsw16R);

  // Check if reduction is complete.
  OpRegImm(kOpAnd, rs_rAX, 0x400);

  // If not, continue to compute the remainder.
  LIR* branch = NewLIR2(kX86Jcc8, 0, kX86CondNe);
  branch->target = retry;

  FreeTemp(rs_rAX);

  // Now store the result in the destination VR's stack location.
  int displacement = dest_v_reg_offset + LOWORD_OFFSET;
  int opcode = is_double ? kX86Fst64M : kX86Fst32M;
  LIR *fst = NewLIR2NoDest(opcode, rs_rSP.GetReg(), displacement);
  AnnotateDalvikRegAccess(fst, displacement >> 2, false /* is_load */, is_double /* is64bit */);

  // Pop ST(1) and ST(0).
  NewLIR0(kX86Fucompp);

  /*
   * The result is in a physical register if it was in a temp or was register
   * promoted. For that reason it is enough to check if it is in physical
   * register. If it is, then we must do all of the bookkeeping necessary to
   * invalidate temp (if needed) and load in promoted register (if needed).
   * If the result's location is in memory, then we do not need to do anything
   * more since the fstp has already placed the correct value in memory.
   */
  RegLocation rl_result = is_double ? UpdateLocWideTyped(rl_dest) : UpdateLocTyped(rl_dest);
  if (rl_result.location == kLocPhysReg) {
    rl_result = EvalLoc(rl_dest, kFPReg, true);
    if (is_double) {
      LoadBaseDisp(rs_rSP, dest_v_reg_offset, rl_result.reg, k64, kNotVolatile);
      StoreFinalValueWide(rl_dest, rl_result);
    } else {
      Load32Disp(rs_rSP, dest_v_reg_offset, rl_result.reg);
      StoreFinalValue(rl_dest, rl_result);
    }
  }
}

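// Implements cmpl/cmpg: after ucomiss/ucomisd, the 1/0/-1 result is materialized with a
// set-above plus sbb pair (or a branchy fallback when the result register has no byte form).
// The preloaded bias value and a parity-flag branch steer the unordered (NaN) case so that
// cmpg produces 1 and cmpl produces -1.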
void X86Mir2Lir::GenCmpFP(Instruction::Code code, RegLocation rl_dest,
                          RegLocation rl_src1, RegLocation rl_src2) {
  bool single = (code == Instruction::CMPL_FLOAT) || (code == Instruction::CMPG_FLOAT);
  bool unordered_gt = (code == Instruction::CMPG_DOUBLE) || (code == Instruction::CMPG_FLOAT);
  if (single) {
    rl_src1 = LoadValue(rl_src1, kFPReg);
    rl_src2 = LoadValue(rl_src2, kFPReg);
  } else {
    rl_src1 = LoadValueWide(rl_src1, kFPReg);
    rl_src2 = LoadValueWide(rl_src2, kFPReg);
  }
  // In case result vreg is also src vreg, break association to avoid useless copy by EvalLoc()
  ClobberSReg(rl_dest.s_reg_low);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  LoadConstantNoClobber(rl_result.reg, unordered_gt ? 1 : 0);
  if (single) {
    NewLIR2(kX86UcomissRR, rl_src1.reg.GetReg(), rl_src2.reg.GetReg());
  } else {
    NewLIR2(kX86UcomisdRR, rl_src1.reg.GetReg(), rl_src2.reg.GetReg());
  }
  LIR* branch = nullptr;
  if (unordered_gt) {
    branch = NewLIR2(kX86Jcc8, 0, kX86CondPE);
  }
  // If the result reg can't be byte accessed, use a jump and move instead of a set.
  if (!IsByteRegister(rl_result.reg)) {
    LIR* branch2 = nullptr;
    if (unordered_gt) {
      branch2 = NewLIR2(kX86Jcc8, 0, kX86CondA);
      NewLIR2(kX86Mov32RI, rl_result.reg.GetReg(), 0x0);
    } else {
      branch2 = NewLIR2(kX86Jcc8, 0, kX86CondBe);
      NewLIR2(kX86Mov32RI, rl_result.reg.GetReg(), 0x1);
    }
    branch2->target = NewLIR0(kPseudoTargetLabel);
  } else {
    NewLIR2(kX86Set8R, rl_result.reg.GetReg(), kX86CondA /* above - unsigned > */);
  }
  NewLIR2(kX86Sbb32RI, rl_result.reg.GetReg(), 0);
  if (unordered_gt) {
    branch->target = NewLIR0(kPseudoTargetLabel);
  }
  StoreValue(rl_dest, rl_result);
}

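// Fuses a float/double compare with the following branch: ucomiss/ucomisd sets the flags, an
// extra parity-flag branch routes the unordered (NaN) case to taken or not_taken according to
// gt_bias, and the condition code is rewritten to its unsigned form to match those flags.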
void X86Mir2Lir::GenFusedFPCmpBranch(BasicBlock* bb, MIR* mir, bool gt_bias,
                                     bool is_double) {
  LIR* taken = &block_label_list_[bb->taken];
  LIR* not_taken = &block_label_list_[bb->fall_through];
  LIR* branch = nullptr;
  RegLocation rl_src1;
  RegLocation rl_src2;
  if (is_double) {
    rl_src1 = mir_graph_->GetSrcWide(mir, 0);
    rl_src2 = mir_graph_->GetSrcWide(mir, 2);
    rl_src1 = LoadValueWide(rl_src1, kFPReg);
    rl_src2 = LoadValueWide(rl_src2, kFPReg);
    NewLIR2(kX86UcomisdRR, rl_src1.reg.GetReg(), rl_src2.reg.GetReg());
  } else {
    rl_src1 = mir_graph_->GetSrc(mir, 0);
    rl_src2 = mir_graph_->GetSrc(mir, 1);
    rl_src1 = LoadValue(rl_src1, kFPReg);
    rl_src2 = LoadValue(rl_src2, kFPReg);
    NewLIR2(kX86UcomissRR, rl_src1.reg.GetReg(), rl_src2.reg.GetReg());
  }
  ConditionCode ccode = mir->meta.ccode;
  switch (ccode) {
    case kCondEq:
      if (!gt_bias) {
        branch = NewLIR2(kX86Jcc8, 0, kX86CondPE);
        branch->target = not_taken;
      }
      break;
    case kCondNe:
      if (!gt_bias) {
        branch = NewLIR2(kX86Jcc8, 0, kX86CondPE);
        branch->target = taken;
      }
      break;
    case kCondLt:
      if (gt_bias) {
        branch = NewLIR2(kX86Jcc8, 0, kX86CondPE);
        branch->target = not_taken;
      }
      ccode = kCondUlt;
      break;
    case kCondLe:
      if (gt_bias) {
        branch = NewLIR2(kX86Jcc8, 0, kX86CondPE);
        branch->target = not_taken;
      }
      ccode = kCondLs;
      break;
    case kCondGt:
      if (gt_bias) {
        branch = NewLIR2(kX86Jcc8, 0, kX86CondPE);
        branch->target = taken;
      }
      ccode = kCondHi;
      break;
    case kCondGe:
      if (gt_bias) {
        branch = NewLIR2(kX86Jcc8, 0, kX86CondPE);
        branch->target = taken;
      }
      ccode = kCondUge;
      break;
    default:
      LOG(FATAL) << "Unexpected ccode: " << ccode;
  }
  OpCondBranch(ccode, taken);
}

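// Negates a float by flipping the sign bit in a core register; adding 0x80000000 touches only
// bit 31, so it is equivalent to XORing with the sign mask.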
void X86Mir2Lir::GenNegFloat(RegLocation rl_dest, RegLocation rl_src) {
  RegLocation rl_result;
  rl_src = LoadValue(rl_src, kCoreReg);
  rl_result = EvalLoc(rl_dest, kCoreReg, true);
  OpRegRegImm(kOpAdd, rl_result.reg, rl_src.reg, 0x80000000);
  StoreValue(rl_dest, rl_result);
}

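// Negates a double. On 64-bit targets the sign bit is flipped with a rol/xor/ror sequence
// (presumably because there is no XOR form taking a full 64-bit immediate); on 32-bit targets
// only the sign bit of the high word is flipped.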
void X86Mir2Lir::GenNegDouble(RegLocation rl_dest, RegLocation rl_src) {
  RegLocation rl_result;
  rl_src = LoadValueWide(rl_src, kCoreReg);
  if (cu_->target64) {
    rl_result = EvalLocWide(rl_dest, kCoreReg, true);
    OpRegCopy(rl_result.reg, rl_src.reg);
    // Flip sign bit.
    NewLIR2(kX86Rol64RI, rl_result.reg.GetReg(), 1);
    NewLIR2(kX86Xor64RI, rl_result.reg.GetReg(), 1);
    NewLIR2(kX86Ror64RI, rl_result.reg.GetReg(), 1);
  } else {
    rl_result = ForceTempWide(rl_src);
    OpRegRegImm(kOpAdd, rl_result.reg.GetHigh(), rl_result.reg.GetHigh(), 0x80000000);
  }
  StoreValueWide(rl_dest, rl_result);
}

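// Inlines Math.sqrt(double) as a single sqrtsd.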
bool X86Mir2Lir::GenInlinedSqrt(CallInfo* info) {
  RegLocation rl_dest = InlineTargetWide(info);  // double place for result
  if (rl_dest.s_reg_low == INVALID_SREG) {
    // Result is unused, the code is dead. Inlining successful, no code generated.
    return true;
  }
  RegLocation rl_src = info->args[0];
  rl_src = LoadValueWide(rl_src, kFPReg);
  RegLocation rl_result = EvalLoc(rl_dest, kFPReg, true);
  NewLIR2(kX86SqrtsdRR, rl_result.reg.GetReg(), rl_src.reg.GetReg());
  StoreValueWide(rl_dest, rl_result);
  return true;
}

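// Inlines Math.abs(float) by clearing the sign bit: when the argument and the result map to the
// same virtual register and it lives in memory, the 0x7fffffff mask is ANDed directly into the
// frame slot; otherwise the mask is applied in a core register.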
bool X86Mir2Lir::GenInlinedAbsFloat(CallInfo* info) {
  // Get the argument.
  RegLocation rl_src = info->args[0];

  // Get the inlined intrinsic target virtual register.
  RegLocation rl_dest = InlineTarget(info);

  // Get the virtual register number.
  DCHECK_NE(rl_src.s_reg_low, INVALID_SREG);
  if (rl_dest.s_reg_low == INVALID_SREG) {
    // Result is unused, the code is dead. Inlining successful, no code generated.
    return true;
  }
  int v_src_reg = mir_graph_->SRegToVReg(rl_src.s_reg_low);
  int v_dst_reg = mir_graph_->SRegToVReg(rl_dest.s_reg_low);

  // If the argument is the same as the inlined intrinsic target.
  if (v_src_reg == v_dst_reg) {
    rl_src = UpdateLoc(rl_src);

    // If the argument is in a physical register.
    if (rl_src.location == kLocPhysReg) {
      rl_src = LoadValue(rl_src, kCoreReg);
      OpRegImm(kOpAnd, rl_src.reg, 0x7fffffff);
      StoreValue(rl_dest, rl_src);
      return true;
    }
    // The argument is in memory.
    DCHECK((rl_src.location == kLocDalvikFrame) ||
           (rl_src.location == kLocCompilerTemp));

    // Operate directly into memory.
    int displacement = SRegOffset(rl_dest.s_reg_low);
    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
    LIR *lir = NewLIR3(kX86And32MI, rs_rX86_SP_32.GetReg(), displacement, 0x7fffffff);
    AnnotateDalvikRegAccess(lir, displacement >> 2, false /* is_load */, false /* is_64bit */);
    AnnotateDalvikRegAccess(lir, displacement >> 2, true /* is_load */, false /* is_64bit */);
    return true;
  } else {
    rl_src = LoadValue(rl_src, kCoreReg);
    RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
    OpRegRegImm(kOpAnd, rl_result.reg, rl_src.reg, 0x7fffffff);
    StoreValue(rl_dest, rl_result);
    return true;
  }
}

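// Inlines Math.abs(double). 64-bit targets clear the sign bit with a shift-left/shift-right
// pair in a core register. On 32-bit targets, a value already in an XMM register is masked with
// pand and 0x7fffffffffffffff; otherwise the 0x7fffffff mask is applied to the high word,
// either directly in memory or in a core register.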
bool X86Mir2Lir::GenInlinedAbsDouble(CallInfo* info) {
  RegLocation rl_src = info->args[0];
  RegLocation rl_dest = InlineTargetWide(info);
  DCHECK_NE(rl_src.s_reg_low, INVALID_SREG);
  if (rl_dest.s_reg_low == INVALID_SREG) {
    // Result is unused, the code is dead. Inlining successful, no code generated.
    return true;
  }
  if (cu_->target64) {
    rl_src = LoadValueWide(rl_src, kCoreReg);
    RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
    OpRegCopyWide(rl_result.reg, rl_src.reg);
    OpRegImm(kOpLsl, rl_result.reg, 1);
    OpRegImm(kOpLsr, rl_result.reg, 1);
    StoreValueWide(rl_dest, rl_result);
    return true;
  }
  int v_src_reg = mir_graph_->SRegToVReg(rl_src.s_reg_low);
  int v_dst_reg = mir_graph_->SRegToVReg(rl_dest.s_reg_low);
  rl_src = UpdateLocWide(rl_src);

  // If the argument is in a physical XMM register.
  if (rl_src.location == kLocPhysReg && rl_src.reg.IsFloat()) {
    RegLocation rl_result = EvalLoc(rl_dest, kFPReg, true);
    if (rl_result.reg != rl_src.reg) {
      LoadConstantWide(rl_result.reg, 0x7fffffffffffffff);
      NewLIR2(kX86PandRR, rl_result.reg.GetReg(), rl_src.reg.GetReg());
    } else {
      RegStorage sign_mask = AllocTempDouble();
      LoadConstantWide(sign_mask, 0x7fffffffffffffff);
      NewLIR2(kX86PandRR, rl_result.reg.GetReg(), sign_mask.GetReg());
      FreeTemp(sign_mask);
    }
    StoreValueWide(rl_dest, rl_result);
    return true;
  } else if (v_src_reg == v_dst_reg) {
    // The argument is the same as the inlined intrinsic target.
    // If the argument is in a physical register.
    if (rl_src.location == kLocPhysReg) {
      rl_src = LoadValueWide(rl_src, kCoreReg);
      OpRegImm(kOpAnd, rl_src.reg.GetHigh(), 0x7fffffff);
      StoreValueWide(rl_dest, rl_src);
      return true;
    }
    // The argument is in memory.
    DCHECK((rl_src.location == kLocDalvikFrame) ||
           (rl_src.location == kLocCompilerTemp));

    // Operate directly into memory.
    int displacement = SRegOffset(rl_dest.s_reg_low);
    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
    LIR *lir = NewLIR3(kX86And32MI, rs_rX86_SP_32.GetReg(), displacement + HIWORD_OFFSET, 0x7fffffff);
    AnnotateDalvikRegAccess(lir, (displacement + HIWORD_OFFSET) >> 2, true /* is_load */, true /* is_64bit */);
    AnnotateDalvikRegAccess(lir, (displacement + HIWORD_OFFSET) >> 2, false /* is_load */, true /* is_64bit */);
    return true;
  } else {
    rl_src = LoadValueWide(rl_src, kCoreReg);
    RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
    OpRegCopyWide(rl_result.reg, rl_src.reg);
    OpRegImm(kOpAnd, rl_result.reg.GetHigh(), 0x7fffffff);
    StoreValueWide(rl_dest, rl_result);
    return true;
  }
}

bool X86Mir2Lir::GenInlinedMinMaxFP(CallInfo* info, bool is_min, bool is_double) {
  if (is_double) {
    RegLocation rl_dest = InlineTargetWide(info);
    if (rl_dest.s_reg_low == INVALID_SREG) {
      // Result is unused, the code is dead. Inlining successful, no code generated.
      return true;
    }
    RegLocation rl_src1 = LoadValueWide(info->args[0], kFPReg);
    RegLocation rl_src2 = LoadValueWide(info->args[2], kFPReg);
    RegLocation rl_result = EvalLocWide(rl_dest, kFPReg, true);

    // Avoid src2 corruption by OpRegCopyWide.
    if (rl_result.reg == rl_src2.reg) {
      std::swap(rl_src2.reg, rl_src1.reg);
    }

    OpRegCopyWide(rl_result.reg, rl_src1.reg);
    NewLIR2(kX86UcomisdRR, rl_result.reg.GetReg(), rl_src2.reg.GetReg());
    // If either arg is NaN, return NaN.
    LIR* branch_nan = NewLIR2(kX86Jcc8, 0, kX86CondP);
    // Min/Max branches.
    LIR* branch_cond1 = NewLIR2(kX86Jcc8, 0, (is_min) ? kX86CondA : kX86CondB);
    LIR* branch_cond2 = NewLIR2(kX86Jcc8, 0, (is_min) ? kX86CondB : kX86CondA);
    // If equal, we need to resolve situations like min/max(0.0, -0.0) == -0.0/0.0.
    NewLIR2((is_min) ? kX86OrpdRR : kX86AndpdRR, rl_result.reg.GetReg(), rl_src2.reg.GetReg());
    LIR* branch_exit_equal = NewLIR1(kX86Jmp8, 0);
    // Handle NaN.
    branch_nan->target = NewLIR0(kPseudoTargetLabel);
    LoadConstantWide(rl_result.reg, INT64_C(0x7ff8000000000000));

    // The base_of_code_ compiler temp is non-null when it is reserved
    // for being able to do data accesses relative to method start.
    if (base_of_code_ != nullptr) {
      // Loading from the constant pool may have used base of code register.
      // However, the code here generates logic in diamond shape and not all
      // paths load base of code register. Therefore, we ensure it is clobbered so
      // that the temp caching system does not believe it is live at merge point.
      RegLocation rl_method = mir_graph_->GetRegLocation(base_of_code_->s_reg_low);
      if (rl_method.wide) {
        rl_method = UpdateLocWide(rl_method);
      } else {
        rl_method = UpdateLoc(rl_method);
      }
      if (rl_method.location == kLocPhysReg) {
        Clobber(rl_method.reg);
      }
    }

    LIR* branch_exit_nan = NewLIR1(kX86Jmp8, 0);
    // Handle Min/Max. Copy greater/lesser value from src2.
    branch_cond1->target = NewLIR0(kPseudoTargetLabel);
    OpRegCopyWide(rl_result.reg, rl_src2.reg);
    // Right operand is already in result reg.
    branch_cond2->target = NewLIR0(kPseudoTargetLabel);
    // Exit.
    branch_exit_nan->target = NewLIR0(kPseudoTargetLabel);
    branch_exit_equal->target = NewLIR0(kPseudoTargetLabel);
    StoreValueWide(rl_dest, rl_result);
  } else {
    RegLocation rl_dest = InlineTarget(info);
    if (rl_dest.s_reg_low == INVALID_SREG) {
      // Result is unused, the code is dead. Inlining successful, no code generated.
      return true;
    }
    RegLocation rl_src1 = LoadValue(info->args[0], kFPReg);
    RegLocation rl_src2 = LoadValue(info->args[1], kFPReg);
    RegLocation rl_result = EvalLoc(rl_dest, kFPReg, true);

    // Avoid src2 corruption by OpRegCopy.
    if (rl_result.reg == rl_src2.reg) {
      std::swap(rl_src2.reg, rl_src1.reg);
    }

    OpRegCopy(rl_result.reg, rl_src1.reg);
    NewLIR2(kX86UcomissRR, rl_result.reg.GetReg(), rl_src2.reg.GetReg());
    // If either arg is NaN, return NaN.
    LIR* branch_nan = NewLIR2(kX86Jcc8, 0, kX86CondP);
    // Min/Max branches.
    LIR* branch_cond1 = NewLIR2(kX86Jcc8, 0, (is_min) ? kX86CondA : kX86CondB);
    LIR* branch_cond2 = NewLIR2(kX86Jcc8, 0, (is_min) ? kX86CondB : kX86CondA);
    // If equal, we need to resolve situations like min/max(0.0, -0.0) == -0.0/0.0.
    NewLIR2((is_min) ? kX86OrpsRR : kX86AndpsRR, rl_result.reg.GetReg(), rl_src2.reg.GetReg());
    LIR* branch_exit_equal = NewLIR1(kX86Jmp8, 0);
    // Handle NaN.
    branch_nan->target = NewLIR0(kPseudoTargetLabel);
    LoadConstantNoClobber(rl_result.reg, 0x7fc00000);
    LIR* branch_exit_nan = NewLIR1(kX86Jmp8, 0);
    // Handle Min/Max. Copy greater/lesser value from src2.
    branch_cond1->target = NewLIR0(kPseudoTargetLabel);
    OpRegCopy(rl_result.reg, rl_src2.reg);
    // Right operand is already in result reg.
    branch_cond2->target = NewLIR0(kPseudoTargetLabel);
    // Exit.
    branch_exit_nan->target = NewLIR0(kPseudoTargetLabel);
    branch_exit_equal->target = NewLIR0(kPseudoTargetLabel);
    StoreValue(rl_dest, rl_result);
  }
  return true;
}

}  // namespace art