Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2011 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "dex/compiler_internals.h" |
| 18 | #include "dex/dataflow_iterator-inl.h" |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 19 | #include "dex/quick/dex_file_method_inliner.h" |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 20 | #include "mir_to_lir-inl.h" |
Ian Rogers | 02ed4c0 | 2013-09-06 13:10:04 -0700 | [diff] [blame] | 21 | #include "thread-inl.h" |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 22 | |
| 23 | namespace art { |
| 24 | |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 25 | RegisterClass Mir2Lir::ShortyToRegClass(char shorty_type) { |
| 26 | RegisterClass res; |
| 27 | switch (shorty_type) { |
| 28 | case 'L': |
| 29 | res = kRefReg; |
| 30 | break; |
| 31 | case 'F': |
| 32 | // Expected fallthrough. |
| 33 | case 'D': |
| 34 | res = kFPReg; |
| 35 | break; |
| 36 | default: |
| 37 | res = kCoreReg; |
| 38 | } |
| 39 | return res; |
| 40 | } |
| 41 | |
| 42 | RegisterClass Mir2Lir::LocToRegClass(RegLocation loc) { |
| 43 | RegisterClass res; |
| 44 | if (loc.fp) { |
| 45 | DCHECK(!loc.ref) << "At most, one of ref/fp may be set"; |
| 46 | res = kFPReg; |
| 47 | } else if (loc.ref) { |
| 48 | res = kRefReg; |
| 49 | } else { |
| 50 | res = kCoreReg; |
| 51 | } |
| 52 | return res; |
| 53 | } |
| 54 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 55 | void Mir2Lir::LockArg(int in_position, bool wide) { |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 56 | RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position); |
| 57 | RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) : |
| 58 | RegStorage::InvalidReg(); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 59 | |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 60 | if (reg_arg_low.Valid()) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 61 | LockTemp(reg_arg_low); |
| 62 | } |
buzbee | b5860fb | 2014-06-21 15:31:01 -0700 | [diff] [blame] | 63 | if (reg_arg_high.Valid() && reg_arg_low.NotExactlyEquals(reg_arg_high)) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 64 | LockTemp(reg_arg_high); |
| 65 | } |
| 66 | } |
| 67 | |
// TODO: simplify when 32-bit targets go hard-float.
// Loads the incoming method argument at |in_position| into a register of
// |reg_class|, reading it from the stack when it was not mapped to a physical
// register. Returns the register (a pair for wide args on 32-bit targets)
// holding the value.
RegStorage Mir2Lir::LoadArg(int in_position, RegisterClass reg_class, bool wide) {
  // Mark the stack accesses below as Dalvik vreg accesses for resource masking.
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);

  if (cu_->instruction_set == kX86) {
    /*
     * When doing a call for x86, it moves the stack pointer in order to push return.
     * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * When doing a call for x86-64, it moves the stack pointer in order to push return.
     * Thus, we add another 8 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint64_t);
  }

  if (cu_->target64) {
    // 64-bit targets: a single register holds the argument whether wide or not.
    RegStorage reg_arg = GetArgMappingToPhysicalReg(in_position);
    if (!reg_arg.Valid()) {
      // Argument lives on the stack - load it into a fresh temp of the right class.
      RegStorage new_reg =
          wide ? AllocTypedTempWide(false, reg_class) : AllocTypedTemp(false, reg_class);
      LoadBaseDisp(TargetPtrReg(kSp), offset, new_reg, wide ? k64 : k32, kNotVolatile);
      return new_reg;
    } else {
      // Check if we need to copy the arg to a different reg_class.
      if (!RegClassMatches(reg_class, reg_arg)) {
        if (wide) {
          RegStorage new_reg = AllocTypedTempWide(false, reg_class);
          OpRegCopyWide(new_reg, reg_arg);
          reg_arg = new_reg;
        } else {
          RegStorage new_reg = AllocTypedTemp(false, reg_class);
          OpRegCopy(new_reg, reg_arg);
          reg_arg = new_reg;
        }
      }
    }
    return reg_arg;
  }

  // 32-bit targets: a wide argument may be split between registers and memory.
  RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
  RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
      RegStorage::InvalidReg();

  // If the VR is wide and there is no register for high part, we need to load it.
  if (wide && !reg_arg_high.Valid()) {
    // If the low part is not in a reg, we allocate a pair. Otherwise, we just load to high reg.
    if (!reg_arg_low.Valid()) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      LoadBaseDisp(TargetPtrReg(kSp), offset, new_regs, k64, kNotVolatile);
      return new_regs;  // The reg_class is OK, we can return.
    } else {
      // Assume that no ABI allows splitting a wide fp reg between a narrow fp reg and memory,
      // i.e. the low part is in a core reg. Load the second part in a core reg as well for now.
      DCHECK(!reg_arg_low.IsFloat());
      reg_arg_high = AllocTemp();
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetPtrReg(kSp), offset_high, reg_arg_high);
      // Continue below to check the reg_class.
    }
  }

  // If the low part is not in a register yet, we need to load it.
  if (!reg_arg_low.Valid()) {
    // Assume that if the low part of a wide arg is passed in memory, so is the high part,
    // thus we don't get here for wide args as it's handled above. Big-endian ABIs could
    // conceivably break this assumption but Android supports only little-endian architectures.
    DCHECK(!wide);
    reg_arg_low = AllocTypedTemp(false, reg_class);
    Load32Disp(TargetPtrReg(kSp), offset, reg_arg_low);
    return reg_arg_low;  // The reg_class is OK, we can return.
  }

  // Both halves (or the single narrow value) are now in registers.
  RegStorage reg_arg = wide ? RegStorage::MakeRegPair(reg_arg_low, reg_arg_high) : reg_arg_low;
  // Check if we need to copy the arg to a different reg_class.
  if (!RegClassMatches(reg_class, reg_arg)) {
    if (wide) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      OpRegCopyWide(new_regs, reg_arg);
      reg_arg = new_regs;
    } else {
      RegStorage new_reg = AllocTypedTemp(false, reg_class);
      OpRegCopy(new_reg, reg_arg);
      reg_arg = new_reg;
    }
  }
  return reg_arg;
}
| 161 | |
// TODO: simplify when 32-bit targets go hard-float.
// Loads the incoming method argument at |in_position| directly into the
// register(s) of |rl_dest|, copying from the mapped physical register(s)
// or loading from the stack as needed.
void Mir2Lir::LoadArgDirect(int in_position, RegLocation rl_dest) {
  // Mark the stack accesses below as Dalvik vreg accesses for resource masking.
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
  if (cu_->instruction_set == kX86) {
    /*
     * When doing a call for x86, it moves the stack pointer in order to push return.
     * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * When doing a call for x86-64, it moves the stack pointer in order to push return.
     * Thus, we add another 8 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint64_t);
  }

  if (!rl_dest.wide) {
    // Narrow argument: one register copy or one 32-bit load.
    RegStorage reg = GetArgMappingToPhysicalReg(in_position);
    if (reg.Valid()) {
      OpRegCopy(rl_dest.reg, reg);
    } else {
      Load32Disp(TargetPtrReg(kSp), offset, rl_dest.reg);
    }
  } else {
    if (cu_->target64) {
      // 64-bit targets: the whole wide value is in one register or in memory.
      RegStorage reg = GetArgMappingToPhysicalReg(in_position);
      if (reg.Valid()) {
        OpRegCopy(rl_dest.reg, reg);
      } else {
        LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, k64, kNotVolatile);
      }
      return;
    }

    // 32-bit targets: each half may independently be in a register or in memory;
    // handle all four combinations.
    RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
    RegStorage reg_arg_high = GetArgMappingToPhysicalReg(in_position + 1);

    if (reg_arg_low.Valid() && reg_arg_high.Valid()) {
      OpRegCopyWide(rl_dest.reg, RegStorage::MakeRegPair(reg_arg_low, reg_arg_high));
    } else if (reg_arg_low.Valid() && !reg_arg_high.Valid()) {
      OpRegCopy(rl_dest.reg, reg_arg_low);
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetPtrReg(kSp), offset_high, rl_dest.reg.GetHigh());
    } else if (!reg_arg_low.Valid() && reg_arg_high.Valid()) {
      OpRegCopy(rl_dest.reg.GetHigh(), reg_arg_high);
      Load32Disp(TargetPtrReg(kSp), offset, rl_dest.reg.GetLow());
    } else {
      LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, k64, kNotVolatile);
    }
  }
}
| 217 | |
// Generates fast-path code for an inlined instance-field getter on "this".
// Returns false (before emitting anything) when the pattern is not supported;
// returns true after emitting the field load into the return register.
bool Mir2Lir::GenSpecialIGet(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }

  bool wide = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_WIDE));
  bool ref = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT));
  OpSize size = LoadStoreOpSize(wide, ref);
  // Bail out if the target cannot do an atomic load of this size.
  if (data.is_volatile && !SupportsVolatileLoadStore(size)) {
    return false;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegisterClass ret_reg_class = ShortyToRegClass(cu_->shorty[0]);
  RegLocation rl_dest = wide ? GetReturnWide(ret_reg_class) : GetReturn(ret_reg_class);
  RegStorage r_result = rl_dest.reg;
  // If the return register's class does not suit the field load, load into a
  // temp of the required class first and copy afterwards.
  if (!RegClassMatches(reg_class, r_result)) {
    r_result = wide ? AllocTypedTempWide(rl_dest.fp, reg_class)
                    : AllocTypedTemp(rl_dest.fp, reg_class);
  }
  if (ref) {
    LoadRefDisp(reg_obj, data.field_offset, r_result, data.is_volatile ? kVolatile : kNotVolatile);
  } else {
    LoadBaseDisp(reg_obj, data.field_offset, r_result, size, data.is_volatile ? kVolatile :
                 kNotVolatile);
  }
  // Move the loaded value into the actual return register if a temp was used.
  if (r_result.NotExactlyEquals(rl_dest.reg)) {
    if (wide) {
      OpRegCopyWide(rl_dest.reg, r_result);
    } else {
      OpRegCopy(rl_dest.reg, r_result);
    }
  }
  return true;
}
| 260 | |
| 261 | bool Mir2Lir::GenSpecialIPut(MIR* mir, const InlineMethod& special) { |
| 262 | // FastInstance() already checked by DexFileMethodInliner. |
| 263 | const InlineIGetIPutData& data = special.d.ifield_data; |
Vladimir Marko | e1fced1 | 2014-04-04 14:52:53 +0100 | [diff] [blame] | 264 | if (data.method_is_static != 0u || data.object_arg != 0u) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 265 | // The object is not "this" and has to be null-checked. |
| 266 | return false; |
| 267 | } |
Vladimir Marko | e1fced1 | 2014-04-04 14:52:53 +0100 | [diff] [blame] | 268 | if (data.return_arg_plus1 != 0u) { |
| 269 | // The setter returns a method argument which we don't support here. |
| 270 | return false; |
| 271 | } |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 272 | |
Vladimir Marko | e3e0260 | 2014-03-12 15:42:41 +0000 | [diff] [blame] | 273 | bool wide = (data.op_variant == InlineMethodAnalyser::IPutVariant(Instruction::IPUT_WIDE)); |
Vladimir Marko | 455759b | 2014-05-06 20:49:36 +0100 | [diff] [blame] | 274 | bool ref = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT)); |
| 275 | OpSize size = LoadStoreOpSize(wide, ref); |
Vladimir Marko | 674744e | 2014-04-24 15:18:26 +0100 | [diff] [blame] | 276 | if (data.is_volatile && !SupportsVolatileLoadStore(size)) { |
| 277 | return false; |
| 278 | } |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 279 | |
| 280 | // Point of no return - no aborts after this |
| 281 | GenPrintLabel(mir); |
| 282 | LockArg(data.object_arg); |
| 283 | LockArg(data.src_arg, wide); |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 284 | RegStorage reg_obj = LoadArg(data.object_arg, kRefReg); |
Vladimir Marko | c93ac8b | 2014-05-13 17:53:49 +0100 | [diff] [blame] | 285 | RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile); |
| 286 | RegStorage reg_src = LoadArg(data.src_arg, reg_class, wide); |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 287 | if (ref) { |
| 288 | StoreRefDisp(reg_obj, data.field_offset, reg_src, data.is_volatile ? kVolatile : kNotVolatile); |
Vladimir Marko | 674744e | 2014-04-24 15:18:26 +0100 | [diff] [blame] | 289 | } else { |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 290 | StoreBaseDisp(reg_obj, data.field_offset, reg_src, size, data.is_volatile ? kVolatile : |
| 291 | kNotVolatile); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 292 | } |
Vladimir Marko | 455759b | 2014-05-06 20:49:36 +0100 | [diff] [blame] | 293 | if (ref) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 294 | MarkGCCard(reg_src, reg_obj); |
| 295 | } |
| 296 | return true; |
| 297 | } |
| 298 | |
| 299 | bool Mir2Lir::GenSpecialIdentity(MIR* mir, const InlineMethod& special) { |
| 300 | const InlineReturnArgData& data = special.d.return_data; |
Vladimir Marko | e3e0260 | 2014-03-12 15:42:41 +0000 | [diff] [blame] | 301 | bool wide = (data.is_wide != 0u); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 302 | |
| 303 | // Point of no return - no aborts after this |
| 304 | GenPrintLabel(mir); |
| 305 | LockArg(data.arg, wide); |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 306 | RegisterClass reg_class = ShortyToRegClass(cu_->shorty[0]); |
| 307 | RegLocation rl_dest = wide ? GetReturnWide(reg_class) : GetReturn(reg_class); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 308 | LoadArgDirect(data.arg, rl_dest); |
| 309 | return true; |
| 310 | } |
| 311 | |
/*
 * Special-case code generation for simple non-throwing leaf methods.
 * Dispatches on the inlined-method opcode, emits the body plus the special
 * exit sequence, and returns whether a special-case body was generated.
 */
bool Mir2Lir::GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special) {
  DCHECK(special.flags & kInlineSpecial);
  current_dalvik_offset_ = mir->offset;
  MIR* return_mir = nullptr;
  bool successful = false;

  switch (special.opcode) {
    case kInlineOpNop:
      // Empty method - nothing to emit besides the return.
      successful = true;
      DCHECK_EQ(mir->dalvikInsn.opcode, Instruction::RETURN_VOID);
      return_mir = mir;
      break;
    case kInlineOpNonWideConst: {
      // Method returns a narrow constant - load it into the return register.
      successful = true;
      RegLocation rl_dest = GetReturn(ShortyToRegClass(cu_->shorty[0]));
      GenPrintLabel(mir);
      LoadConstant(rl_dest.reg, static_cast<int>(special.d.data));
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    }
    case kInlineOpReturnArg:
      successful = GenSpecialIdentity(mir, special);
      return_mir = mir;
      break;
    case kInlineOpIGet:
      successful = GenSpecialIGet(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    case kInlineOpIPut:
      successful = GenSpecialIPut(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    default:
      break;
  }

  if (successful) {
    if (kIsDebugBuild) {
      // Clear unreachable catch entries.
      mir_graph_->catches_.clear();
    }

    // Handle verbosity for return MIR.
    if (return_mir != nullptr) {
      current_dalvik_offset_ = return_mir->offset;
      // Not handling special identity case because it already generated code as part
      // of the return. The label should have been added before any code was generated.
      if (special.opcode != kInlineOpReturnArg) {
        GenPrintLabel(return_mir);
      }
    }
    GenSpecialExitSequence();

    // Reset all spill and frame bookkeeping: the generated special-case
    // body spills nothing and uses no frame.
    core_spill_mask_ = 0;
    num_core_spills_ = 0;
    fp_spill_mask_ = 0;
    num_fp_spills_ = 0;
    frame_size_ = 0;
    core_vmap_table_.clear();
    fp_vmap_table_.clear();
  }

  return successful;
}
| 379 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 380 | /* |
| 381 | * Target-independent code generation. Use only high-level |
| 382 | * load/store utilities here, or target-dependent genXX() handlers |
| 383 | * when necessary. |
| 384 | */ |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 385 | void Mir2Lir::CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 386 | RegLocation rl_src[3]; |
| 387 | RegLocation rl_dest = mir_graph_->GetBadLoc(); |
| 388 | RegLocation rl_result = mir_graph_->GetBadLoc(); |
| 389 | Instruction::Code opcode = mir->dalvikInsn.opcode; |
| 390 | int opt_flags = mir->optimization_flags; |
| 391 | uint32_t vB = mir->dalvikInsn.vB; |
| 392 | uint32_t vC = mir->dalvikInsn.vC; |
buzbee | 082833c | 2014-05-17 23:16:26 -0700 | [diff] [blame] | 393 | DCHECK(CheckCorePoolSanity()) << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " @ 0x:" |
| 394 | << std::hex << current_dalvik_offset_; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 395 | |
| 396 | // Prep Src and Dest locations. |
| 397 | int next_sreg = 0; |
| 398 | int next_loc = 0; |
Jean Christophe Beyler | cc794c3 | 2014-05-02 09:34:13 -0700 | [diff] [blame] | 399 | uint64_t attrs = MIRGraph::GetDataFlowAttributes(opcode); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 400 | rl_src[0] = rl_src[1] = rl_src[2] = mir_graph_->GetBadLoc(); |
| 401 | if (attrs & DF_UA) { |
| 402 | if (attrs & DF_A_WIDE) { |
| 403 | rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg); |
| 404 | next_sreg+= 2; |
| 405 | } else { |
| 406 | rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg); |
| 407 | next_sreg++; |
| 408 | } |
| 409 | } |
| 410 | if (attrs & DF_UB) { |
| 411 | if (attrs & DF_B_WIDE) { |
| 412 | rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg); |
| 413 | next_sreg+= 2; |
| 414 | } else { |
| 415 | rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg); |
| 416 | next_sreg++; |
| 417 | } |
| 418 | } |
| 419 | if (attrs & DF_UC) { |
| 420 | if (attrs & DF_C_WIDE) { |
| 421 | rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg); |
| 422 | } else { |
| 423 | rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg); |
| 424 | } |
| 425 | } |
| 426 | if (attrs & DF_DA) { |
| 427 | if (attrs & DF_A_WIDE) { |
| 428 | rl_dest = mir_graph_->GetDestWide(mir); |
| 429 | } else { |
| 430 | rl_dest = mir_graph_->GetDest(mir); |
| 431 | } |
| 432 | } |
| 433 | switch (opcode) { |
| 434 | case Instruction::NOP: |
| 435 | break; |
| 436 | |
| 437 | case Instruction::MOVE_EXCEPTION: |
| 438 | GenMoveException(rl_dest); |
| 439 | break; |
| 440 | |
| 441 | case Instruction::RETURN_VOID: |
| 442 | if (((cu_->access_flags & kAccConstructor) != 0) && |
| 443 | cu_->compiler_driver->RequiresConstructorBarrier(Thread::Current(), cu_->dex_file, |
| 444 | cu_->class_def_idx)) { |
| 445 | GenMemBarrier(kStoreStore); |
| 446 | } |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 447 | if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 448 | GenSuspendTest(opt_flags); |
| 449 | } |
| 450 | break; |
| 451 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 452 | case Instruction::RETURN_OBJECT: |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 453 | DCHECK(rl_src[0].ref); |
| 454 | // Intentional fallthrough. |
| 455 | case Instruction::RETURN: |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 456 | if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 457 | GenSuspendTest(opt_flags); |
| 458 | } |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 459 | DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0])); |
| 460 | StoreValue(GetReturn(LocToRegClass(rl_src[0])), rl_src[0]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 461 | break; |
| 462 | |
| 463 | case Instruction::RETURN_WIDE: |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 464 | if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 465 | GenSuspendTest(opt_flags); |
| 466 | } |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 467 | DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0])); |
| 468 | StoreValueWide(GetReturnWide(LocToRegClass(rl_src[0])), rl_src[0]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 469 | break; |
| 470 | |
| 471 | case Instruction::MOVE_RESULT_WIDE: |
Vladimir Marko | 9820b7c | 2014-01-02 16:40:37 +0000 | [diff] [blame] | 472 | if ((opt_flags & MIR_INLINED) != 0) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 473 | break; // Nop - combined w/ previous invoke. |
Vladimir Marko | 9820b7c | 2014-01-02 16:40:37 +0000 | [diff] [blame] | 474 | } |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 475 | StoreValueWide(rl_dest, GetReturnWide(LocToRegClass(rl_dest))); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 476 | break; |
| 477 | |
| 478 | case Instruction::MOVE_RESULT: |
| 479 | case Instruction::MOVE_RESULT_OBJECT: |
Vladimir Marko | 9820b7c | 2014-01-02 16:40:37 +0000 | [diff] [blame] | 480 | if ((opt_flags & MIR_INLINED) != 0) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 481 | break; // Nop - combined w/ previous invoke. |
Vladimir Marko | 9820b7c | 2014-01-02 16:40:37 +0000 | [diff] [blame] | 482 | } |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 483 | StoreValue(rl_dest, GetReturn(LocToRegClass(rl_dest))); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 484 | break; |
| 485 | |
| 486 | case Instruction::MOVE: |
| 487 | case Instruction::MOVE_OBJECT: |
| 488 | case Instruction::MOVE_16: |
| 489 | case Instruction::MOVE_OBJECT_16: |
| 490 | case Instruction::MOVE_FROM16: |
| 491 | case Instruction::MOVE_OBJECT_FROM16: |
| 492 | StoreValue(rl_dest, rl_src[0]); |
| 493 | break; |
| 494 | |
| 495 | case Instruction::MOVE_WIDE: |
| 496 | case Instruction::MOVE_WIDE_16: |
| 497 | case Instruction::MOVE_WIDE_FROM16: |
| 498 | StoreValueWide(rl_dest, rl_src[0]); |
| 499 | break; |
| 500 | |
| 501 | case Instruction::CONST: |
| 502 | case Instruction::CONST_4: |
| 503 | case Instruction::CONST_16: |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 504 | GenConst(rl_dest, vB); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 505 | break; |
| 506 | |
| 507 | case Instruction::CONST_HIGH16: |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 508 | GenConst(rl_dest, vB << 16); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 509 | break; |
| 510 | |
| 511 | case Instruction::CONST_WIDE_16: |
| 512 | case Instruction::CONST_WIDE_32: |
Bill Buzbee | d61ba4b | 2014-01-13 21:44:01 +0000 | [diff] [blame] | 513 | GenConstWide(rl_dest, static_cast<int64_t>(static_cast<int32_t>(vB))); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 514 | break; |
| 515 | |
| 516 | case Instruction::CONST_WIDE: |
Bill Buzbee | d61ba4b | 2014-01-13 21:44:01 +0000 | [diff] [blame] | 517 | GenConstWide(rl_dest, mir->dalvikInsn.vB_wide); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 518 | break; |
| 519 | |
| 520 | case Instruction::CONST_WIDE_HIGH16: |
| 521 | rl_result = EvalLoc(rl_dest, kAnyReg, true); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 522 | LoadConstantWide(rl_result.reg, static_cast<int64_t>(vB) << 48); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 523 | StoreValueWide(rl_dest, rl_result); |
| 524 | break; |
| 525 | |
| 526 | case Instruction::MONITOR_ENTER: |
| 527 | GenMonitorEnter(opt_flags, rl_src[0]); |
| 528 | break; |
| 529 | |
| 530 | case Instruction::MONITOR_EXIT: |
| 531 | GenMonitorExit(opt_flags, rl_src[0]); |
| 532 | break; |
| 533 | |
| 534 | case Instruction::CHECK_CAST: { |
| 535 | GenCheckCast(mir->offset, vB, rl_src[0]); |
| 536 | break; |
| 537 | } |
| 538 | case Instruction::INSTANCE_OF: |
| 539 | GenInstanceof(vC, rl_dest, rl_src[0]); |
| 540 | break; |
| 541 | |
| 542 | case Instruction::NEW_INSTANCE: |
| 543 | GenNewInstance(vB, rl_dest); |
| 544 | break; |
| 545 | |
| 546 | case Instruction::THROW: |
| 547 | GenThrow(rl_src[0]); |
| 548 | break; |
| 549 | |
| 550 | case Instruction::ARRAY_LENGTH: |
| 551 | int len_offset; |
| 552 | len_offset = mirror::Array::LengthOffset().Int32Value(); |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 553 | rl_src[0] = LoadValue(rl_src[0], kRefReg); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 554 | GenNullCheck(rl_src[0].reg, opt_flags); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 555 | rl_result = EvalLoc(rl_dest, kCoreReg, true); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 556 | Load32Disp(rl_src[0].reg, len_offset, rl_result.reg); |
Dave Allison | f943914 | 2014-03-27 15:10:22 -0700 | [diff] [blame] | 557 | MarkPossibleNullPointerException(opt_flags); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 558 | StoreValue(rl_dest, rl_result); |
| 559 | break; |
| 560 | |
| 561 | case Instruction::CONST_STRING: |
| 562 | case Instruction::CONST_STRING_JUMBO: |
| 563 | GenConstString(vB, rl_dest); |
| 564 | break; |
| 565 | |
| 566 | case Instruction::CONST_CLASS: |
| 567 | GenConstClass(vB, rl_dest); |
| 568 | break; |
| 569 | |
| 570 | case Instruction::FILL_ARRAY_DATA: |
| 571 | GenFillArrayData(vB, rl_src[0]); |
| 572 | break; |
| 573 | |
| 574 | case Instruction::FILLED_NEW_ARRAY: |
| 575 | GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic, |
| 576 | false /* not range */)); |
| 577 | break; |
| 578 | |
| 579 | case Instruction::FILLED_NEW_ARRAY_RANGE: |
| 580 | GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic, |
| 581 | true /* range */)); |
| 582 | break; |
| 583 | |
| 584 | case Instruction::NEW_ARRAY: |
| 585 | GenNewArray(vC, rl_dest, rl_src[0]); |
| 586 | break; |
| 587 | |
| 588 | case Instruction::GOTO: |
| 589 | case Instruction::GOTO_16: |
| 590 | case Instruction::GOTO_32: |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 591 | if (mir_graph_->IsBackedge(bb, bb->taken) && |
| 592 | (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken))) { |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 593 | GenSuspendTestAndBranch(opt_flags, &label_list[bb->taken]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 594 | } else { |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 595 | OpUnconditionalBranch(&label_list[bb->taken]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 596 | } |
| 597 | break; |
| 598 | |
| 599 | case Instruction::PACKED_SWITCH: |
| 600 | GenPackedSwitch(mir, vB, rl_src[0]); |
| 601 | break; |
| 602 | |
| 603 | case Instruction::SPARSE_SWITCH: |
| 604 | GenSparseSwitch(mir, vB, rl_src[0]); |
| 605 | break; |
| 606 | |
| 607 | case Instruction::CMPL_FLOAT: |
| 608 | case Instruction::CMPG_FLOAT: |
| 609 | case Instruction::CMPL_DOUBLE: |
| 610 | case Instruction::CMPG_DOUBLE: |
| 611 | GenCmpFP(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 612 | break; |
| 613 | |
| 614 | case Instruction::CMP_LONG: |
| 615 | GenCmpLong(rl_dest, rl_src[0], rl_src[1]); |
| 616 | break; |
| 617 | |
| 618 | case Instruction::IF_EQ: |
| 619 | case Instruction::IF_NE: |
| 620 | case Instruction::IF_LT: |
| 621 | case Instruction::IF_GE: |
| 622 | case Instruction::IF_GT: |
| 623 | case Instruction::IF_LE: { |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 624 | LIR* taken = &label_list[bb->taken]; |
| 625 | LIR* fall_through = &label_list[bb->fall_through]; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 626 | // Result known at compile time? |
| 627 | if (rl_src[0].is_const && rl_src[1].is_const) { |
| 628 | bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg), |
| 629 | mir_graph_->ConstantValue(rl_src[1].orig_sreg)); |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 630 | BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through; |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 631 | if (mir_graph_->IsBackedge(bb, target_id) && |
| 632 | (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 633 | GenSuspendTest(opt_flags); |
| 634 | } |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 635 | OpUnconditionalBranch(&label_list[target_id]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 636 | } else { |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 637 | if (mir_graph_->IsBackwardsBranch(bb) && |
| 638 | (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) || |
| 639 | !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 640 | GenSuspendTest(opt_flags); |
| 641 | } |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 642 | GenCompareAndBranch(opcode, rl_src[0], rl_src[1], taken, fall_through); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 643 | } |
| 644 | break; |
| 645 | } |
| 646 | |
| 647 | case Instruction::IF_EQZ: |
| 648 | case Instruction::IF_NEZ: |
| 649 | case Instruction::IF_LTZ: |
| 650 | case Instruction::IF_GEZ: |
| 651 | case Instruction::IF_GTZ: |
| 652 | case Instruction::IF_LEZ: { |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 653 | LIR* taken = &label_list[bb->taken]; |
| 654 | LIR* fall_through = &label_list[bb->fall_through]; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 655 | // Result known at compile time? |
| 656 | if (rl_src[0].is_const) { |
| 657 | bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg), 0); |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 658 | BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through; |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 659 | if (mir_graph_->IsBackedge(bb, target_id) && |
| 660 | (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 661 | GenSuspendTest(opt_flags); |
| 662 | } |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 663 | OpUnconditionalBranch(&label_list[target_id]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 664 | } else { |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 665 | if (mir_graph_->IsBackwardsBranch(bb) && |
| 666 | (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) || |
| 667 | !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 668 | GenSuspendTest(opt_flags); |
| 669 | } |
| 670 | GenCompareZeroAndBranch(opcode, rl_src[0], taken, fall_through); |
| 671 | } |
| 672 | break; |
| 673 | } |
| 674 | |
| 675 | case Instruction::AGET_WIDE: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 676 | GenArrayGet(opt_flags, k64, rl_src[0], rl_src[1], rl_dest, 3); |
| 677 | break; |
| 678 | case Instruction::AGET_OBJECT: |
| 679 | GenArrayGet(opt_flags, kReference, rl_src[0], rl_src[1], rl_dest, 2); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 680 | break; |
| 681 | case Instruction::AGET: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 682 | GenArrayGet(opt_flags, k32, rl_src[0], rl_src[1], rl_dest, 2); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 683 | break; |
| 684 | case Instruction::AGET_BOOLEAN: |
| 685 | GenArrayGet(opt_flags, kUnsignedByte, rl_src[0], rl_src[1], rl_dest, 0); |
| 686 | break; |
| 687 | case Instruction::AGET_BYTE: |
| 688 | GenArrayGet(opt_flags, kSignedByte, rl_src[0], rl_src[1], rl_dest, 0); |
| 689 | break; |
| 690 | case Instruction::AGET_CHAR: |
| 691 | GenArrayGet(opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], rl_dest, 1); |
| 692 | break; |
| 693 | case Instruction::AGET_SHORT: |
| 694 | GenArrayGet(opt_flags, kSignedHalf, rl_src[0], rl_src[1], rl_dest, 1); |
| 695 | break; |
| 696 | case Instruction::APUT_WIDE: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 697 | GenArrayPut(opt_flags, k64, rl_src[1], rl_src[2], rl_src[0], 3, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 698 | break; |
| 699 | case Instruction::APUT: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 700 | GenArrayPut(opt_flags, k32, rl_src[1], rl_src[2], rl_src[0], 2, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 701 | break; |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 702 | case Instruction::APUT_OBJECT: { |
| 703 | bool is_null = mir_graph_->IsConstantNullRef(rl_src[0]); |
| 704 | bool is_safe = is_null; // Always safe to store null. |
| 705 | if (!is_safe) { |
| 706 | // Check safety from verifier type information. |
Vladimir Marko | 2730db0 | 2014-01-27 11:15:17 +0000 | [diff] [blame] | 707 | const DexCompilationUnit* unit = mir_graph_->GetCurrentDexCompilationUnit(); |
| 708 | is_safe = cu_->compiler_driver->IsSafeCast(unit, mir->offset); |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 709 | } |
| 710 | if (is_null || is_safe) { |
| 711 | // Store of constant null doesn't require an assignability test and can be generated inline |
| 712 | // without fixed register usage or a card mark. |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 713 | GenArrayPut(opt_flags, kReference, rl_src[1], rl_src[2], rl_src[0], 2, !is_null); |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 714 | } else { |
| 715 | GenArrayObjPut(opt_flags, rl_src[1], rl_src[2], rl_src[0]); |
| 716 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 717 | break; |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 718 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 719 | case Instruction::APUT_SHORT: |
| 720 | case Instruction::APUT_CHAR: |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 721 | GenArrayPut(opt_flags, kUnsignedHalf, rl_src[1], rl_src[2], rl_src[0], 1, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 722 | break; |
| 723 | case Instruction::APUT_BYTE: |
| 724 | case Instruction::APUT_BOOLEAN: |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 725 | GenArrayPut(opt_flags, kUnsignedByte, rl_src[1], rl_src[2], rl_src[0], 0, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 726 | break; |
| 727 | |
| 728 | case Instruction::IGET_OBJECT: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 729 | GenIGet(mir, opt_flags, kReference, rl_dest, rl_src[0], false, true); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 730 | break; |
| 731 | |
| 732 | case Instruction::IGET_WIDE: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 733 | GenIGet(mir, opt_flags, k64, rl_dest, rl_src[0], true, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 734 | break; |
| 735 | |
| 736 | case Instruction::IGET: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 737 | GenIGet(mir, opt_flags, k32, rl_dest, rl_src[0], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 738 | break; |
| 739 | |
| 740 | case Instruction::IGET_CHAR: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 741 | GenIGet(mir, opt_flags, kUnsignedHalf, rl_dest, rl_src[0], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 742 | break; |
| 743 | |
| 744 | case Instruction::IGET_SHORT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 745 | GenIGet(mir, opt_flags, kSignedHalf, rl_dest, rl_src[0], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 746 | break; |
| 747 | |
| 748 | case Instruction::IGET_BOOLEAN: |
| 749 | case Instruction::IGET_BYTE: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 750 | GenIGet(mir, opt_flags, kUnsignedByte, rl_dest, rl_src[0], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 751 | break; |
| 752 | |
| 753 | case Instruction::IPUT_WIDE: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 754 | GenIPut(mir, opt_flags, k64, rl_src[0], rl_src[1], true, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 755 | break; |
| 756 | |
| 757 | case Instruction::IPUT_OBJECT: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 758 | GenIPut(mir, opt_flags, kReference, rl_src[0], rl_src[1], false, true); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 759 | break; |
| 760 | |
| 761 | case Instruction::IPUT: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 762 | GenIPut(mir, opt_flags, k32, rl_src[0], rl_src[1], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 763 | break; |
| 764 | |
| 765 | case Instruction::IPUT_BOOLEAN: |
| 766 | case Instruction::IPUT_BYTE: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 767 | GenIPut(mir, opt_flags, kUnsignedByte, rl_src[0], rl_src[1], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 768 | break; |
| 769 | |
| 770 | case Instruction::IPUT_CHAR: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 771 | GenIPut(mir, opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 772 | break; |
| 773 | |
| 774 | case Instruction::IPUT_SHORT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 775 | GenIPut(mir, opt_flags, kSignedHalf, rl_src[0], rl_src[1], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 776 | break; |
| 777 | |
| 778 | case Instruction::SGET_OBJECT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 779 | GenSget(mir, rl_dest, false, true); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 780 | break; |
| 781 | case Instruction::SGET: |
| 782 | case Instruction::SGET_BOOLEAN: |
| 783 | case Instruction::SGET_BYTE: |
| 784 | case Instruction::SGET_CHAR: |
| 785 | case Instruction::SGET_SHORT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 786 | GenSget(mir, rl_dest, false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 787 | break; |
| 788 | |
| 789 | case Instruction::SGET_WIDE: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 790 | GenSget(mir, rl_dest, true, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 791 | break; |
| 792 | |
| 793 | case Instruction::SPUT_OBJECT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 794 | GenSput(mir, rl_src[0], false, true); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 795 | break; |
| 796 | |
| 797 | case Instruction::SPUT: |
| 798 | case Instruction::SPUT_BOOLEAN: |
| 799 | case Instruction::SPUT_BYTE: |
| 800 | case Instruction::SPUT_CHAR: |
| 801 | case Instruction::SPUT_SHORT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 802 | GenSput(mir, rl_src[0], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 803 | break; |
| 804 | |
| 805 | case Instruction::SPUT_WIDE: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 806 | GenSput(mir, rl_src[0], true, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 807 | break; |
| 808 | |
| 809 | case Instruction::INVOKE_STATIC_RANGE: |
| 810 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, true)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 811 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 812 | // If the invocation is not inlined, we can assume there is already a |
| 813 | // suspend check at the return site |
| 814 | mir_graph_->AppendGenSuspendTestList(bb); |
| 815 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 816 | break; |
| 817 | case Instruction::INVOKE_STATIC: |
| 818 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, false)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 819 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 820 | mir_graph_->AppendGenSuspendTestList(bb); |
| 821 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 822 | break; |
| 823 | |
| 824 | case Instruction::INVOKE_DIRECT: |
| 825 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, false)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 826 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 827 | mir_graph_->AppendGenSuspendTestList(bb); |
| 828 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 829 | break; |
| 830 | case Instruction::INVOKE_DIRECT_RANGE: |
| 831 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, true)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 832 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 833 | mir_graph_->AppendGenSuspendTestList(bb); |
| 834 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 835 | break; |
| 836 | |
| 837 | case Instruction::INVOKE_VIRTUAL: |
| 838 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, false)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 839 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 840 | mir_graph_->AppendGenSuspendTestList(bb); |
| 841 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 842 | break; |
| 843 | case Instruction::INVOKE_VIRTUAL_RANGE: |
| 844 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, true)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 845 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 846 | mir_graph_->AppendGenSuspendTestList(bb); |
| 847 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 848 | break; |
| 849 | |
| 850 | case Instruction::INVOKE_SUPER: |
| 851 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, false)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 852 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 853 | mir_graph_->AppendGenSuspendTestList(bb); |
| 854 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 855 | break; |
| 856 | case Instruction::INVOKE_SUPER_RANGE: |
| 857 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, true)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 858 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 859 | mir_graph_->AppendGenSuspendTestList(bb); |
| 860 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 861 | break; |
| 862 | |
| 863 | case Instruction::INVOKE_INTERFACE: |
| 864 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, false)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 865 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 866 | mir_graph_->AppendGenSuspendTestList(bb); |
| 867 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 868 | break; |
| 869 | case Instruction::INVOKE_INTERFACE_RANGE: |
| 870 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, true)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 871 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 872 | mir_graph_->AppendGenSuspendTestList(bb); |
| 873 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 874 | break; |
| 875 | |
| 876 | case Instruction::NEG_INT: |
| 877 | case Instruction::NOT_INT: |
| 878 | GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 879 | break; |
| 880 | |
| 881 | case Instruction::NEG_LONG: |
| 882 | case Instruction::NOT_LONG: |
| 883 | GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 884 | break; |
| 885 | |
| 886 | case Instruction::NEG_FLOAT: |
| 887 | GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 888 | break; |
| 889 | |
| 890 | case Instruction::NEG_DOUBLE: |
| 891 | GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 892 | break; |
| 893 | |
| 894 | case Instruction::INT_TO_LONG: |
| 895 | GenIntToLong(rl_dest, rl_src[0]); |
| 896 | break; |
| 897 | |
| 898 | case Instruction::LONG_TO_INT: |
| 899 | rl_src[0] = UpdateLocWide(rl_src[0]); |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 900 | rl_src[0] = NarrowRegLoc(rl_src[0]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 901 | StoreValue(rl_dest, rl_src[0]); |
| 902 | break; |
| 903 | |
| 904 | case Instruction::INT_TO_BYTE: |
| 905 | case Instruction::INT_TO_SHORT: |
| 906 | case Instruction::INT_TO_CHAR: |
| 907 | GenIntNarrowing(opcode, rl_dest, rl_src[0]); |
| 908 | break; |
| 909 | |
| 910 | case Instruction::INT_TO_FLOAT: |
| 911 | case Instruction::INT_TO_DOUBLE: |
| 912 | case Instruction::LONG_TO_FLOAT: |
| 913 | case Instruction::LONG_TO_DOUBLE: |
| 914 | case Instruction::FLOAT_TO_INT: |
| 915 | case Instruction::FLOAT_TO_LONG: |
| 916 | case Instruction::FLOAT_TO_DOUBLE: |
| 917 | case Instruction::DOUBLE_TO_INT: |
| 918 | case Instruction::DOUBLE_TO_LONG: |
| 919 | case Instruction::DOUBLE_TO_FLOAT: |
| 920 | GenConversion(opcode, rl_dest, rl_src[0]); |
| 921 | break; |
| 922 | |
| 923 | |
| 924 | case Instruction::ADD_INT: |
| 925 | case Instruction::ADD_INT_2ADDR: |
| 926 | case Instruction::MUL_INT: |
| 927 | case Instruction::MUL_INT_2ADDR: |
| 928 | case Instruction::AND_INT: |
| 929 | case Instruction::AND_INT_2ADDR: |
| 930 | case Instruction::OR_INT: |
| 931 | case Instruction::OR_INT_2ADDR: |
| 932 | case Instruction::XOR_INT: |
| 933 | case Instruction::XOR_INT_2ADDR: |
| 934 | if (rl_src[0].is_const && |
| 935 | InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[0]))) { |
| 936 | GenArithOpIntLit(opcode, rl_dest, rl_src[1], |
| 937 | mir_graph_->ConstantValue(rl_src[0].orig_sreg)); |
| 938 | } else if (rl_src[1].is_const && |
| 939 | InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) { |
| 940 | GenArithOpIntLit(opcode, rl_dest, rl_src[0], |
| 941 | mir_graph_->ConstantValue(rl_src[1].orig_sreg)); |
| 942 | } else { |
| 943 | GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 944 | } |
| 945 | break; |
| 946 | |
| 947 | case Instruction::SUB_INT: |
| 948 | case Instruction::SUB_INT_2ADDR: |
| 949 | case Instruction::DIV_INT: |
| 950 | case Instruction::DIV_INT_2ADDR: |
| 951 | case Instruction::REM_INT: |
| 952 | case Instruction::REM_INT_2ADDR: |
| 953 | case Instruction::SHL_INT: |
| 954 | case Instruction::SHL_INT_2ADDR: |
| 955 | case Instruction::SHR_INT: |
| 956 | case Instruction::SHR_INT_2ADDR: |
| 957 | case Instruction::USHR_INT: |
| 958 | case Instruction::USHR_INT_2ADDR: |
| 959 | if (rl_src[1].is_const && |
| 960 | InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) { |
| 961 | GenArithOpIntLit(opcode, rl_dest, rl_src[0], mir_graph_->ConstantValue(rl_src[1])); |
| 962 | } else { |
| 963 | GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 964 | } |
| 965 | break; |
| 966 | |
| 967 | case Instruction::ADD_LONG: |
| 968 | case Instruction::SUB_LONG: |
| 969 | case Instruction::AND_LONG: |
| 970 | case Instruction::OR_LONG: |
| 971 | case Instruction::XOR_LONG: |
| 972 | case Instruction::ADD_LONG_2ADDR: |
| 973 | case Instruction::SUB_LONG_2ADDR: |
| 974 | case Instruction::AND_LONG_2ADDR: |
| 975 | case Instruction::OR_LONG_2ADDR: |
| 976 | case Instruction::XOR_LONG_2ADDR: |
| 977 | if (rl_src[0].is_const || rl_src[1].is_const) { |
| 978 | GenArithImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 979 | break; |
| 980 | } |
| 981 | // Note: intentional fallthrough. |
| 982 | |
| 983 | case Instruction::MUL_LONG: |
| 984 | case Instruction::DIV_LONG: |
| 985 | case Instruction::REM_LONG: |
| 986 | case Instruction::MUL_LONG_2ADDR: |
| 987 | case Instruction::DIV_LONG_2ADDR: |
| 988 | case Instruction::REM_LONG_2ADDR: |
| 989 | GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 990 | break; |
| 991 | |
| 992 | case Instruction::SHL_LONG: |
| 993 | case Instruction::SHR_LONG: |
| 994 | case Instruction::USHR_LONG: |
| 995 | case Instruction::SHL_LONG_2ADDR: |
| 996 | case Instruction::SHR_LONG_2ADDR: |
| 997 | case Instruction::USHR_LONG_2ADDR: |
| 998 | if (rl_src[1].is_const) { |
| 999 | GenShiftImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1000 | } else { |
| 1001 | GenShiftOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1002 | } |
| 1003 | break; |
| 1004 | |
| 1005 | case Instruction::ADD_FLOAT: |
| 1006 | case Instruction::SUB_FLOAT: |
| 1007 | case Instruction::MUL_FLOAT: |
| 1008 | case Instruction::DIV_FLOAT: |
| 1009 | case Instruction::REM_FLOAT: |
| 1010 | case Instruction::ADD_FLOAT_2ADDR: |
| 1011 | case Instruction::SUB_FLOAT_2ADDR: |
| 1012 | case Instruction::MUL_FLOAT_2ADDR: |
| 1013 | case Instruction::DIV_FLOAT_2ADDR: |
| 1014 | case Instruction::REM_FLOAT_2ADDR: |
| 1015 | GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1016 | break; |
| 1017 | |
| 1018 | case Instruction::ADD_DOUBLE: |
| 1019 | case Instruction::SUB_DOUBLE: |
| 1020 | case Instruction::MUL_DOUBLE: |
| 1021 | case Instruction::DIV_DOUBLE: |
| 1022 | case Instruction::REM_DOUBLE: |
| 1023 | case Instruction::ADD_DOUBLE_2ADDR: |
| 1024 | case Instruction::SUB_DOUBLE_2ADDR: |
| 1025 | case Instruction::MUL_DOUBLE_2ADDR: |
| 1026 | case Instruction::DIV_DOUBLE_2ADDR: |
| 1027 | case Instruction::REM_DOUBLE_2ADDR: |
| 1028 | GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1029 | break; |
| 1030 | |
| 1031 | case Instruction::RSUB_INT: |
| 1032 | case Instruction::ADD_INT_LIT16: |
| 1033 | case Instruction::MUL_INT_LIT16: |
| 1034 | case Instruction::DIV_INT_LIT16: |
| 1035 | case Instruction::REM_INT_LIT16: |
| 1036 | case Instruction::AND_INT_LIT16: |
| 1037 | case Instruction::OR_INT_LIT16: |
| 1038 | case Instruction::XOR_INT_LIT16: |
| 1039 | case Instruction::ADD_INT_LIT8: |
| 1040 | case Instruction::RSUB_INT_LIT8: |
| 1041 | case Instruction::MUL_INT_LIT8: |
| 1042 | case Instruction::DIV_INT_LIT8: |
| 1043 | case Instruction::REM_INT_LIT8: |
| 1044 | case Instruction::AND_INT_LIT8: |
| 1045 | case Instruction::OR_INT_LIT8: |
| 1046 | case Instruction::XOR_INT_LIT8: |
| 1047 | case Instruction::SHL_INT_LIT8: |
| 1048 | case Instruction::SHR_INT_LIT8: |
| 1049 | case Instruction::USHR_INT_LIT8: |
| 1050 | GenArithOpIntLit(opcode, rl_dest, rl_src[0], vC); |
| 1051 | break; |
| 1052 | |
| 1053 | default: |
| 1054 | LOG(FATAL) << "Unexpected opcode: " << opcode; |
| 1055 | } |
buzbee | 082833c | 2014-05-17 23:16:26 -0700 | [diff] [blame] | 1056 | DCHECK(CheckCorePoolSanity()); |
Brian Carlstrom | 1895ea3 | 2013-07-18 13:28:37 -0700 | [diff] [blame] | 1057 | } // NOLINT(readability/fn_size) |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1058 | |
| 1059 | // Process extended MIR instructions |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 1060 | void Mir2Lir::HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1061 | switch (static_cast<ExtendedMIROpcode>(mir->dalvikInsn.opcode)) { |
| 1062 | case kMirOpCopy: { |
| 1063 | RegLocation rl_src = mir_graph_->GetSrc(mir, 0); |
| 1064 | RegLocation rl_dest = mir_graph_->GetDest(mir); |
| 1065 | StoreValue(rl_dest, rl_src); |
| 1066 | break; |
| 1067 | } |
| 1068 | case kMirOpFusedCmplFloat: |
| 1069 | GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, false /*double*/); |
| 1070 | break; |
| 1071 | case kMirOpFusedCmpgFloat: |
| 1072 | GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, false /*double*/); |
| 1073 | break; |
| 1074 | case kMirOpFusedCmplDouble: |
| 1075 | GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, true /*double*/); |
| 1076 | break; |
| 1077 | case kMirOpFusedCmpgDouble: |
| 1078 | GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, true /*double*/); |
| 1079 | break; |
| 1080 | case kMirOpFusedCmpLong: |
| 1081 | GenFusedLongCmpBranch(bb, mir); |
| 1082 | break; |
| 1083 | case kMirOpSelect: |
| 1084 | GenSelect(bb, mir); |
| 1085 | break; |
Mark Mendell | d65c51a | 2014-04-29 16:55:20 -0400 | [diff] [blame] | 1086 | case kMirOpPhi: |
| 1087 | case kMirOpNop: |
| 1088 | case kMirOpNullCheck: |
| 1089 | case kMirOpRangeCheck: |
| 1090 | case kMirOpDivZeroCheck: |
| 1091 | case kMirOpCheck: |
| 1092 | case kMirOpCheckPart2: |
| 1093 | // Ignore these known opcodes |
| 1094 | break; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1095 | default: |
Mark Mendell | d65c51a | 2014-04-29 16:55:20 -0400 | [diff] [blame] | 1096 | // Give the backends a chance to handle unknown extended MIR opcodes. |
| 1097 | GenMachineSpecificExtendedMethodMIR(bb, mir); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1098 | break; |
| 1099 | } |
| 1100 | } |
| 1101 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1102 | void Mir2Lir::GenPrintLabel(MIR* mir) { |
| 1103 | // Mark the beginning of a Dalvik instruction for line tracking. |
| 1104 | if (cu_->verbose) { |
| 1105 | char* inst_str = mir_graph_->GetDalvikDisassembly(mir); |
| 1106 | MarkBoundary(mir->offset, inst_str); |
| 1107 | } |
| 1108 | } |
| 1109 | |
// Handle the content in each basic block.
// Emits the block label, entry/exit sequences when applicable, then lowers
// every MIR in |bb| to LIR. Always returns false (the dataflow-iterator
// protocol's "no change" result).
bool Mir2Lir::MethodBlockCodeGen(BasicBlock* bb) {
  if (bb->block_type == kDead) return false;
  current_dalvik_offset_ = bb->start_offset;
  MIR* mir;
  int block_id = bb->id;

  // Block labels are pre-allocated in block_label_list_ (indexed by block id);
  // record the Dalvik offset this label corresponds to.
  block_label_list_[block_id].operands[0] = bb->start_offset;

  // Insert the block label.
  block_label_list_[block_id].opcode = kPseudoNormalBlockLabel;
  block_label_list_[block_id].flags.fixup = kFixupLabel;
  AppendLIR(&block_label_list_[block_id]);

  LIR* head_lir = NULL;

  // If this is a catch block, export the start address.
  if (bb->catch_entry) {
    head_lir = NewLIR0(kPseudoExportedPC);
  }

  // Free temp registers and reset redundant store tracking.
  ClobberAllTemps();

  if (bb->block_type == kEntryBlock) {
    ResetRegPool();
    // Incoming arguments occupy the highest-numbered Dalvik registers.
    int start_vreg = cu_->num_dalvik_registers - cu_->num_ins;
    GenEntrySequence(&mir_graph_->reg_location_[start_vreg],
                     mir_graph_->reg_location_[mir_graph_->GetMethodSReg()]);
  } else if (bb->block_type == kExitBlock) {
    ResetRegPool();
    GenExitSequence();
  }

  for (mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
    ResetRegPool();
    if (cu_->disable_opt & (1 << kTrackLiveTemps)) {
      ClobberAllTemps();
      // Reset temp allocation to minimize differences when A/B testing.
      reg_pool_->ResetNextTemp();
    }

    if (cu_->disable_opt & (1 << kSuppressLoads)) {
      ResetDefTracking();
    }

    // Reset temp tracking sanity check.
    if (kIsDebugBuild) {
      live_sreg_ = INVALID_SREG;
    }

    current_dalvik_offset_ = mir->offset;
    int opcode = mir->dalvikInsn.opcode;

    // Emit a disassembly boundary marker (verbose builds only).
    GenPrintLabel(mir);

    // Remember the first LIR for this block.
    if (head_lir == NULL) {
      head_lir = &block_label_list_[bb->id];
      // Set the first label as a scheduling barrier.
      DCHECK(!head_lir->flags.use_def_invalid);
      head_lir->u.m.def_mask = &kEncodeAll;
    }

    if (opcode == kMirOpCheck) {
      // Combine check and work halves of throwing instruction.
      // The check half takes over the work half's opcode/meta/ssa_rep so the
      // throwing portion is generated here; the work half is rewritten into a
      // kMirOpCheckPart2 that points back at this MIR.
      MIR* work_half = mir->meta.throw_insn;
      mir->dalvikInsn.opcode = work_half->dalvikInsn.opcode;
      mir->meta = work_half->meta;  // Whatever the work_half had, we need to copy it.
      opcode = work_half->dalvikInsn.opcode;
      SSARepresentation* ssa_rep = work_half->ssa_rep;
      work_half->ssa_rep = mir->ssa_rep;
      mir->ssa_rep = ssa_rep;
      work_half->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpCheckPart2);
      work_half->meta.throw_insn = mir;
    }

    if (MIR::DecodedInstruction::IsPseudoMirOp(opcode)) {
      // Extended (pseudo) MIR opcodes are lowered by a separate handler.
      HandleExtendedMethodMIR(bb, mir);
      continue;
    }

    CompileDalvikInstruction(mir, bb, block_label_list_);
  }

  if (head_lir) {
    // Eliminate redundant loads/stores and delay stores into later slots.
    ApplyLocalOptimizations(head_lir, last_lir_insn_);
  }
  return false;
}
| 1201 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1202 | bool Mir2Lir::SpecialMIR2LIR(const InlineMethod& special) { |
Vladimir Marko | 5816ed4 | 2013-11-27 17:04:20 +0000 | [diff] [blame] | 1203 | cu_->NewTimingSplit("SpecialMIR2LIR"); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1204 | // Find the first DalvikByteCode block. |
| 1205 | int num_reachable_blocks = mir_graph_->GetNumReachableBlocks(); |
| 1206 | BasicBlock*bb = NULL; |
| 1207 | for (int idx = 0; idx < num_reachable_blocks; idx++) { |
| 1208 | // TODO: no direct access of growable lists. |
| 1209 | int dfs_index = mir_graph_->GetDfsOrder()->Get(idx); |
| 1210 | bb = mir_graph_->GetBasicBlock(dfs_index); |
| 1211 | if (bb->block_type == kDalvikByteCode) { |
| 1212 | break; |
| 1213 | } |
| 1214 | } |
| 1215 | if (bb == NULL) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1216 | return false; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1217 | } |
| 1218 | DCHECK_EQ(bb->start_offset, 0); |
| 1219 | DCHECK(bb->first_mir_insn != NULL); |
| 1220 | |
| 1221 | // Get the first instruction. |
| 1222 | MIR* mir = bb->first_mir_insn; |
| 1223 | |
| 1224 | // Free temp registers and reset redundant store tracking. |
| 1225 | ResetRegPool(); |
| 1226 | ResetDefTracking(); |
buzbee | ba57451 | 2014-05-12 15:13:16 -0700 | [diff] [blame] | 1227 | ClobberAllTemps(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1228 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1229 | return GenSpecialCase(bb, mir, special); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1230 | } |
| 1231 | |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 1232 | void Mir2Lir::MethodMIR2LIR() { |
buzbee | a61f495 | 2013-08-23 14:27:06 -0700 | [diff] [blame] | 1233 | cu_->NewTimingSplit("MIR2LIR"); |
| 1234 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1235 | // Hold the labels of each block. |
| 1236 | block_label_list_ = |
Mathieu Chartier | f6c4b3b | 2013-08-24 16:11:37 -0700 | [diff] [blame] | 1237 | static_cast<LIR*>(arena_->Alloc(sizeof(LIR) * mir_graph_->GetNumBlocks(), |
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 1238 | kArenaAllocLIR)); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1239 | |
buzbee | 56c7178 | 2013-09-05 17:13:19 -0700 | [diff] [blame] | 1240 | PreOrderDfsIterator iter(mir_graph_); |
buzbee | 252254b | 2013-09-08 16:20:53 -0700 | [diff] [blame] | 1241 | BasicBlock* curr_bb = iter.Next(); |
| 1242 | BasicBlock* next_bb = iter.Next(); |
| 1243 | while (curr_bb != NULL) { |
| 1244 | MethodBlockCodeGen(curr_bb); |
| 1245 | // If the fall_through block is no longer laid out consecutively, drop in a branch. |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 1246 | BasicBlock* curr_bb_fall_through = mir_graph_->GetBasicBlock(curr_bb->fall_through); |
| 1247 | if ((curr_bb_fall_through != NULL) && (curr_bb_fall_through != next_bb)) { |
| 1248 | OpUnconditionalBranch(&block_label_list_[curr_bb->fall_through]); |
buzbee | 252254b | 2013-09-08 16:20:53 -0700 | [diff] [blame] | 1249 | } |
| 1250 | curr_bb = next_bb; |
| 1251 | do { |
| 1252 | next_bb = iter.Next(); |
| 1253 | } while ((next_bb != NULL) && (next_bb->block_type == kDead)); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1254 | } |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1255 | HandleSlowPaths(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1256 | } |
| 1257 | |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1258 | // |
| 1259 | // LIR Slow Path |
| 1260 | // |
| 1261 | |
Mingyao Yang | 6ffcfa0 | 2014-04-25 11:06:00 -0700 | [diff] [blame] | 1262 | LIR* Mir2Lir::LIRSlowPath::GenerateTargetLabel(int opcode) { |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1263 | m2l_->SetCurrentDexPc(current_dex_pc_); |
Mingyao Yang | 6ffcfa0 | 2014-04-25 11:06:00 -0700 | [diff] [blame] | 1264 | LIR* target = m2l_->NewLIR0(opcode); |
Vladimir Marko | 3bc8615 | 2014-03-13 14:11:28 +0000 | [diff] [blame] | 1265 | fromfast_->target = target; |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1266 | return target; |
| 1267 | } |
Vladimir Marko | 3bc8615 | 2014-03-13 14:11:28 +0000 | [diff] [blame] | 1268 | |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1269 | |
| 1270 | void Mir2Lir::CheckRegStorageImpl(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp, |
| 1271 | bool fail, bool report) |
| 1272 | const { |
| 1273 | if (rs.Valid()) { |
| 1274 | if (ref == RefCheck::kCheckRef) { |
| 1275 | if (cu_->target64 && !rs.Is64Bit()) { |
| 1276 | if (fail) { |
| 1277 | CHECK(false) << "Reg storage not 64b for ref."; |
| 1278 | } else if (report) { |
| 1279 | LOG(WARNING) << "Reg storage not 64b for ref."; |
| 1280 | } |
| 1281 | } |
| 1282 | } |
| 1283 | if (wide == WidenessCheck::kCheckWide) { |
| 1284 | if (!rs.Is64Bit()) { |
| 1285 | if (fail) { |
| 1286 | CHECK(false) << "Reg storage not 64b for wide."; |
| 1287 | } else if (report) { |
| 1288 | LOG(WARNING) << "Reg storage not 64b for wide."; |
| 1289 | } |
| 1290 | } |
| 1291 | } |
| 1292 | // A tighter check would be nice, but for now soft-float will not check float at all. |
| 1293 | if (fp == FPCheck::kCheckFP && cu_->instruction_set != kArm) { |
| 1294 | if (!rs.IsFloat()) { |
| 1295 | if (fail) { |
| 1296 | CHECK(false) << "Reg storage not float for fp."; |
| 1297 | } else if (report) { |
| 1298 | LOG(WARNING) << "Reg storage not float for fp."; |
| 1299 | } |
| 1300 | } |
| 1301 | } else if (fp == FPCheck::kCheckNotFP) { |
| 1302 | if (rs.IsFloat()) { |
| 1303 | if (fail) { |
| 1304 | CHECK(false) << "Reg storage float for not-fp."; |
| 1305 | } else if (report) { |
| 1306 | LOG(WARNING) << "Reg storage float for not-fp."; |
| 1307 | } |
| 1308 | } |
| 1309 | } |
| 1310 | } |
| 1311 | } |
| 1312 | |
| 1313 | void Mir2Lir::CheckRegLocationImpl(RegLocation rl, bool fail, bool report) const { |
| 1314 | // Regrettably can't use the fp part of rl, as that is not really indicative of where a value |
| 1315 | // will be stored. |
| 1316 | CheckRegStorageImpl(rl.reg, rl.wide ? WidenessCheck::kCheckWide : WidenessCheck::kCheckNotWide, |
| 1317 | rl.ref ? RefCheck::kCheckRef : RefCheck::kCheckNotRef, FPCheck::kIgnoreFP, fail, report); |
| 1318 | } |
| 1319 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1320 | } // namespace art |