Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2011 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "dex/compiler_internals.h" |
| 18 | #include "dex/dataflow_iterator-inl.h" |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 19 | #include "dex/quick/dex_file_method_inliner.h" |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 20 | #include "mir_to_lir-inl.h" |
| 21 | #include "object_utils.h" |
Ian Rogers | 02ed4c0 | 2013-09-06 13:10:04 -0700 | [diff] [blame] | 22 | #include "thread-inl.h" |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 23 | |
| 24 | namespace art { |
| 25 | |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 26 | RegisterClass Mir2Lir::ShortyToRegClass(char shorty_type) { |
| 27 | RegisterClass res; |
| 28 | switch (shorty_type) { |
| 29 | case 'L': |
| 30 | res = kRefReg; |
| 31 | break; |
| 32 | case 'F': |
| 33 | // Expected fallthrough. |
| 34 | case 'D': |
| 35 | res = kFPReg; |
| 36 | break; |
| 37 | default: |
| 38 | res = kCoreReg; |
| 39 | } |
| 40 | return res; |
| 41 | } |
| 42 | |
| 43 | RegisterClass Mir2Lir::LocToRegClass(RegLocation loc) { |
| 44 | RegisterClass res; |
| 45 | if (loc.fp) { |
| 46 | DCHECK(!loc.ref) << "At most, one of ref/fp may be set"; |
| 47 | res = kFPReg; |
| 48 | } else if (loc.ref) { |
| 49 | res = kRefReg; |
| 50 | } else { |
| 51 | res = kCoreReg; |
| 52 | } |
| 53 | return res; |
| 54 | } |
| 55 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 56 | void Mir2Lir::LockArg(int in_position, bool wide) { |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 57 | RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position); |
| 58 | RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) : |
| 59 | RegStorage::InvalidReg(); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 60 | |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 61 | if (reg_arg_low.Valid()) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 62 | LockTemp(reg_arg_low); |
| 63 | } |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 64 | if (reg_arg_high.Valid() && reg_arg_low != reg_arg_high) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 65 | LockTemp(reg_arg_high); |
| 66 | } |
| 67 | } |
| 68 | |
buzbee | 33ae558 | 2014-06-12 14:56:32 -0700 | [diff] [blame] | 69 | // TODO: simplify when 32-bit targets go hard-float. |
// Loads the incoming argument at |in_position| into a register of |reg_class|,
// reading from the stack when the argument (or its high half) was not mapped
// to a physical register. Returns the register (a pair for wide args on
// 32-bit targets) holding the value, copied to |reg_class| if necessary.
RegStorage Mir2Lir::LoadArg(int in_position, RegisterClass reg_class, bool wide) {
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  // Stack offset of the caller's out VR that corresponds to this in VR.
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);

  if (cu_->instruction_set == kX86) {
    /*
     * When doing a call for x86, it moves the stack pointer in order to push return.
     * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * Same as above for x86-64, but the pushed return address is 8 bytes.
     */
    offset += sizeof(uint64_t);
  }

  if (cu_->target64) {
    // 64-bit targets: an argument is either fully in a register or fully in memory;
    // it is never split, so no pair handling is needed here.
    RegStorage reg_arg = GetArgMappingToPhysicalReg(in_position);
    if (!reg_arg.Valid()) {
      // Not register-mapped: load straight from the stack into a fresh temp.
      RegStorage new_reg =
          wide ? AllocTypedTempWide(false, reg_class) : AllocTypedTemp(false, reg_class);
      LoadBaseDisp(TargetReg(kSp), offset, new_reg, wide ? k64 : k32);
      return new_reg;
    } else {
      // Check if we need to copy the arg to a different reg_class.
      if (!RegClassMatches(reg_class, reg_arg)) {
        if (wide) {
          RegStorage new_reg = AllocTypedTempWide(false, reg_class);
          OpRegCopyWide(new_reg, reg_arg);
          reg_arg = new_reg;
        } else {
          RegStorage new_reg = AllocTypedTemp(false, reg_class);
          OpRegCopy(new_reg, reg_arg);
          reg_arg = new_reg;
        }
      }
    }
    return reg_arg;
  }

  // 32-bit path: a wide argument may be split between registers and memory.
  RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
  RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
      RegStorage::InvalidReg();

  // If the VR is wide and there is no register for high part, we need to load it.
  if (wide && !reg_arg_high.Valid()) {
    // If the low part is not in a reg, we allocate a pair. Otherwise, we just load to high reg.
    if (!reg_arg_low.Valid()) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      LoadBaseDisp(TargetReg(kSp), offset, new_regs, k64);
      return new_regs;  // The reg_class is OK, we can return.
    } else {
      // Assume that no ABI allows splitting a wide fp reg between a narrow fp reg and memory,
      // i.e. the low part is in a core reg. Load the second part in a core reg as well for now.
      DCHECK(!reg_arg_low.IsFloat());
      reg_arg_high = AllocTemp();
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetReg(kSp), offset_high, reg_arg_high);
      // Continue below to check the reg_class.
    }
  }

  // If the low part is not in a register yet, we need to load it.
  if (!reg_arg_low.Valid()) {
    // Assume that if the low part of a wide arg is passed in memory, so is the high part,
    // thus we don't get here for wide args as it's handled above. Big-endian ABIs could
    // conceivably break this assumption but Android supports only little-endian architectures.
    DCHECK(!wide);
    reg_arg_low = AllocTypedTemp(false, reg_class);
    Load32Disp(TargetReg(kSp), offset, reg_arg_low);
    return reg_arg_low;  // The reg_class is OK, we can return.
  }

  RegStorage reg_arg = wide ? RegStorage::MakeRegPair(reg_arg_low, reg_arg_high) : reg_arg_low;
  // Check if we need to copy the arg to a different reg_class.
  if (!RegClassMatches(reg_class, reg_arg)) {
    if (wide) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      OpRegCopyWide(new_regs, reg_arg);
      reg_arg = new_regs;
    } else {
      RegStorage new_reg = AllocTypedTemp(false, reg_class);
      OpRegCopy(new_reg, reg_arg);
      reg_arg = new_reg;
    }
  }
  return reg_arg;
}
| 162 | |
// TODO: simplify when 32-bit targets go hard-float.
// Loads the incoming argument at |in_position| directly into the already-
// evaluated destination |rl_dest| (register known), copying from the mapped
// argument register(s) when available and loading from the stack otherwise.
void Mir2Lir::LoadArgDirect(int in_position, RegLocation rl_dest) {
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  // Stack offset of the caller's out VR that corresponds to this in VR.
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
  if (cu_->instruction_set == kX86) {
    /*
     * When doing a call for x86, it moves the stack pointer in order to push return.
     * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * Same as above for x86-64, but the pushed return address is 8 bytes.
     */
    offset += sizeof(uint64_t);
  }

  if (!rl_dest.wide) {
    // Narrow value: single register copy or 32-bit stack load.
    RegStorage reg = GetArgMappingToPhysicalReg(in_position);
    if (reg.Valid()) {
      OpRegCopy(rl_dest.reg, reg);
    } else {
      Load32Disp(TargetReg(kSp), offset, rl_dest.reg);
    }
  } else {
    if (cu_->target64) {
      // 64-bit targets never split a wide arg between register and memory.
      RegStorage reg = GetArgMappingToPhysicalReg(in_position);
      if (reg.Valid()) {
        OpRegCopy(rl_dest.reg, reg);
      } else {
        LoadBaseDisp(TargetReg(kSp), offset, rl_dest.reg, k64);
      }
      return;
    }

    // 32-bit path: each half may independently be in a register or on the stack.
    RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
    RegStorage reg_arg_high = GetArgMappingToPhysicalReg(in_position + 1);

    if (reg_arg_low.Valid() && reg_arg_high.Valid()) {
      // Both halves in registers: one wide copy.
      OpRegCopyWide(rl_dest.reg, RegStorage::MakeRegPair(reg_arg_low, reg_arg_high));
    } else if (reg_arg_low.Valid() && !reg_arg_high.Valid()) {
      // Low half in a register, high half on the stack.
      OpRegCopy(rl_dest.reg, reg_arg_low);
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetReg(kSp), offset_high, rl_dest.reg.GetHigh());
    } else if (!reg_arg_low.Valid() && reg_arg_high.Valid()) {
      // High half in a register, low half on the stack.
      OpRegCopy(rl_dest.reg.GetHigh(), reg_arg_high);
      Load32Disp(TargetReg(kSp), offset, rl_dest.reg.GetLow());
    } else {
      // Both halves on the stack: one 64-bit load.
      LoadBaseDisp(TargetReg(kSp), offset, rl_dest.reg, k64);
    }
  }
}
| 218 | |
// Generates inlined code for a simple getter (iget on "this") compiled as a
// special-case method. Returns false (emitting no code) when the pattern is
// not supported; after the "point of no return" it must succeed.
bool Mir2Lir::GenSpecialIGet(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }

  bool wide = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_WIDE));
  bool ref = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT));
  OpSize size = LoadStoreOpSize(wide, ref);
  // Bail if the target cannot do an atomic load of this size for a volatile field.
  if (data.is_volatile && !SupportsVolatileLoadStore(size)) {
    return false;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  // The return location class comes from the method's return type (shorty[0]).
  RegisterClass ret_reg_class = ShortyToRegClass(cu_->shorty[0]);
  RegLocation rl_dest = wide ? GetReturnWide(ret_reg_class) : GetReturn(ret_reg_class);
  RegStorage r_result = rl_dest.reg;
  // Load into a temp of the field's register class if it differs from the
  // return register's class; copy to the return register afterwards.
  if (!RegClassMatches(reg_class, r_result)) {
    r_result = wide ? AllocTypedTempWide(rl_dest.fp, reg_class)
                    : AllocTypedTemp(rl_dest.fp, reg_class);
  }
  if (data.is_volatile) {
    LoadBaseDispVolatile(reg_obj, data.field_offset, r_result, size);
    // Without context sensitive analysis, we must issue the most conservative barriers.
    // In this case, either a load or store may follow so we issue both barriers.
    GenMemBarrier(kLoadLoad);
    GenMemBarrier(kLoadStore);
  } else {
    LoadBaseDisp(reg_obj, data.field_offset, r_result, size);
  }
  if (r_result != rl_dest.reg) {
    if (wide) {
      OpRegCopyWide(rl_dest.reg, r_result);
    } else {
      OpRegCopy(rl_dest.reg, r_result);
    }
  }
  return true;
}
| 264 | |
| 265 | bool Mir2Lir::GenSpecialIPut(MIR* mir, const InlineMethod& special) { |
| 266 | // FastInstance() already checked by DexFileMethodInliner. |
| 267 | const InlineIGetIPutData& data = special.d.ifield_data; |
Vladimir Marko | e1fced1 | 2014-04-04 14:52:53 +0100 | [diff] [blame] | 268 | if (data.method_is_static != 0u || data.object_arg != 0u) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 269 | // The object is not "this" and has to be null-checked. |
| 270 | return false; |
| 271 | } |
Vladimir Marko | e1fced1 | 2014-04-04 14:52:53 +0100 | [diff] [blame] | 272 | if (data.return_arg_plus1 != 0u) { |
| 273 | // The setter returns a method argument which we don't support here. |
| 274 | return false; |
| 275 | } |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 276 | |
Vladimir Marko | e3e0260 | 2014-03-12 15:42:41 +0000 | [diff] [blame] | 277 | bool wide = (data.op_variant == InlineMethodAnalyser::IPutVariant(Instruction::IPUT_WIDE)); |
Vladimir Marko | 455759b | 2014-05-06 20:49:36 +0100 | [diff] [blame] | 278 | bool ref = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT)); |
| 279 | OpSize size = LoadStoreOpSize(wide, ref); |
Vladimir Marko | 674744e | 2014-04-24 15:18:26 +0100 | [diff] [blame] | 280 | if (data.is_volatile && !SupportsVolatileLoadStore(size)) { |
| 281 | return false; |
| 282 | } |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 283 | |
| 284 | // Point of no return - no aborts after this |
| 285 | GenPrintLabel(mir); |
| 286 | LockArg(data.object_arg); |
| 287 | LockArg(data.src_arg, wide); |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 288 | RegStorage reg_obj = LoadArg(data.object_arg, kRefReg); |
Vladimir Marko | c93ac8b | 2014-05-13 17:53:49 +0100 | [diff] [blame] | 289 | RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile); |
| 290 | RegStorage reg_src = LoadArg(data.src_arg, reg_class, wide); |
Andreas Gampe | de68676 | 2014-06-24 18:42:06 +0000 | [diff] [blame] | 291 | if (data.is_volatile) { |
| 292 | // There might have been a store before this volatile one so insert StoreStore barrier. |
| 293 | GenMemBarrier(kStoreStore); |
| 294 | StoreBaseDispVolatile(reg_obj, data.field_offset, reg_src, size); |
| 295 | // A load might follow the volatile store so insert a StoreLoad barrier. |
| 296 | GenMemBarrier(kStoreLoad); |
Vladimir Marko | 674744e | 2014-04-24 15:18:26 +0100 | [diff] [blame] | 297 | } else { |
Andreas Gampe | de68676 | 2014-06-24 18:42:06 +0000 | [diff] [blame] | 298 | StoreBaseDisp(reg_obj, data.field_offset, reg_src, size); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 299 | } |
Vladimir Marko | 455759b | 2014-05-06 20:49:36 +0100 | [diff] [blame] | 300 | if (ref) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 301 | MarkGCCard(reg_src, reg_obj); |
| 302 | } |
| 303 | return true; |
| 304 | } |
| 305 | |
| 306 | bool Mir2Lir::GenSpecialIdentity(MIR* mir, const InlineMethod& special) { |
| 307 | const InlineReturnArgData& data = special.d.return_data; |
Vladimir Marko | e3e0260 | 2014-03-12 15:42:41 +0000 | [diff] [blame] | 308 | bool wide = (data.is_wide != 0u); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 309 | |
| 310 | // Point of no return - no aborts after this |
| 311 | GenPrintLabel(mir); |
| 312 | LockArg(data.arg, wide); |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 313 | RegisterClass reg_class = ShortyToRegClass(cu_->shorty[0]); |
| 314 | RegLocation rl_dest = wide ? GetReturnWide(reg_class) : GetReturn(reg_class); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 315 | LoadArgDirect(data.arg, rl_dest); |
| 316 | return true; |
| 317 | } |
| 318 | |
| 319 | /* |
| 320 | * Special-case code generation for simple non-throwing leaf methods. |
| 321 | */ |
// Attempts special-case (frameless leaf) code generation for the method whose
// body is the single MIR pattern described by |special|. On success, emits the
// special exit sequence and clears all spill/frame bookkeeping (frame size 0).
// Returns whether the special-case generation succeeded.
bool Mir2Lir::GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special) {
  DCHECK(special.flags & kInlineSpecial);
  current_dalvik_offset_ = mir->offset;
  // MIR whose offset is used for the return label/verbosity, set per opcode below.
  MIR* return_mir = nullptr;
  bool successful = false;

  switch (special.opcode) {
    case kInlineOpNop:
      // Empty method: nothing but the return itself.
      successful = true;
      DCHECK_EQ(mir->dalvikInsn.opcode, Instruction::RETURN_VOID);
      return_mir = mir;
      break;
    case kInlineOpNonWideConst: {
      // Method returns a compile-time constant (non-wide).
      successful = true;
      RegLocation rl_dest = GetReturn(ShortyToRegClass(cu_->shorty[0]));
      GenPrintLabel(mir);
      LoadConstant(rl_dest.reg, static_cast<int>(special.d.data));
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    }
    case kInlineOpReturnArg:
      successful = GenSpecialIdentity(mir, special);
      return_mir = mir;
      break;
    case kInlineOpIGet:
      successful = GenSpecialIGet(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    case kInlineOpIPut:
      successful = GenSpecialIPut(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    default:
      break;
  }

  if (successful) {
    if (kIsDebugBuild) {
      // Clear unreachable catch entries.
      mir_graph_->catches_.clear();
    }

    // Handle verbosity for return MIR.
    if (return_mir != nullptr) {
      current_dalvik_offset_ = return_mir->offset;
      // Not handling special identity case because it already generated code as part
      // of the return. The label should have been added before any code was generated.
      if (special.opcode != kInlineOpReturnArg) {
        GenPrintLabel(return_mir);
      }
    }
    GenSpecialExitSequence();

    // Special-case methods are frameless leaves: no spills, no frame.
    core_spill_mask_ = 0;
    num_core_spills_ = 0;
    fp_spill_mask_ = 0;
    num_fp_spills_ = 0;
    frame_size_ = 0;
    core_vmap_table_.clear();
    fp_vmap_table_.clear();
  }

  return successful;
}
| 386 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 387 | /* |
| 388 | * Target-independent code generation. Use only high-level |
| 389 | * load/store utilities here, or target-dependent genXX() handlers |
| 390 | * when necessary. |
| 391 | */ |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 392 | void Mir2Lir::CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 393 | RegLocation rl_src[3]; |
| 394 | RegLocation rl_dest = mir_graph_->GetBadLoc(); |
| 395 | RegLocation rl_result = mir_graph_->GetBadLoc(); |
| 396 | Instruction::Code opcode = mir->dalvikInsn.opcode; |
| 397 | int opt_flags = mir->optimization_flags; |
| 398 | uint32_t vB = mir->dalvikInsn.vB; |
| 399 | uint32_t vC = mir->dalvikInsn.vC; |
buzbee | 082833c | 2014-05-17 23:16:26 -0700 | [diff] [blame] | 400 | DCHECK(CheckCorePoolSanity()) << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " @ 0x:" |
| 401 | << std::hex << current_dalvik_offset_; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 402 | |
| 403 | // Prep Src and Dest locations. |
| 404 | int next_sreg = 0; |
| 405 | int next_loc = 0; |
Jean Christophe Beyler | cc794c3 | 2014-05-02 09:34:13 -0700 | [diff] [blame] | 406 | uint64_t attrs = MIRGraph::GetDataFlowAttributes(opcode); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 407 | rl_src[0] = rl_src[1] = rl_src[2] = mir_graph_->GetBadLoc(); |
| 408 | if (attrs & DF_UA) { |
| 409 | if (attrs & DF_A_WIDE) { |
| 410 | rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg); |
| 411 | next_sreg+= 2; |
| 412 | } else { |
| 413 | rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg); |
| 414 | next_sreg++; |
| 415 | } |
| 416 | } |
| 417 | if (attrs & DF_UB) { |
| 418 | if (attrs & DF_B_WIDE) { |
| 419 | rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg); |
| 420 | next_sreg+= 2; |
| 421 | } else { |
| 422 | rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg); |
| 423 | next_sreg++; |
| 424 | } |
| 425 | } |
| 426 | if (attrs & DF_UC) { |
| 427 | if (attrs & DF_C_WIDE) { |
| 428 | rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg); |
| 429 | } else { |
| 430 | rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg); |
| 431 | } |
| 432 | } |
| 433 | if (attrs & DF_DA) { |
| 434 | if (attrs & DF_A_WIDE) { |
| 435 | rl_dest = mir_graph_->GetDestWide(mir); |
| 436 | } else { |
| 437 | rl_dest = mir_graph_->GetDest(mir); |
| 438 | } |
| 439 | } |
| 440 | switch (opcode) { |
| 441 | case Instruction::NOP: |
| 442 | break; |
| 443 | |
| 444 | case Instruction::MOVE_EXCEPTION: |
| 445 | GenMoveException(rl_dest); |
| 446 | break; |
| 447 | |
| 448 | case Instruction::RETURN_VOID: |
| 449 | if (((cu_->access_flags & kAccConstructor) != 0) && |
| 450 | cu_->compiler_driver->RequiresConstructorBarrier(Thread::Current(), cu_->dex_file, |
| 451 | cu_->class_def_idx)) { |
| 452 | GenMemBarrier(kStoreStore); |
| 453 | } |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 454 | if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 455 | GenSuspendTest(opt_flags); |
| 456 | } |
| 457 | break; |
| 458 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 459 | case Instruction::RETURN_OBJECT: |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 460 | DCHECK(rl_src[0].ref); |
| 461 | // Intentional fallthrough. |
| 462 | case Instruction::RETURN: |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 463 | if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 464 | GenSuspendTest(opt_flags); |
| 465 | } |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 466 | DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0])); |
| 467 | StoreValue(GetReturn(LocToRegClass(rl_src[0])), rl_src[0]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 468 | break; |
| 469 | |
| 470 | case Instruction::RETURN_WIDE: |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 471 | if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 472 | GenSuspendTest(opt_flags); |
| 473 | } |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 474 | DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0])); |
| 475 | StoreValueWide(GetReturnWide(LocToRegClass(rl_src[0])), rl_src[0]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 476 | break; |
| 477 | |
| 478 | case Instruction::MOVE_RESULT_WIDE: |
Vladimir Marko | 9820b7c | 2014-01-02 16:40:37 +0000 | [diff] [blame] | 479 | if ((opt_flags & MIR_INLINED) != 0) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 480 | break; // Nop - combined w/ previous invoke. |
Vladimir Marko | 9820b7c | 2014-01-02 16:40:37 +0000 | [diff] [blame] | 481 | } |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 482 | StoreValueWide(rl_dest, GetReturnWide(LocToRegClass(rl_dest))); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 483 | break; |
| 484 | |
| 485 | case Instruction::MOVE_RESULT: |
| 486 | case Instruction::MOVE_RESULT_OBJECT: |
Vladimir Marko | 9820b7c | 2014-01-02 16:40:37 +0000 | [diff] [blame] | 487 | if ((opt_flags & MIR_INLINED) != 0) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 488 | break; // Nop - combined w/ previous invoke. |
Vladimir Marko | 9820b7c | 2014-01-02 16:40:37 +0000 | [diff] [blame] | 489 | } |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 490 | StoreValue(rl_dest, GetReturn(LocToRegClass(rl_dest))); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 491 | break; |
| 492 | |
| 493 | case Instruction::MOVE: |
| 494 | case Instruction::MOVE_OBJECT: |
| 495 | case Instruction::MOVE_16: |
| 496 | case Instruction::MOVE_OBJECT_16: |
| 497 | case Instruction::MOVE_FROM16: |
| 498 | case Instruction::MOVE_OBJECT_FROM16: |
| 499 | StoreValue(rl_dest, rl_src[0]); |
| 500 | break; |
| 501 | |
| 502 | case Instruction::MOVE_WIDE: |
| 503 | case Instruction::MOVE_WIDE_16: |
| 504 | case Instruction::MOVE_WIDE_FROM16: |
| 505 | StoreValueWide(rl_dest, rl_src[0]); |
| 506 | break; |
| 507 | |
| 508 | case Instruction::CONST: |
| 509 | case Instruction::CONST_4: |
| 510 | case Instruction::CONST_16: |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 511 | GenConst(rl_dest, vB); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 512 | break; |
| 513 | |
| 514 | case Instruction::CONST_HIGH16: |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 515 | GenConst(rl_dest, vB << 16); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 516 | break; |
| 517 | |
| 518 | case Instruction::CONST_WIDE_16: |
| 519 | case Instruction::CONST_WIDE_32: |
Bill Buzbee | d61ba4b | 2014-01-13 21:44:01 +0000 | [diff] [blame] | 520 | GenConstWide(rl_dest, static_cast<int64_t>(static_cast<int32_t>(vB))); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 521 | break; |
| 522 | |
| 523 | case Instruction::CONST_WIDE: |
Bill Buzbee | d61ba4b | 2014-01-13 21:44:01 +0000 | [diff] [blame] | 524 | GenConstWide(rl_dest, mir->dalvikInsn.vB_wide); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 525 | break; |
| 526 | |
| 527 | case Instruction::CONST_WIDE_HIGH16: |
| 528 | rl_result = EvalLoc(rl_dest, kAnyReg, true); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 529 | LoadConstantWide(rl_result.reg, static_cast<int64_t>(vB) << 48); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 530 | StoreValueWide(rl_dest, rl_result); |
| 531 | break; |
| 532 | |
| 533 | case Instruction::MONITOR_ENTER: |
| 534 | GenMonitorEnter(opt_flags, rl_src[0]); |
| 535 | break; |
| 536 | |
| 537 | case Instruction::MONITOR_EXIT: |
| 538 | GenMonitorExit(opt_flags, rl_src[0]); |
| 539 | break; |
| 540 | |
| 541 | case Instruction::CHECK_CAST: { |
| 542 | GenCheckCast(mir->offset, vB, rl_src[0]); |
| 543 | break; |
| 544 | } |
| 545 | case Instruction::INSTANCE_OF: |
| 546 | GenInstanceof(vC, rl_dest, rl_src[0]); |
| 547 | break; |
| 548 | |
| 549 | case Instruction::NEW_INSTANCE: |
| 550 | GenNewInstance(vB, rl_dest); |
| 551 | break; |
| 552 | |
| 553 | case Instruction::THROW: |
| 554 | GenThrow(rl_src[0]); |
| 555 | break; |
| 556 | |
| 557 | case Instruction::ARRAY_LENGTH: |
| 558 | int len_offset; |
| 559 | len_offset = mirror::Array::LengthOffset().Int32Value(); |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 560 | rl_src[0] = LoadValue(rl_src[0], kRefReg); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 561 | GenNullCheck(rl_src[0].reg, opt_flags); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 562 | rl_result = EvalLoc(rl_dest, kCoreReg, true); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 563 | Load32Disp(rl_src[0].reg, len_offset, rl_result.reg); |
Dave Allison | f943914 | 2014-03-27 15:10:22 -0700 | [diff] [blame] | 564 | MarkPossibleNullPointerException(opt_flags); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 565 | StoreValue(rl_dest, rl_result); |
| 566 | break; |
| 567 | |
| 568 | case Instruction::CONST_STRING: |
| 569 | case Instruction::CONST_STRING_JUMBO: |
| 570 | GenConstString(vB, rl_dest); |
| 571 | break; |
| 572 | |
| 573 | case Instruction::CONST_CLASS: |
| 574 | GenConstClass(vB, rl_dest); |
| 575 | break; |
| 576 | |
| 577 | case Instruction::FILL_ARRAY_DATA: |
| 578 | GenFillArrayData(vB, rl_src[0]); |
| 579 | break; |
| 580 | |
| 581 | case Instruction::FILLED_NEW_ARRAY: |
| 582 | GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic, |
| 583 | false /* not range */)); |
| 584 | break; |
| 585 | |
| 586 | case Instruction::FILLED_NEW_ARRAY_RANGE: |
| 587 | GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic, |
| 588 | true /* range */)); |
| 589 | break; |
| 590 | |
| 591 | case Instruction::NEW_ARRAY: |
| 592 | GenNewArray(vC, rl_dest, rl_src[0]); |
| 593 | break; |
| 594 | |
| 595 | case Instruction::GOTO: |
| 596 | case Instruction::GOTO_16: |
| 597 | case Instruction::GOTO_32: |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 598 | if (mir_graph_->IsBackedge(bb, bb->taken) && |
| 599 | (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken))) { |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 600 | GenSuspendTestAndBranch(opt_flags, &label_list[bb->taken]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 601 | } else { |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 602 | OpUnconditionalBranch(&label_list[bb->taken]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 603 | } |
| 604 | break; |
| 605 | |
| 606 | case Instruction::PACKED_SWITCH: |
| 607 | GenPackedSwitch(mir, vB, rl_src[0]); |
| 608 | break; |
| 609 | |
| 610 | case Instruction::SPARSE_SWITCH: |
| 611 | GenSparseSwitch(mir, vB, rl_src[0]); |
| 612 | break; |
| 613 | |
| 614 | case Instruction::CMPL_FLOAT: |
| 615 | case Instruction::CMPG_FLOAT: |
| 616 | case Instruction::CMPL_DOUBLE: |
| 617 | case Instruction::CMPG_DOUBLE: |
| 618 | GenCmpFP(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 619 | break; |
| 620 | |
| 621 | case Instruction::CMP_LONG: |
| 622 | GenCmpLong(rl_dest, rl_src[0], rl_src[1]); |
| 623 | break; |
| 624 | |
| 625 | case Instruction::IF_EQ: |
| 626 | case Instruction::IF_NE: |
| 627 | case Instruction::IF_LT: |
| 628 | case Instruction::IF_GE: |
| 629 | case Instruction::IF_GT: |
| 630 | case Instruction::IF_LE: { |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 631 | LIR* taken = &label_list[bb->taken]; |
| 632 | LIR* fall_through = &label_list[bb->fall_through]; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 633 | // Result known at compile time? |
| 634 | if (rl_src[0].is_const && rl_src[1].is_const) { |
| 635 | bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg), |
| 636 | mir_graph_->ConstantValue(rl_src[1].orig_sreg)); |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 637 | BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through; |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 638 | if (mir_graph_->IsBackedge(bb, target_id) && |
| 639 | (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 640 | GenSuspendTest(opt_flags); |
| 641 | } |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 642 | OpUnconditionalBranch(&label_list[target_id]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 643 | } else { |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 644 | if (mir_graph_->IsBackwardsBranch(bb) && |
| 645 | (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) || |
| 646 | !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 647 | GenSuspendTest(opt_flags); |
| 648 | } |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 649 | GenCompareAndBranch(opcode, rl_src[0], rl_src[1], taken, fall_through); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 650 | } |
| 651 | break; |
| 652 | } |
| 653 | |
| 654 | case Instruction::IF_EQZ: |
| 655 | case Instruction::IF_NEZ: |
| 656 | case Instruction::IF_LTZ: |
| 657 | case Instruction::IF_GEZ: |
| 658 | case Instruction::IF_GTZ: |
| 659 | case Instruction::IF_LEZ: { |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 660 | LIR* taken = &label_list[bb->taken]; |
| 661 | LIR* fall_through = &label_list[bb->fall_through]; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 662 | // Result known at compile time? |
| 663 | if (rl_src[0].is_const) { |
| 664 | bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg), 0); |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 665 | BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through; |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 666 | if (mir_graph_->IsBackedge(bb, target_id) && |
| 667 | (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 668 | GenSuspendTest(opt_flags); |
| 669 | } |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 670 | OpUnconditionalBranch(&label_list[target_id]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 671 | } else { |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 672 | if (mir_graph_->IsBackwardsBranch(bb) && |
| 673 | (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) || |
| 674 | !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 675 | GenSuspendTest(opt_flags); |
| 676 | } |
| 677 | GenCompareZeroAndBranch(opcode, rl_src[0], taken, fall_through); |
| 678 | } |
| 679 | break; |
| 680 | } |
| 681 | |
| 682 | case Instruction::AGET_WIDE: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 683 | GenArrayGet(opt_flags, k64, rl_src[0], rl_src[1], rl_dest, 3); |
| 684 | break; |
| 685 | case Instruction::AGET_OBJECT: |
| 686 | GenArrayGet(opt_flags, kReference, rl_src[0], rl_src[1], rl_dest, 2); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 687 | break; |
| 688 | case Instruction::AGET: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 689 | GenArrayGet(opt_flags, k32, rl_src[0], rl_src[1], rl_dest, 2); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 690 | break; |
| 691 | case Instruction::AGET_BOOLEAN: |
| 692 | GenArrayGet(opt_flags, kUnsignedByte, rl_src[0], rl_src[1], rl_dest, 0); |
| 693 | break; |
| 694 | case Instruction::AGET_BYTE: |
| 695 | GenArrayGet(opt_flags, kSignedByte, rl_src[0], rl_src[1], rl_dest, 0); |
| 696 | break; |
| 697 | case Instruction::AGET_CHAR: |
| 698 | GenArrayGet(opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], rl_dest, 1); |
| 699 | break; |
| 700 | case Instruction::AGET_SHORT: |
| 701 | GenArrayGet(opt_flags, kSignedHalf, rl_src[0], rl_src[1], rl_dest, 1); |
| 702 | break; |
| 703 | case Instruction::APUT_WIDE: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 704 | GenArrayPut(opt_flags, k64, rl_src[1], rl_src[2], rl_src[0], 3, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 705 | break; |
| 706 | case Instruction::APUT: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 707 | GenArrayPut(opt_flags, k32, rl_src[1], rl_src[2], rl_src[0], 2, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 708 | break; |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 709 | case Instruction::APUT_OBJECT: { |
| 710 | bool is_null = mir_graph_->IsConstantNullRef(rl_src[0]); |
| 711 | bool is_safe = is_null; // Always safe to store null. |
| 712 | if (!is_safe) { |
| 713 | // Check safety from verifier type information. |
Vladimir Marko | 2730db0 | 2014-01-27 11:15:17 +0000 | [diff] [blame] | 714 | const DexCompilationUnit* unit = mir_graph_->GetCurrentDexCompilationUnit(); |
| 715 | is_safe = cu_->compiler_driver->IsSafeCast(unit, mir->offset); |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 716 | } |
| 717 | if (is_null || is_safe) { |
| 718 | // Store of constant null doesn't require an assignability test and can be generated inline |
| 719 | // without fixed register usage or a card mark. |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 720 | GenArrayPut(opt_flags, kReference, rl_src[1], rl_src[2], rl_src[0], 2, !is_null); |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 721 | } else { |
| 722 | GenArrayObjPut(opt_flags, rl_src[1], rl_src[2], rl_src[0]); |
| 723 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 724 | break; |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 725 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 726 | case Instruction::APUT_SHORT: |
| 727 | case Instruction::APUT_CHAR: |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 728 | GenArrayPut(opt_flags, kUnsignedHalf, rl_src[1], rl_src[2], rl_src[0], 1, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 729 | break; |
| 730 | case Instruction::APUT_BYTE: |
| 731 | case Instruction::APUT_BOOLEAN: |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 732 | GenArrayPut(opt_flags, kUnsignedByte, rl_src[1], rl_src[2], rl_src[0], 0, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 733 | break; |
| 734 | |
| 735 | case Instruction::IGET_OBJECT: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 736 | GenIGet(mir, opt_flags, kReference, rl_dest, rl_src[0], false, true); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 737 | break; |
| 738 | |
| 739 | case Instruction::IGET_WIDE: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 740 | GenIGet(mir, opt_flags, k64, rl_dest, rl_src[0], true, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 741 | break; |
| 742 | |
| 743 | case Instruction::IGET: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 744 | GenIGet(mir, opt_flags, k32, rl_dest, rl_src[0], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 745 | break; |
| 746 | |
| 747 | case Instruction::IGET_CHAR: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 748 | GenIGet(mir, opt_flags, kUnsignedHalf, rl_dest, rl_src[0], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 749 | break; |
| 750 | |
| 751 | case Instruction::IGET_SHORT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 752 | GenIGet(mir, opt_flags, kSignedHalf, rl_dest, rl_src[0], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 753 | break; |
| 754 | |
| 755 | case Instruction::IGET_BOOLEAN: |
| 756 | case Instruction::IGET_BYTE: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 757 | GenIGet(mir, opt_flags, kUnsignedByte, rl_dest, rl_src[0], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 758 | break; |
| 759 | |
| 760 | case Instruction::IPUT_WIDE: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 761 | GenIPut(mir, opt_flags, k64, rl_src[0], rl_src[1], true, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 762 | break; |
| 763 | |
| 764 | case Instruction::IPUT_OBJECT: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 765 | GenIPut(mir, opt_flags, kReference, rl_src[0], rl_src[1], false, true); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 766 | break; |
| 767 | |
| 768 | case Instruction::IPUT: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 769 | GenIPut(mir, opt_flags, k32, rl_src[0], rl_src[1], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 770 | break; |
| 771 | |
| 772 | case Instruction::IPUT_BOOLEAN: |
| 773 | case Instruction::IPUT_BYTE: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 774 | GenIPut(mir, opt_flags, kUnsignedByte, rl_src[0], rl_src[1], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 775 | break; |
| 776 | |
| 777 | case Instruction::IPUT_CHAR: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 778 | GenIPut(mir, opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 779 | break; |
| 780 | |
| 781 | case Instruction::IPUT_SHORT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 782 | GenIPut(mir, opt_flags, kSignedHalf, rl_src[0], rl_src[1], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 783 | break; |
| 784 | |
| 785 | case Instruction::SGET_OBJECT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 786 | GenSget(mir, rl_dest, false, true); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 787 | break; |
| 788 | case Instruction::SGET: |
| 789 | case Instruction::SGET_BOOLEAN: |
| 790 | case Instruction::SGET_BYTE: |
| 791 | case Instruction::SGET_CHAR: |
| 792 | case Instruction::SGET_SHORT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 793 | GenSget(mir, rl_dest, false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 794 | break; |
| 795 | |
| 796 | case Instruction::SGET_WIDE: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 797 | GenSget(mir, rl_dest, true, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 798 | break; |
| 799 | |
| 800 | case Instruction::SPUT_OBJECT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 801 | GenSput(mir, rl_src[0], false, true); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 802 | break; |
| 803 | |
| 804 | case Instruction::SPUT: |
| 805 | case Instruction::SPUT_BOOLEAN: |
| 806 | case Instruction::SPUT_BYTE: |
| 807 | case Instruction::SPUT_CHAR: |
| 808 | case Instruction::SPUT_SHORT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 809 | GenSput(mir, rl_src[0], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 810 | break; |
| 811 | |
| 812 | case Instruction::SPUT_WIDE: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 813 | GenSput(mir, rl_src[0], true, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 814 | break; |
| 815 | |
| 816 | case Instruction::INVOKE_STATIC_RANGE: |
| 817 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, true)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 818 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 819 | // If the invocation is not inlined, we can assume there is already a |
| 820 | // suspend check at the return site |
| 821 | mir_graph_->AppendGenSuspendTestList(bb); |
| 822 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 823 | break; |
| 824 | case Instruction::INVOKE_STATIC: |
| 825 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, false)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 826 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 827 | mir_graph_->AppendGenSuspendTestList(bb); |
| 828 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 829 | break; |
| 830 | |
| 831 | case Instruction::INVOKE_DIRECT: |
| 832 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, false)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 833 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 834 | mir_graph_->AppendGenSuspendTestList(bb); |
| 835 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 836 | break; |
| 837 | case Instruction::INVOKE_DIRECT_RANGE: |
| 838 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, true)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 839 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 840 | mir_graph_->AppendGenSuspendTestList(bb); |
| 841 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 842 | break; |
| 843 | |
| 844 | case Instruction::INVOKE_VIRTUAL: |
| 845 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, false)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 846 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 847 | mir_graph_->AppendGenSuspendTestList(bb); |
| 848 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 849 | break; |
| 850 | case Instruction::INVOKE_VIRTUAL_RANGE: |
| 851 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, true)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 852 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 853 | mir_graph_->AppendGenSuspendTestList(bb); |
| 854 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 855 | break; |
| 856 | |
| 857 | case Instruction::INVOKE_SUPER: |
| 858 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, false)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 859 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 860 | mir_graph_->AppendGenSuspendTestList(bb); |
| 861 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 862 | break; |
| 863 | case Instruction::INVOKE_SUPER_RANGE: |
| 864 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, true)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 865 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 866 | mir_graph_->AppendGenSuspendTestList(bb); |
| 867 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 868 | break; |
| 869 | |
| 870 | case Instruction::INVOKE_INTERFACE: |
| 871 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, false)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 872 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 873 | mir_graph_->AppendGenSuspendTestList(bb); |
| 874 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 875 | break; |
| 876 | case Instruction::INVOKE_INTERFACE_RANGE: |
| 877 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, true)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 878 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 879 | mir_graph_->AppendGenSuspendTestList(bb); |
| 880 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 881 | break; |
| 882 | |
| 883 | case Instruction::NEG_INT: |
| 884 | case Instruction::NOT_INT: |
| 885 | GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 886 | break; |
| 887 | |
| 888 | case Instruction::NEG_LONG: |
| 889 | case Instruction::NOT_LONG: |
| 890 | GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 891 | break; |
| 892 | |
| 893 | case Instruction::NEG_FLOAT: |
| 894 | GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 895 | break; |
| 896 | |
| 897 | case Instruction::NEG_DOUBLE: |
| 898 | GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 899 | break; |
| 900 | |
| 901 | case Instruction::INT_TO_LONG: |
| 902 | GenIntToLong(rl_dest, rl_src[0]); |
| 903 | break; |
| 904 | |
| 905 | case Instruction::LONG_TO_INT: |
| 906 | rl_src[0] = UpdateLocWide(rl_src[0]); |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 907 | rl_src[0] = NarrowRegLoc(rl_src[0]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 908 | StoreValue(rl_dest, rl_src[0]); |
| 909 | break; |
| 910 | |
| 911 | case Instruction::INT_TO_BYTE: |
| 912 | case Instruction::INT_TO_SHORT: |
| 913 | case Instruction::INT_TO_CHAR: |
| 914 | GenIntNarrowing(opcode, rl_dest, rl_src[0]); |
| 915 | break; |
| 916 | |
| 917 | case Instruction::INT_TO_FLOAT: |
| 918 | case Instruction::INT_TO_DOUBLE: |
| 919 | case Instruction::LONG_TO_FLOAT: |
| 920 | case Instruction::LONG_TO_DOUBLE: |
| 921 | case Instruction::FLOAT_TO_INT: |
| 922 | case Instruction::FLOAT_TO_LONG: |
| 923 | case Instruction::FLOAT_TO_DOUBLE: |
| 924 | case Instruction::DOUBLE_TO_INT: |
| 925 | case Instruction::DOUBLE_TO_LONG: |
| 926 | case Instruction::DOUBLE_TO_FLOAT: |
| 927 | GenConversion(opcode, rl_dest, rl_src[0]); |
| 928 | break; |
| 929 | |
| 930 | |
| 931 | case Instruction::ADD_INT: |
| 932 | case Instruction::ADD_INT_2ADDR: |
| 933 | case Instruction::MUL_INT: |
| 934 | case Instruction::MUL_INT_2ADDR: |
| 935 | case Instruction::AND_INT: |
| 936 | case Instruction::AND_INT_2ADDR: |
| 937 | case Instruction::OR_INT: |
| 938 | case Instruction::OR_INT_2ADDR: |
| 939 | case Instruction::XOR_INT: |
| 940 | case Instruction::XOR_INT_2ADDR: |
| 941 | if (rl_src[0].is_const && |
| 942 | InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[0]))) { |
| 943 | GenArithOpIntLit(opcode, rl_dest, rl_src[1], |
| 944 | mir_graph_->ConstantValue(rl_src[0].orig_sreg)); |
| 945 | } else if (rl_src[1].is_const && |
| 946 | InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) { |
| 947 | GenArithOpIntLit(opcode, rl_dest, rl_src[0], |
| 948 | mir_graph_->ConstantValue(rl_src[1].orig_sreg)); |
| 949 | } else { |
| 950 | GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 951 | } |
| 952 | break; |
| 953 | |
| 954 | case Instruction::SUB_INT: |
| 955 | case Instruction::SUB_INT_2ADDR: |
| 956 | case Instruction::DIV_INT: |
| 957 | case Instruction::DIV_INT_2ADDR: |
| 958 | case Instruction::REM_INT: |
| 959 | case Instruction::REM_INT_2ADDR: |
| 960 | case Instruction::SHL_INT: |
| 961 | case Instruction::SHL_INT_2ADDR: |
| 962 | case Instruction::SHR_INT: |
| 963 | case Instruction::SHR_INT_2ADDR: |
| 964 | case Instruction::USHR_INT: |
| 965 | case Instruction::USHR_INT_2ADDR: |
| 966 | if (rl_src[1].is_const && |
| 967 | InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) { |
| 968 | GenArithOpIntLit(opcode, rl_dest, rl_src[0], mir_graph_->ConstantValue(rl_src[1])); |
| 969 | } else { |
| 970 | GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 971 | } |
| 972 | break; |
| 973 | |
| 974 | case Instruction::ADD_LONG: |
| 975 | case Instruction::SUB_LONG: |
| 976 | case Instruction::AND_LONG: |
| 977 | case Instruction::OR_LONG: |
| 978 | case Instruction::XOR_LONG: |
| 979 | case Instruction::ADD_LONG_2ADDR: |
| 980 | case Instruction::SUB_LONG_2ADDR: |
| 981 | case Instruction::AND_LONG_2ADDR: |
| 982 | case Instruction::OR_LONG_2ADDR: |
| 983 | case Instruction::XOR_LONG_2ADDR: |
| 984 | if (rl_src[0].is_const || rl_src[1].is_const) { |
| 985 | GenArithImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 986 | break; |
| 987 | } |
| 988 | // Note: intentional fallthrough. |
| 989 | |
| 990 | case Instruction::MUL_LONG: |
| 991 | case Instruction::DIV_LONG: |
| 992 | case Instruction::REM_LONG: |
| 993 | case Instruction::MUL_LONG_2ADDR: |
| 994 | case Instruction::DIV_LONG_2ADDR: |
| 995 | case Instruction::REM_LONG_2ADDR: |
| 996 | GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 997 | break; |
| 998 | |
| 999 | case Instruction::SHL_LONG: |
| 1000 | case Instruction::SHR_LONG: |
| 1001 | case Instruction::USHR_LONG: |
| 1002 | case Instruction::SHL_LONG_2ADDR: |
| 1003 | case Instruction::SHR_LONG_2ADDR: |
| 1004 | case Instruction::USHR_LONG_2ADDR: |
| 1005 | if (rl_src[1].is_const) { |
| 1006 | GenShiftImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1007 | } else { |
| 1008 | GenShiftOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1009 | } |
| 1010 | break; |
| 1011 | |
| 1012 | case Instruction::ADD_FLOAT: |
| 1013 | case Instruction::SUB_FLOAT: |
| 1014 | case Instruction::MUL_FLOAT: |
| 1015 | case Instruction::DIV_FLOAT: |
| 1016 | case Instruction::REM_FLOAT: |
| 1017 | case Instruction::ADD_FLOAT_2ADDR: |
| 1018 | case Instruction::SUB_FLOAT_2ADDR: |
| 1019 | case Instruction::MUL_FLOAT_2ADDR: |
| 1020 | case Instruction::DIV_FLOAT_2ADDR: |
| 1021 | case Instruction::REM_FLOAT_2ADDR: |
| 1022 | GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1023 | break; |
| 1024 | |
| 1025 | case Instruction::ADD_DOUBLE: |
| 1026 | case Instruction::SUB_DOUBLE: |
| 1027 | case Instruction::MUL_DOUBLE: |
| 1028 | case Instruction::DIV_DOUBLE: |
| 1029 | case Instruction::REM_DOUBLE: |
| 1030 | case Instruction::ADD_DOUBLE_2ADDR: |
| 1031 | case Instruction::SUB_DOUBLE_2ADDR: |
| 1032 | case Instruction::MUL_DOUBLE_2ADDR: |
| 1033 | case Instruction::DIV_DOUBLE_2ADDR: |
| 1034 | case Instruction::REM_DOUBLE_2ADDR: |
| 1035 | GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1036 | break; |
| 1037 | |
| 1038 | case Instruction::RSUB_INT: |
| 1039 | case Instruction::ADD_INT_LIT16: |
| 1040 | case Instruction::MUL_INT_LIT16: |
| 1041 | case Instruction::DIV_INT_LIT16: |
| 1042 | case Instruction::REM_INT_LIT16: |
| 1043 | case Instruction::AND_INT_LIT16: |
| 1044 | case Instruction::OR_INT_LIT16: |
| 1045 | case Instruction::XOR_INT_LIT16: |
| 1046 | case Instruction::ADD_INT_LIT8: |
| 1047 | case Instruction::RSUB_INT_LIT8: |
| 1048 | case Instruction::MUL_INT_LIT8: |
| 1049 | case Instruction::DIV_INT_LIT8: |
| 1050 | case Instruction::REM_INT_LIT8: |
| 1051 | case Instruction::AND_INT_LIT8: |
| 1052 | case Instruction::OR_INT_LIT8: |
| 1053 | case Instruction::XOR_INT_LIT8: |
| 1054 | case Instruction::SHL_INT_LIT8: |
| 1055 | case Instruction::SHR_INT_LIT8: |
| 1056 | case Instruction::USHR_INT_LIT8: |
| 1057 | GenArithOpIntLit(opcode, rl_dest, rl_src[0], vC); |
| 1058 | break; |
| 1059 | |
| 1060 | default: |
| 1061 | LOG(FATAL) << "Unexpected opcode: " << opcode; |
| 1062 | } |
buzbee | 082833c | 2014-05-17 23:16:26 -0700 | [diff] [blame] | 1063 | DCHECK(CheckCorePoolSanity()); |
Brian Carlstrom | 1895ea3 | 2013-07-18 13:28:37 -0700 | [diff] [blame] | 1064 | } // NOLINT(readability/fn_size) |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1065 | |
| 1066 | // Process extended MIR instructions |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 1067 | void Mir2Lir::HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1068 | switch (static_cast<ExtendedMIROpcode>(mir->dalvikInsn.opcode)) { |
| 1069 | case kMirOpCopy: { |
| 1070 | RegLocation rl_src = mir_graph_->GetSrc(mir, 0); |
| 1071 | RegLocation rl_dest = mir_graph_->GetDest(mir); |
| 1072 | StoreValue(rl_dest, rl_src); |
| 1073 | break; |
| 1074 | } |
| 1075 | case kMirOpFusedCmplFloat: |
| 1076 | GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, false /*double*/); |
| 1077 | break; |
| 1078 | case kMirOpFusedCmpgFloat: |
| 1079 | GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, false /*double*/); |
| 1080 | break; |
| 1081 | case kMirOpFusedCmplDouble: |
| 1082 | GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, true /*double*/); |
| 1083 | break; |
| 1084 | case kMirOpFusedCmpgDouble: |
| 1085 | GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, true /*double*/); |
| 1086 | break; |
| 1087 | case kMirOpFusedCmpLong: |
| 1088 | GenFusedLongCmpBranch(bb, mir); |
| 1089 | break; |
| 1090 | case kMirOpSelect: |
| 1091 | GenSelect(bb, mir); |
| 1092 | break; |
Mark Mendell | d65c51a | 2014-04-29 16:55:20 -0400 | [diff] [blame] | 1093 | case kMirOpPhi: |
| 1094 | case kMirOpNop: |
| 1095 | case kMirOpNullCheck: |
| 1096 | case kMirOpRangeCheck: |
| 1097 | case kMirOpDivZeroCheck: |
| 1098 | case kMirOpCheck: |
| 1099 | case kMirOpCheckPart2: |
| 1100 | // Ignore these known opcodes |
| 1101 | break; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1102 | default: |
Mark Mendell | d65c51a | 2014-04-29 16:55:20 -0400 | [diff] [blame] | 1103 | // Give the backends a chance to handle unknown extended MIR opcodes. |
| 1104 | GenMachineSpecificExtendedMethodMIR(bb, mir); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1105 | break; |
| 1106 | } |
| 1107 | } |
| 1108 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1109 | void Mir2Lir::GenPrintLabel(MIR* mir) { |
| 1110 | // Mark the beginning of a Dalvik instruction for line tracking. |
| 1111 | if (cu_->verbose) { |
| 1112 | char* inst_str = mir_graph_->GetDalvikDisassembly(mir); |
| 1113 | MarkBoundary(mir->offset, inst_str); |
| 1114 | } |
| 1115 | } |
| 1116 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1117 | // Handle the content in each basic block. |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 1118 | bool Mir2Lir::MethodBlockCodeGen(BasicBlock* bb) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1119 | if (bb->block_type == kDead) return false; |
| 1120 | current_dalvik_offset_ = bb->start_offset; |
| 1121 | MIR* mir; |
| 1122 | int block_id = bb->id; |
| 1123 | |
| 1124 | block_label_list_[block_id].operands[0] = bb->start_offset; |
| 1125 | |
| 1126 | // Insert the block label. |
| 1127 | block_label_list_[block_id].opcode = kPseudoNormalBlockLabel; |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 1128 | block_label_list_[block_id].flags.fixup = kFixupLabel; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1129 | AppendLIR(&block_label_list_[block_id]); |
| 1130 | |
| 1131 | LIR* head_lir = NULL; |
| 1132 | |
| 1133 | // If this is a catch block, export the start address. |
| 1134 | if (bb->catch_entry) { |
| 1135 | head_lir = NewLIR0(kPseudoExportedPC); |
| 1136 | } |
| 1137 | |
| 1138 | // Free temp registers and reset redundant store tracking. |
buzbee | ba57451 | 2014-05-12 15:13:16 -0700 | [diff] [blame] | 1139 | ClobberAllTemps(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1140 | |
| 1141 | if (bb->block_type == kEntryBlock) { |
buzbee | 56c7178 | 2013-09-05 17:13:19 -0700 | [diff] [blame] | 1142 | ResetRegPool(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1143 | int start_vreg = cu_->num_dalvik_registers - cu_->num_ins; |
| 1144 | GenEntrySequence(&mir_graph_->reg_location_[start_vreg], |
| 1145 | mir_graph_->reg_location_[mir_graph_->GetMethodSReg()]); |
| 1146 | } else if (bb->block_type == kExitBlock) { |
buzbee | 56c7178 | 2013-09-05 17:13:19 -0700 | [diff] [blame] | 1147 | ResetRegPool(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1148 | GenExitSequence(); |
| 1149 | } |
| 1150 | |
| 1151 | for (mir = bb->first_mir_insn; mir != NULL; mir = mir->next) { |
| 1152 | ResetRegPool(); |
| 1153 | if (cu_->disable_opt & (1 << kTrackLiveTemps)) { |
buzbee | ba57451 | 2014-05-12 15:13:16 -0700 | [diff] [blame] | 1154 | ClobberAllTemps(); |
buzbee | 7a11ab0 | 2014-04-28 20:02:38 -0700 | [diff] [blame] | 1155 | // Reset temp allocation to minimize differences when A/B testing. |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 1156 | reg_pool_->ResetNextTemp(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1157 | } |
| 1158 | |
| 1159 | if (cu_->disable_opt & (1 << kSuppressLoads)) { |
| 1160 | ResetDefTracking(); |
| 1161 | } |
| 1162 | |
| 1163 | // Reset temp tracking sanity check. |
| 1164 | if (kIsDebugBuild) { |
| 1165 | live_sreg_ = INVALID_SREG; |
| 1166 | } |
| 1167 | |
| 1168 | current_dalvik_offset_ = mir->offset; |
| 1169 | int opcode = mir->dalvikInsn.opcode; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1170 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1171 | GenPrintLabel(mir); |
| 1172 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1173 | // Remember the first LIR for this block. |
| 1174 | if (head_lir == NULL) { |
buzbee | 252254b | 2013-09-08 16:20:53 -0700 | [diff] [blame] | 1175 | head_lir = &block_label_list_[bb->id]; |
| 1176 | // Set the first label as a scheduling barrier. |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 1177 | DCHECK(!head_lir->flags.use_def_invalid); |
Vladimir Marko | 8dea81c | 2014-06-06 14:50:36 +0100 | [diff] [blame] | 1178 | head_lir->u.m.def_mask = &kEncodeAll; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1179 | } |
| 1180 | |
| 1181 | if (opcode == kMirOpCheck) { |
| 1182 | // Combine check and work halves of throwing instruction. |
| 1183 | MIR* work_half = mir->meta.throw_insn; |
| 1184 | mir->dalvikInsn.opcode = work_half->dalvikInsn.opcode; |
Vladimir Marko | 4376c87 | 2014-01-23 12:39:29 +0000 | [diff] [blame] | 1185 | mir->meta = work_half->meta; // Whatever the work_half had, we need to copy it. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1186 | opcode = work_half->dalvikInsn.opcode; |
| 1187 | SSARepresentation* ssa_rep = work_half->ssa_rep; |
| 1188 | work_half->ssa_rep = mir->ssa_rep; |
| 1189 | mir->ssa_rep = ssa_rep; |
| 1190 | work_half->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpCheckPart2); |
Vladimir Marko | 4376c87 | 2014-01-23 12:39:29 +0000 | [diff] [blame] | 1191 | work_half->meta.throw_insn = mir; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1192 | } |
| 1193 | |
buzbee | 35ba7f3 | 2014-05-31 08:59:01 -0700 | [diff] [blame] | 1194 | if (MIRGraph::IsPseudoMirOp(opcode)) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1195 | HandleExtendedMethodMIR(bb, mir); |
| 1196 | continue; |
| 1197 | } |
| 1198 | |
| 1199 | CompileDalvikInstruction(mir, bb, block_label_list_); |
| 1200 | } |
| 1201 | |
| 1202 | if (head_lir) { |
| 1203 | // Eliminate redundant loads/stores and delay stores into later slots. |
| 1204 | ApplyLocalOptimizations(head_lir, last_lir_insn_); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1205 | } |
| 1206 | return false; |
| 1207 | } |
| 1208 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1209 | bool Mir2Lir::SpecialMIR2LIR(const InlineMethod& special) { |
Vladimir Marko | 5816ed4 | 2013-11-27 17:04:20 +0000 | [diff] [blame] | 1210 | cu_->NewTimingSplit("SpecialMIR2LIR"); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1211 | // Find the first DalvikByteCode block. |
| 1212 | int num_reachable_blocks = mir_graph_->GetNumReachableBlocks(); |
| 1213 | BasicBlock*bb = NULL; |
| 1214 | for (int idx = 0; idx < num_reachable_blocks; idx++) { |
| 1215 | // TODO: no direct access of growable lists. |
| 1216 | int dfs_index = mir_graph_->GetDfsOrder()->Get(idx); |
| 1217 | bb = mir_graph_->GetBasicBlock(dfs_index); |
| 1218 | if (bb->block_type == kDalvikByteCode) { |
| 1219 | break; |
| 1220 | } |
| 1221 | } |
| 1222 | if (bb == NULL) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1223 | return false; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1224 | } |
| 1225 | DCHECK_EQ(bb->start_offset, 0); |
| 1226 | DCHECK(bb->first_mir_insn != NULL); |
| 1227 | |
| 1228 | // Get the first instruction. |
| 1229 | MIR* mir = bb->first_mir_insn; |
| 1230 | |
| 1231 | // Free temp registers and reset redundant store tracking. |
| 1232 | ResetRegPool(); |
| 1233 | ResetDefTracking(); |
buzbee | ba57451 | 2014-05-12 15:13:16 -0700 | [diff] [blame] | 1234 | ClobberAllTemps(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1235 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1236 | return GenSpecialCase(bb, mir, special); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1237 | } |
| 1238 | |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 1239 | void Mir2Lir::MethodMIR2LIR() { |
buzbee | a61f495 | 2013-08-23 14:27:06 -0700 | [diff] [blame] | 1240 | cu_->NewTimingSplit("MIR2LIR"); |
| 1241 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1242 | // Hold the labels of each block. |
| 1243 | block_label_list_ = |
Mathieu Chartier | f6c4b3b | 2013-08-24 16:11:37 -0700 | [diff] [blame] | 1244 | static_cast<LIR*>(arena_->Alloc(sizeof(LIR) * mir_graph_->GetNumBlocks(), |
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 1245 | kArenaAllocLIR)); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1246 | |
buzbee | 56c7178 | 2013-09-05 17:13:19 -0700 | [diff] [blame] | 1247 | PreOrderDfsIterator iter(mir_graph_); |
buzbee | 252254b | 2013-09-08 16:20:53 -0700 | [diff] [blame] | 1248 | BasicBlock* curr_bb = iter.Next(); |
| 1249 | BasicBlock* next_bb = iter.Next(); |
| 1250 | while (curr_bb != NULL) { |
| 1251 | MethodBlockCodeGen(curr_bb); |
| 1252 | // If the fall_through block is no longer laid out consecutively, drop in a branch. |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 1253 | BasicBlock* curr_bb_fall_through = mir_graph_->GetBasicBlock(curr_bb->fall_through); |
| 1254 | if ((curr_bb_fall_through != NULL) && (curr_bb_fall_through != next_bb)) { |
| 1255 | OpUnconditionalBranch(&block_label_list_[curr_bb->fall_through]); |
buzbee | 252254b | 2013-09-08 16:20:53 -0700 | [diff] [blame] | 1256 | } |
| 1257 | curr_bb = next_bb; |
| 1258 | do { |
| 1259 | next_bb = iter.Next(); |
| 1260 | } while ((next_bb != NULL) && (next_bb->block_type == kDead)); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1261 | } |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1262 | HandleSlowPaths(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1263 | } |
| 1264 | |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1265 | // |
| 1266 | // LIR Slow Path |
| 1267 | // |
| 1268 | |
Mingyao Yang | 6ffcfa0 | 2014-04-25 11:06:00 -0700 | [diff] [blame] | 1269 | LIR* Mir2Lir::LIRSlowPath::GenerateTargetLabel(int opcode) { |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1270 | m2l_->SetCurrentDexPc(current_dex_pc_); |
Mingyao Yang | 6ffcfa0 | 2014-04-25 11:06:00 -0700 | [diff] [blame] | 1271 | LIR* target = m2l_->NewLIR0(opcode); |
Vladimir Marko | 3bc8615 | 2014-03-13 14:11:28 +0000 | [diff] [blame] | 1272 | fromfast_->target = target; |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1273 | return target; |
| 1274 | } |
Vladimir Marko | 3bc8615 | 2014-03-13 14:11:28 +0000 | [diff] [blame] | 1275 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1276 | } // namespace art |