blob: 961cd4f06b2bef864388cfc3fdc5e911dbd0dae7 [file] [log] [blame]
Brian Carlstrom7940e442013-07-12 13:46:57 -07001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Andreas Gampe0b9203e2015-01-22 20:39:27 -080017#include "mir_to_lir-inl.h"
18
Brian Carlstrom7940e442013-07-12 13:46:57 -070019#include "dex/dataflow_iterator-inl.h"
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -080020#include "dex/quick/dex_file_method_inliner.h"
Andreas Gampe0b9203e2015-01-22 20:39:27 -080021#include "driver/compiler_driver.h"
Fred Shih37f05ef2014-07-16 18:38:08 -070022#include "primitive.h"
Ian Rogers02ed4c02013-09-06 13:10:04 -070023#include "thread-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070024
25namespace art {
26
Vladimir Marko6ce3eba2015-02-16 13:05:59 +000027class Mir2Lir::SpecialSuspendCheckSlowPath : public Mir2Lir::LIRSlowPath {
28 public:
29 SpecialSuspendCheckSlowPath(Mir2Lir* m2l, LIR* branch, LIR* cont)
Vladimir Marko0b40ecf2015-03-20 12:08:03 +000030 : LIRSlowPath(m2l, branch, cont),
Vladimir Marko6ce3eba2015-02-16 13:05:59 +000031 num_used_args_(0u) {
32 }
33
34 void PreserveArg(int in_position) {
35 // Avoid duplicates.
36 for (size_t i = 0; i != num_used_args_; ++i) {
37 if (used_args_[i] == in_position) {
38 return;
39 }
40 }
41 DCHECK_LT(num_used_args_, kMaxArgsToPreserve);
42 used_args_[num_used_args_] = in_position;
43 ++num_used_args_;
44 }
45
46 void Compile() OVERRIDE {
47 m2l_->ResetRegPool();
48 m2l_->ResetDefTracking();
49 GenerateTargetLabel(kPseudoSuspendTarget);
50
51 m2l_->LockCallTemps();
52
53 // Generate frame.
54 m2l_->GenSpecialEntryForSuspend();
55
56 // Spill all args.
57 for (size_t i = 0, end = m2l_->in_to_reg_storage_mapping_.GetEndMappedIn(); i < end;
58 i += m2l_->in_to_reg_storage_mapping_.GetShorty(i).IsWide() ? 2u : 1u) {
59 m2l_->SpillArg(i);
60 }
61
62 m2l_->FreeCallTemps();
63
64 // Do the actual suspend call to runtime.
65 m2l_->CallRuntimeHelper(kQuickTestSuspend, true);
66
67 m2l_->LockCallTemps();
68
69 // Unspill used regs. (Don't unspill unused args.)
70 for (size_t i = 0; i != num_used_args_; ++i) {
71 m2l_->UnspillArg(used_args_[i]);
72 }
73
74 // Pop the frame.
75 m2l_->GenSpecialExitForSuspend();
76
77 // Branch to the continue label.
78 DCHECK(cont_ != nullptr);
79 m2l_->OpUnconditionalBranch(cont_);
80
81 m2l_->FreeCallTemps();
82 }
83
84 private:
85 static constexpr size_t kMaxArgsToPreserve = 2u;
86 size_t num_used_args_;
87 int used_args_[kMaxArgsToPreserve];
88};
89
buzbeea0cd2d72014-06-01 09:33:49 -070090RegisterClass Mir2Lir::ShortyToRegClass(char shorty_type) {
91 RegisterClass res;
92 switch (shorty_type) {
93 case 'L':
94 res = kRefReg;
95 break;
96 case 'F':
97 // Expected fallthrough.
98 case 'D':
99 res = kFPReg;
100 break;
101 default:
102 res = kCoreReg;
103 }
104 return res;
105}
106
107RegisterClass Mir2Lir::LocToRegClass(RegLocation loc) {
108 RegisterClass res;
109 if (loc.fp) {
110 DCHECK(!loc.ref) << "At most, one of ref/fp may be set";
111 res = kFPReg;
112 } else if (loc.ref) {
113 res = kRefReg;
114 } else {
115 res = kCoreReg;
116 }
117 return res;
118}
119
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000120void Mir2Lir::LockArg(size_t in_position) {
121 RegStorage reg_arg = in_to_reg_storage_mapping_.GetReg(in_position);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800122
Serguei Katkov717a3e42014-11-13 17:19:42 +0600123 if (reg_arg.Valid()) {
124 LockTemp(reg_arg);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800125 }
126}
127
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000128RegStorage Mir2Lir::LoadArg(size_t in_position, RegisterClass reg_class, bool wide) {
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100129 ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
Nicolas Geoffray42fcd982014-04-22 11:03:52 +0000130 int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +0700131
132 if (cu_->instruction_set == kX86) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800133 /*
134 * When doing a call for x86, it moves the stack pointer in order to push return.
135 * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800136 */
137 offset += sizeof(uint32_t);
138 }
139
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +0700140 if (cu_->instruction_set == kX86_64) {
141 /*
142 * When doing a call for x86, it moves the stack pointer in order to push return.
143 * Thus, we add another 8 bytes to figure out the out of caller (in of callee).
144 */
145 offset += sizeof(uint64_t);
146 }
147
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000148 RegStorage reg_arg = in_to_reg_storage_mapping_.GetReg(in_position);
Serguei Katkov717a3e42014-11-13 17:19:42 +0600149
150 // TODO: REVISIT: This adds a spill of low part while we could just copy it.
151 if (reg_arg.Valid() && wide && (reg_arg.GetWideKind() == kNotWide)) {
152 // For wide register we've got only half of it.
153 // Flush it to memory then.
154 StoreBaseDisp(TargetPtrReg(kSp), offset, reg_arg, k32, kNotVolatile);
155 reg_arg = RegStorage::InvalidReg();
156 }
157
158 if (!reg_arg.Valid()) {
159 reg_arg = wide ? AllocTypedTempWide(false, reg_class) : AllocTypedTemp(false, reg_class);
160 LoadBaseDisp(TargetPtrReg(kSp), offset, reg_arg, wide ? k64 : k32, kNotVolatile);
161 } else {
162 // Check if we need to copy the arg to a different reg_class.
163 if (!RegClassMatches(reg_class, reg_arg)) {
164 if (wide) {
165 RegStorage new_reg = AllocTypedTempWide(false, reg_class);
166 OpRegCopyWide(new_reg, reg_arg);
167 reg_arg = new_reg;
168 } else {
169 RegStorage new_reg = AllocTypedTemp(false, reg_class);
170 OpRegCopy(new_reg, reg_arg);
171 reg_arg = new_reg;
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +0700172 }
173 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800174 }
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100175 return reg_arg;
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800176}
177
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000178void Mir2Lir::LoadArgDirect(size_t in_position, RegLocation rl_dest) {
Serguei Katkov717a3e42014-11-13 17:19:42 +0600179 DCHECK_EQ(rl_dest.location, kLocPhysReg);
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100180 ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
Nicolas Geoffray42fcd982014-04-22 11:03:52 +0000181 int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +0700182 if (cu_->instruction_set == kX86) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800183 /*
184 * When doing a call for x86, it moves the stack pointer in order to push return.
185 * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800186 */
187 offset += sizeof(uint32_t);
188 }
189
Dmitry Petrochenko58994cd2014-05-17 01:02:18 +0700190 if (cu_->instruction_set == kX86_64) {
191 /*
192 * When doing a call for x86, it moves the stack pointer in order to push return.
193 * Thus, we add another 8 bytes to figure out the out of caller (in of callee).
194 */
195 offset += sizeof(uint64_t);
196 }
197
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000198 RegStorage reg_arg = in_to_reg_storage_mapping_.GetReg(in_position);
Serguei Katkov717a3e42014-11-13 17:19:42 +0600199
200 // TODO: REVISIT: This adds a spill of low part while we could just copy it.
201 if (reg_arg.Valid() && rl_dest.wide && (reg_arg.GetWideKind() == kNotWide)) {
202 // For wide register we've got only half of it.
203 // Flush it to memory then.
204 StoreBaseDisp(TargetPtrReg(kSp), offset, reg_arg, k32, kNotVolatile);
205 reg_arg = RegStorage::InvalidReg();
206 }
207
208 if (!reg_arg.Valid()) {
209 LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, rl_dest.wide ? k64 : k32, kNotVolatile);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800210 } else {
Serguei Katkov717a3e42014-11-13 17:19:42 +0600211 if (rl_dest.wide) {
212 OpRegCopyWide(rl_dest.reg, reg_arg);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800213 } else {
Serguei Katkov717a3e42014-11-13 17:19:42 +0600214 OpRegCopy(rl_dest.reg, reg_arg);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800215 }
216 }
217}
218
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000219void Mir2Lir::SpillArg(size_t in_position) {
220 RegStorage reg_arg = in_to_reg_storage_mapping_.GetReg(in_position);
221
222 if (reg_arg.Valid()) {
223 int offset = frame_size_ + StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
224 ShortyArg arg = in_to_reg_storage_mapping_.GetShorty(in_position);
225 OpSize size = arg.IsRef() ? kReference :
226 (arg.IsWide() && reg_arg.GetWideKind() == kWide) ? k64 : k32;
227 StoreBaseDisp(TargetPtrReg(kSp), offset, reg_arg, size, kNotVolatile);
228 }
229}
230
231void Mir2Lir::UnspillArg(size_t in_position) {
232 RegStorage reg_arg = in_to_reg_storage_mapping_.GetReg(in_position);
233
234 if (reg_arg.Valid()) {
235 int offset = frame_size_ + StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
236 ShortyArg arg = in_to_reg_storage_mapping_.GetShorty(in_position);
237 OpSize size = arg.IsRef() ? kReference :
238 (arg.IsWide() && reg_arg.GetWideKind() == kWide) ? k64 : k32;
239 LoadBaseDisp(TargetPtrReg(kSp), offset, reg_arg, size, kNotVolatile);
240 }
241}
242
243Mir2Lir::SpecialSuspendCheckSlowPath* Mir2Lir::GenSpecialSuspendTest() {
244 LockCallTemps();
245 LIR* branch = OpTestSuspend(nullptr);
246 FreeCallTemps();
247 LIR* cont = NewLIR0(kPseudoTargetLabel);
248 SpecialSuspendCheckSlowPath* slow_path =
249 new (arena_) SpecialSuspendCheckSlowPath(this, branch, cont);
250 AddSlowPath(slow_path);
251 return slow_path;
252}
253
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800254bool Mir2Lir::GenSpecialIGet(MIR* mir, const InlineMethod& special) {
255 // FastInstance() already checked by DexFileMethodInliner.
256 const InlineIGetIPutData& data = special.d.ifield_data;
Vladimir Markoe1fced12014-04-04 14:52:53 +0100257 if (data.method_is_static != 0u || data.object_arg != 0u) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800258 // The object is not "this" and has to be null-checked.
259 return false;
260 }
261
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000262 OpSize size;
Fred Shih37f05ef2014-07-16 18:38:08 -0700263 switch (data.op_variant) {
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000264 case InlineMethodAnalyser::IGetVariant(Instruction::IGET):
265 size = in_to_reg_storage_mapping_.GetShorty(data.src_arg).IsFP() ? kSingle : k32;
Fred Shih37f05ef2014-07-16 18:38:08 -0700266 break;
267 case InlineMethodAnalyser::IGetVariant(Instruction::IGET_WIDE):
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000268 size = in_to_reg_storage_mapping_.GetShorty(data.src_arg).IsFP() ? kDouble : k64;
269 break;
270 case InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT):
271 size = kReference;
Fred Shih37f05ef2014-07-16 18:38:08 -0700272 break;
273 case InlineMethodAnalyser::IGetVariant(Instruction::IGET_SHORT):
274 size = kSignedHalf;
275 break;
276 case InlineMethodAnalyser::IGetVariant(Instruction::IGET_CHAR):
277 size = kUnsignedHalf;
278 break;
279 case InlineMethodAnalyser::IGetVariant(Instruction::IGET_BYTE):
280 size = kSignedByte;
281 break;
282 case InlineMethodAnalyser::IGetVariant(Instruction::IGET_BOOLEAN):
283 size = kUnsignedByte;
284 break;
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000285 default:
286 LOG(FATAL) << "Unknown variant: " << data.op_variant;
287 UNREACHABLE();
Fred Shih37f05ef2014-07-16 18:38:08 -0700288 }
Vladimir Marko455759b2014-05-06 20:49:36 +0100289
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800290 // Point of no return - no aborts after this
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000291 if (!kLeafOptimization) {
292 auto* slow_path = GenSpecialSuspendTest();
293 slow_path->PreserveArg(data.object_arg);
294 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800295 LockArg(data.object_arg);
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000296 GenPrintLabel(mir);
buzbeea0cd2d72014-06-01 09:33:49 -0700297 RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100298 RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
buzbeea0cd2d72014-06-01 09:33:49 -0700299 RegisterClass ret_reg_class = ShortyToRegClass(cu_->shorty[0]);
Fred Shih37f05ef2014-07-16 18:38:08 -0700300 RegLocation rl_dest = IsWide(size) ? GetReturnWide(ret_reg_class) : GetReturn(ret_reg_class);
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100301 RegStorage r_result = rl_dest.reg;
302 if (!RegClassMatches(reg_class, r_result)) {
Fred Shih37f05ef2014-07-16 18:38:08 -0700303 r_result = IsWide(size) ? AllocTypedTempWide(rl_dest.fp, reg_class)
304 : AllocTypedTemp(rl_dest.fp, reg_class);
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100305 }
Fred Shih37f05ef2014-07-16 18:38:08 -0700306 if (IsRef(size)) {
Andreas Gampe3c12c512014-06-24 18:46:29 +0000307 LoadRefDisp(reg_obj, data.field_offset, r_result, data.is_volatile ? kVolatile : kNotVolatile);
Vladimir Marko674744e2014-04-24 15:18:26 +0100308 } else {
Andreas Gampe3c12c512014-06-24 18:46:29 +0000309 LoadBaseDisp(reg_obj, data.field_offset, r_result, size, data.is_volatile ? kVolatile :
310 kNotVolatile);
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100311 }
buzbeeb5860fb2014-06-21 15:31:01 -0700312 if (r_result.NotExactlyEquals(rl_dest.reg)) {
Fred Shih37f05ef2014-07-16 18:38:08 -0700313 if (IsWide(size)) {
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100314 OpRegCopyWide(rl_dest.reg, r_result);
315 } else {
316 OpRegCopy(rl_dest.reg, r_result);
317 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800318 }
319 return true;
320}
321
322bool Mir2Lir::GenSpecialIPut(MIR* mir, const InlineMethod& special) {
323 // FastInstance() already checked by DexFileMethodInliner.
324 const InlineIGetIPutData& data = special.d.ifield_data;
Vladimir Markoe1fced12014-04-04 14:52:53 +0100325 if (data.method_is_static != 0u || data.object_arg != 0u) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800326 // The object is not "this" and has to be null-checked.
327 return false;
328 }
Vladimir Markoe1fced12014-04-04 14:52:53 +0100329 if (data.return_arg_plus1 != 0u) {
330 // The setter returns a method argument which we don't support here.
331 return false;
332 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800333
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000334 OpSize size;
Fred Shih37f05ef2014-07-16 18:38:08 -0700335 switch (data.op_variant) {
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000336 case InlineMethodAnalyser::IPutVariant(Instruction::IPUT):
337 size = in_to_reg_storage_mapping_.GetShorty(data.src_arg).IsFP() ? kSingle : k32;
Fred Shih37f05ef2014-07-16 18:38:08 -0700338 break;
339 case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_WIDE):
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000340 size = in_to_reg_storage_mapping_.GetShorty(data.src_arg).IsFP() ? kDouble : k64;
341 break;
342 case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_OBJECT):
343 size = kReference;
Fred Shih37f05ef2014-07-16 18:38:08 -0700344 break;
345 case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_SHORT):
346 size = kSignedHalf;
347 break;
348 case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_CHAR):
349 size = kUnsignedHalf;
350 break;
351 case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_BYTE):
352 size = kSignedByte;
353 break;
354 case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_BOOLEAN):
355 size = kUnsignedByte;
356 break;
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000357 default:
358 LOG(FATAL) << "Unknown variant: " << data.op_variant;
359 UNREACHABLE();
Fred Shih37f05ef2014-07-16 18:38:08 -0700360 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800361
362 // Point of no return - no aborts after this
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000363 if (!kLeafOptimization) {
364 auto* slow_path = GenSpecialSuspendTest();
365 slow_path->PreserveArg(data.object_arg);
366 slow_path->PreserveArg(data.src_arg);
367 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800368 LockArg(data.object_arg);
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000369 LockArg(data.src_arg);
370 GenPrintLabel(mir);
buzbeea0cd2d72014-06-01 09:33:49 -0700371 RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
Vladimir Markoc93ac8b2014-05-13 17:53:49 +0100372 RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
Fred Shih37f05ef2014-07-16 18:38:08 -0700373 RegStorage reg_src = LoadArg(data.src_arg, reg_class, IsWide(size));
374 if (IsRef(size)) {
Andreas Gampe3c12c512014-06-24 18:46:29 +0000375 StoreRefDisp(reg_obj, data.field_offset, reg_src, data.is_volatile ? kVolatile : kNotVolatile);
Vladimir Marko674744e2014-04-24 15:18:26 +0100376 } else {
Andreas Gampe3c12c512014-06-24 18:46:29 +0000377 StoreBaseDisp(reg_obj, data.field_offset, reg_src, size, data.is_volatile ? kVolatile :
378 kNotVolatile);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800379 }
Fred Shih37f05ef2014-07-16 18:38:08 -0700380 if (IsRef(size)) {
Vladimir Marko743b98c2014-11-24 19:45:41 +0000381 MarkGCCard(0, reg_src, reg_obj);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800382 }
383 return true;
384}
385
386bool Mir2Lir::GenSpecialIdentity(MIR* mir, const InlineMethod& special) {
387 const InlineReturnArgData& data = special.d.return_data;
Vladimir Markoe3e02602014-03-12 15:42:41 +0000388 bool wide = (data.is_wide != 0u);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800389
390 // Point of no return - no aborts after this
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000391 if (!kLeafOptimization) {
392 auto* slow_path = GenSpecialSuspendTest();
393 slow_path->PreserveArg(data.arg);
394 }
395 LockArg(data.arg);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800396 GenPrintLabel(mir);
buzbeea0cd2d72014-06-01 09:33:49 -0700397 RegisterClass reg_class = ShortyToRegClass(cu_->shorty[0]);
398 RegLocation rl_dest = wide ? GetReturnWide(reg_class) : GetReturn(reg_class);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800399 LoadArgDirect(data.arg, rl_dest);
400 return true;
401}
402
403/*
404 * Special-case code generation for simple non-throwing leaf methods.
405 */
406bool Mir2Lir::GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special) {
407 DCHECK(special.flags & kInlineSpecial);
408 current_dalvik_offset_ = mir->offset;
Vladimir Marko767c7522015-03-20 12:47:30 +0000409 DCHECK(current_mir_ == nullptr); // Safepoints attributed to prologue.
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800410 MIR* return_mir = nullptr;
411 bool successful = false;
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000412 EnsureInitializedArgMappingToPhysicalReg();
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800413
414 switch (special.opcode) {
415 case kInlineOpNop:
416 successful = true;
417 DCHECK_EQ(mir->dalvikInsn.opcode, Instruction::RETURN_VOID);
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000418 if (!kLeafOptimization) {
419 GenSpecialSuspendTest();
420 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800421 return_mir = mir;
422 break;
423 case kInlineOpNonWideConst: {
424 successful = true;
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000425 if (!kLeafOptimization) {
426 GenSpecialSuspendTest();
427 }
buzbeea0cd2d72014-06-01 09:33:49 -0700428 RegLocation rl_dest = GetReturn(ShortyToRegClass(cu_->shorty[0]));
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800429 GenPrintLabel(mir);
buzbee2700f7e2014-03-07 09:46:20 -0800430 LoadConstant(rl_dest.reg, static_cast<int>(special.d.data));
Jean Christophe Beylercdacac42014-03-13 14:54:59 -0700431 return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800432 break;
433 }
434 case kInlineOpReturnArg:
435 successful = GenSpecialIdentity(mir, special);
436 return_mir = mir;
437 break;
438 case kInlineOpIGet:
439 successful = GenSpecialIGet(mir, special);
Jean Christophe Beylercdacac42014-03-13 14:54:59 -0700440 return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800441 break;
442 case kInlineOpIPut:
443 successful = GenSpecialIPut(mir, special);
Jean Christophe Beylercdacac42014-03-13 14:54:59 -0700444 return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800445 break;
446 default:
447 break;
448 }
449
450 if (successful) {
Vladimir Marko39d95e62014-02-28 12:51:24 +0000451 if (kIsDebugBuild) {
452 // Clear unreachable catch entries.
453 mir_graph_->catches_.clear();
454 }
455
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800456 // Handle verbosity for return MIR.
457 if (return_mir != nullptr) {
458 current_dalvik_offset_ = return_mir->offset;
459 // Not handling special identity case because it already generated code as part
460 // of the return. The label should have been added before any code was generated.
461 if (special.opcode != kInlineOpReturnArg) {
462 GenPrintLabel(return_mir);
463 }
464 }
465 GenSpecialExitSequence();
466
Vladimir Marko6ce3eba2015-02-16 13:05:59 +0000467 if (!kLeafOptimization) {
468 HandleSlowPaths();
469 } else {
470 core_spill_mask_ = 0;
471 num_core_spills_ = 0;
472 fp_spill_mask_ = 0;
473 num_fp_spills_ = 0;
474 frame_size_ = 0;
475 core_vmap_table_.clear();
476 fp_vmap_table_.clear();
477 }
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800478 }
479
480 return successful;
481}
482
Brian Carlstrom7940e442013-07-12 13:46:57 -0700483/*
484 * Target-independent code generation. Use only high-level
485 * load/store utilities here, or target-dependent genXX() handlers
486 * when necessary.
487 */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700488void Mir2Lir::CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700489 RegLocation rl_src[3];
490 RegLocation rl_dest = mir_graph_->GetBadLoc();
491 RegLocation rl_result = mir_graph_->GetBadLoc();
Ian Rogersc35cda82014-11-10 16:34:29 -0800492 const Instruction::Code opcode = mir->dalvikInsn.opcode;
493 const int opt_flags = mir->optimization_flags;
494 const uint32_t vB = mir->dalvikInsn.vB;
495 const uint32_t vC = mir->dalvikInsn.vC;
buzbee082833c2014-05-17 23:16:26 -0700496 DCHECK(CheckCorePoolSanity()) << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " @ 0x:"
497 << std::hex << current_dalvik_offset_;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700498
499 // Prep Src and Dest locations.
500 int next_sreg = 0;
501 int next_loc = 0;
Jean Christophe Beylercc794c32014-05-02 09:34:13 -0700502 uint64_t attrs = MIRGraph::GetDataFlowAttributes(opcode);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700503 rl_src[0] = rl_src[1] = rl_src[2] = mir_graph_->GetBadLoc();
504 if (attrs & DF_UA) {
505 if (attrs & DF_A_WIDE) {
506 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
507 next_sreg+= 2;
508 } else {
509 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
510 next_sreg++;
511 }
512 }
513 if (attrs & DF_UB) {
514 if (attrs & DF_B_WIDE) {
515 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
516 next_sreg+= 2;
517 } else {
518 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
519 next_sreg++;
520 }
521 }
522 if (attrs & DF_UC) {
523 if (attrs & DF_C_WIDE) {
524 rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
525 } else {
526 rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
527 }
528 }
529 if (attrs & DF_DA) {
530 if (attrs & DF_A_WIDE) {
531 rl_dest = mir_graph_->GetDestWide(mir);
532 } else {
533 rl_dest = mir_graph_->GetDest(mir);
534 }
535 }
536 switch (opcode) {
537 case Instruction::NOP:
538 break;
539
540 case Instruction::MOVE_EXCEPTION:
541 GenMoveException(rl_dest);
542 break;
543
Mathieu Chartierd7cbf8a2015-03-19 12:43:20 -0700544 case Instruction::RETURN_VOID_NO_BARRIER:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700545 case Instruction::RETURN_VOID:
546 if (((cu_->access_flags & kAccConstructor) != 0) &&
547 cu_->compiler_driver->RequiresConstructorBarrier(Thread::Current(), cu_->dex_file,
548 cu_->class_def_idx)) {
549 GenMemBarrier(kStoreStore);
550 }
Wei Jin04f4d8a2014-05-29 18:04:29 -0700551 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700552 GenSuspendTest(opt_flags);
553 }
554 break;
555
Brian Carlstrom7940e442013-07-12 13:46:57 -0700556 case Instruction::RETURN_OBJECT:
buzbeea0cd2d72014-06-01 09:33:49 -0700557 DCHECK(rl_src[0].ref);
Ian Rogersfc787ec2014-10-09 21:56:44 -0700558 FALLTHROUGH_INTENDED;
buzbeea0cd2d72014-06-01 09:33:49 -0700559 case Instruction::RETURN:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700560 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700561 GenSuspendTest(opt_flags);
562 }
buzbeea0cd2d72014-06-01 09:33:49 -0700563 DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
564 StoreValue(GetReturn(LocToRegClass(rl_src[0])), rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700565 break;
566
567 case Instruction::RETURN_WIDE:
Wei Jin04f4d8a2014-05-29 18:04:29 -0700568 if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700569 GenSuspendTest(opt_flags);
570 }
buzbeea0cd2d72014-06-01 09:33:49 -0700571 DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
572 StoreValueWide(GetReturnWide(LocToRegClass(rl_src[0])), rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700573 break;
574
575 case Instruction::MOVE_RESULT_WIDE:
buzbeea0cd2d72014-06-01 09:33:49 -0700576 StoreValueWide(rl_dest, GetReturnWide(LocToRegClass(rl_dest)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700577 break;
578
579 case Instruction::MOVE_RESULT:
580 case Instruction::MOVE_RESULT_OBJECT:
buzbeea0cd2d72014-06-01 09:33:49 -0700581 StoreValue(rl_dest, GetReturn(LocToRegClass(rl_dest)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700582 break;
583
584 case Instruction::MOVE:
585 case Instruction::MOVE_OBJECT:
586 case Instruction::MOVE_16:
587 case Instruction::MOVE_OBJECT_16:
588 case Instruction::MOVE_FROM16:
589 case Instruction::MOVE_OBJECT_FROM16:
590 StoreValue(rl_dest, rl_src[0]);
591 break;
592
593 case Instruction::MOVE_WIDE:
594 case Instruction::MOVE_WIDE_16:
595 case Instruction::MOVE_WIDE_FROM16:
596 StoreValueWide(rl_dest, rl_src[0]);
597 break;
598
599 case Instruction::CONST:
600 case Instruction::CONST_4:
601 case Instruction::CONST_16:
Mark Mendelle87f9b52014-04-30 14:13:18 -0400602 GenConst(rl_dest, vB);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700603 break;
604
605 case Instruction::CONST_HIGH16:
Mark Mendelle87f9b52014-04-30 14:13:18 -0400606 GenConst(rl_dest, vB << 16);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700607 break;
608
609 case Instruction::CONST_WIDE_16:
610 case Instruction::CONST_WIDE_32:
Bill Buzbeed61ba4b2014-01-13 21:44:01 +0000611 GenConstWide(rl_dest, static_cast<int64_t>(static_cast<int32_t>(vB)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700612 break;
613
614 case Instruction::CONST_WIDE:
Bill Buzbeed61ba4b2014-01-13 21:44:01 +0000615 GenConstWide(rl_dest, mir->dalvikInsn.vB_wide);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700616 break;
617
618 case Instruction::CONST_WIDE_HIGH16:
619 rl_result = EvalLoc(rl_dest, kAnyReg, true);
buzbee2700f7e2014-03-07 09:46:20 -0800620 LoadConstantWide(rl_result.reg, static_cast<int64_t>(vB) << 48);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700621 StoreValueWide(rl_dest, rl_result);
622 break;
623
624 case Instruction::MONITOR_ENTER:
625 GenMonitorEnter(opt_flags, rl_src[0]);
626 break;
627
628 case Instruction::MONITOR_EXIT:
629 GenMonitorExit(opt_flags, rl_src[0]);
630 break;
631
632 case Instruction::CHECK_CAST: {
Vladimir Marko22fe45d2015-03-18 11:33:58 +0000633 GenCheckCast(opt_flags, mir->offset, vB, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700634 break;
635 }
636 case Instruction::INSTANCE_OF:
637 GenInstanceof(vC, rl_dest, rl_src[0]);
638 break;
639
640 case Instruction::NEW_INSTANCE:
641 GenNewInstance(vB, rl_dest);
642 break;
643
644 case Instruction::THROW:
645 GenThrow(rl_src[0]);
646 break;
647
Ian Rogersc35cda82014-11-10 16:34:29 -0800648 case Instruction::ARRAY_LENGTH: {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700649 int len_offset;
650 len_offset = mirror::Array::LengthOffset().Int32Value();
buzbeea0cd2d72014-06-01 09:33:49 -0700651 rl_src[0] = LoadValue(rl_src[0], kRefReg);
buzbee2700f7e2014-03-07 09:46:20 -0800652 GenNullCheck(rl_src[0].reg, opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700653 rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee695d13a2014-04-19 13:32:20 -0700654 Load32Disp(rl_src[0].reg, len_offset, rl_result.reg);
Dave Allisonf9439142014-03-27 15:10:22 -0700655 MarkPossibleNullPointerException(opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700656 StoreValue(rl_dest, rl_result);
657 break;
Ian Rogersc35cda82014-11-10 16:34:29 -0800658 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700659 case Instruction::CONST_STRING:
660 case Instruction::CONST_STRING_JUMBO:
661 GenConstString(vB, rl_dest);
662 break;
663
664 case Instruction::CONST_CLASS:
665 GenConstClass(vB, rl_dest);
666 break;
667
668 case Instruction::FILL_ARRAY_DATA:
Razvan A Lupusoru8d0d03e2014-06-06 17:04:52 -0700669 GenFillArrayData(mir, vB, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700670 break;
671
672 case Instruction::FILLED_NEW_ARRAY:
673 GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
674 false /* not range */));
675 break;
676
677 case Instruction::FILLED_NEW_ARRAY_RANGE:
678 GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
679 true /* range */));
680 break;
681
682 case Instruction::NEW_ARRAY:
683 GenNewArray(vC, rl_dest, rl_src[0]);
684 break;
685
686 case Instruction::GOTO:
687 case Instruction::GOTO_16:
688 case Instruction::GOTO_32:
Vladimir Marko8b858e12014-11-27 14:52:37 +0000689 if (mir_graph_->IsBackEdge(bb, bb->taken)) {
buzbee0d829482013-10-11 15:24:55 -0700690 GenSuspendTestAndBranch(opt_flags, &label_list[bb->taken]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700691 } else {
buzbee0d829482013-10-11 15:24:55 -0700692 OpUnconditionalBranch(&label_list[bb->taken]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700693 }
694 break;
695
696 case Instruction::PACKED_SWITCH:
697 GenPackedSwitch(mir, vB, rl_src[0]);
698 break;
699
700 case Instruction::SPARSE_SWITCH:
701 GenSparseSwitch(mir, vB, rl_src[0]);
702 break;
703
704 case Instruction::CMPL_FLOAT:
705 case Instruction::CMPG_FLOAT:
706 case Instruction::CMPL_DOUBLE:
707 case Instruction::CMPG_DOUBLE:
708 GenCmpFP(opcode, rl_dest, rl_src[0], rl_src[1]);
709 break;
710
711 case Instruction::CMP_LONG:
712 GenCmpLong(rl_dest, rl_src[0], rl_src[1]);
713 break;
714
715 case Instruction::IF_EQ:
716 case Instruction::IF_NE:
717 case Instruction::IF_LT:
718 case Instruction::IF_GE:
719 case Instruction::IF_GT:
720 case Instruction::IF_LE: {
Vladimir Marko8b858e12014-11-27 14:52:37 +0000721 if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
Vladimir Marko7ab2fce2014-11-28 13:38:28 +0000722 GenSuspendTest(opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700723 }
Vladimir Marko8b858e12014-11-27 14:52:37 +0000724 LIR* taken = &label_list[bb->taken];
Vladimir Marko7ab2fce2014-11-28 13:38:28 +0000725 GenCompareAndBranch(opcode, rl_src[0], rl_src[1], taken);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700726 break;
Ian Rogersc35cda82014-11-10 16:34:29 -0800727 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700728 case Instruction::IF_EQZ:
729 case Instruction::IF_NEZ:
730 case Instruction::IF_LTZ:
731 case Instruction::IF_GEZ:
732 case Instruction::IF_GTZ:
733 case Instruction::IF_LEZ: {
Vladimir Marko8b858e12014-11-27 14:52:37 +0000734 if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
Vladimir Marko7ab2fce2014-11-28 13:38:28 +0000735 GenSuspendTest(opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700736 }
Vladimir Marko8b858e12014-11-27 14:52:37 +0000737 LIR* taken = &label_list[bb->taken];
Vladimir Marko7ab2fce2014-11-28 13:38:28 +0000738 GenCompareZeroAndBranch(opcode, rl_src[0], taken);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700739 break;
Ian Rogersc35cda82014-11-10 16:34:29 -0800740 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700741
742 case Instruction::AGET_WIDE:
Mark Mendellca541342014-10-15 16:59:49 -0400743 GenArrayGet(opt_flags, rl_dest.fp ? kDouble : k64, rl_src[0], rl_src[1], rl_dest, 3);
buzbee695d13a2014-04-19 13:32:20 -0700744 break;
745 case Instruction::AGET_OBJECT:
746 GenArrayGet(opt_flags, kReference, rl_src[0], rl_src[1], rl_dest, 2);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700747 break;
748 case Instruction::AGET:
Mark Mendellca541342014-10-15 16:59:49 -0400749 GenArrayGet(opt_flags, rl_dest.fp ? kSingle : k32, rl_src[0], rl_src[1], rl_dest, 2);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700750 break;
751 case Instruction::AGET_BOOLEAN:
752 GenArrayGet(opt_flags, kUnsignedByte, rl_src[0], rl_src[1], rl_dest, 0);
753 break;
754 case Instruction::AGET_BYTE:
755 GenArrayGet(opt_flags, kSignedByte, rl_src[0], rl_src[1], rl_dest, 0);
756 break;
757 case Instruction::AGET_CHAR:
758 GenArrayGet(opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
759 break;
760 case Instruction::AGET_SHORT:
761 GenArrayGet(opt_flags, kSignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
762 break;
763 case Instruction::APUT_WIDE:
Mark Mendellca541342014-10-15 16:59:49 -0400764 GenArrayPut(opt_flags, rl_src[0].fp ? kDouble : k64, rl_src[1], rl_src[2], rl_src[0], 3, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700765 break;
766 case Instruction::APUT:
Mark Mendellca541342014-10-15 16:59:49 -0400767 GenArrayPut(opt_flags, rl_src[0].fp ? kSingle : k32, rl_src[1], rl_src[2], rl_src[0], 2, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700768 break;
Ian Rogersa9a82542013-10-04 11:17:26 -0700769 case Instruction::APUT_OBJECT: {
770 bool is_null = mir_graph_->IsConstantNullRef(rl_src[0]);
771 bool is_safe = is_null; // Always safe to store null.
772 if (!is_safe) {
773 // Check safety from verifier type information.
Vladimir Marko2730db02014-01-27 11:15:17 +0000774 const DexCompilationUnit* unit = mir_graph_->GetCurrentDexCompilationUnit();
775 is_safe = cu_->compiler_driver->IsSafeCast(unit, mir->offset);
Ian Rogersa9a82542013-10-04 11:17:26 -0700776 }
777 if (is_null || is_safe) {
778 // Store of constant null doesn't require an assignability test and can be generated inline
779 // without fixed register usage or a card mark.
buzbee695d13a2014-04-19 13:32:20 -0700780 GenArrayPut(opt_flags, kReference, rl_src[1], rl_src[2], rl_src[0], 2, !is_null);
Ian Rogersa9a82542013-10-04 11:17:26 -0700781 } else {
782 GenArrayObjPut(opt_flags, rl_src[1], rl_src[2], rl_src[0]);
783 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700784 break;
Ian Rogersa9a82542013-10-04 11:17:26 -0700785 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700786 case Instruction::APUT_SHORT:
787 case Instruction::APUT_CHAR:
Ian Rogersa9a82542013-10-04 11:17:26 -0700788 GenArrayPut(opt_flags, kUnsignedHalf, rl_src[1], rl_src[2], rl_src[0], 1, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700789 break;
790 case Instruction::APUT_BYTE:
791 case Instruction::APUT_BOOLEAN:
Ian Rogersa9a82542013-10-04 11:17:26 -0700792 GenArrayPut(opt_flags, kUnsignedByte, rl_src[1], rl_src[2], rl_src[0], 0, false);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700793 break;
794
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800795 case Instruction::IGET_OBJECT_QUICK:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700796 case Instruction::IGET_OBJECT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700797 GenIGet(mir, opt_flags, kReference, Primitive::kPrimNot, rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700798 break;
799
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800800 case Instruction::IGET_WIDE_QUICK:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700801 case Instruction::IGET_WIDE:
Fred Shih37f05ef2014-07-16 18:38:08 -0700802 // kPrimLong and kPrimDouble share the same entrypoints.
Mark Mendellca541342014-10-15 16:59:49 -0400803 if (rl_dest.fp) {
804 GenIGet(mir, opt_flags, kDouble, Primitive::kPrimDouble, rl_dest, rl_src[0]);
805 } else {
806 GenIGet(mir, opt_flags, k64, Primitive::kPrimLong, rl_dest, rl_src[0]);
807 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700808 break;
809
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800810 case Instruction::IGET_QUICK:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700811 case Instruction::IGET:
Mark Mendellca541342014-10-15 16:59:49 -0400812 if (rl_dest.fp) {
813 GenIGet(mir, opt_flags, kSingle, Primitive::kPrimFloat, rl_dest, rl_src[0]);
814 } else {
815 GenIGet(mir, opt_flags, k32, Primitive::kPrimInt, rl_dest, rl_src[0]);
816 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700817 break;
818
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800819 case Instruction::IGET_CHAR_QUICK:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700820 case Instruction::IGET_CHAR:
Fred Shih37f05ef2014-07-16 18:38:08 -0700821 GenIGet(mir, opt_flags, kUnsignedHalf, Primitive::kPrimChar, rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700822 break;
823
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800824 case Instruction::IGET_SHORT_QUICK:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700825 case Instruction::IGET_SHORT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700826 GenIGet(mir, opt_flags, kSignedHalf, Primitive::kPrimShort, rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700827 break;
828
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800829 case Instruction::IGET_BOOLEAN_QUICK:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700830 case Instruction::IGET_BOOLEAN:
Fred Shih37f05ef2014-07-16 18:38:08 -0700831 GenIGet(mir, opt_flags, kUnsignedByte, Primitive::kPrimBoolean, rl_dest, rl_src[0]);
832 break;
833
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800834 case Instruction::IGET_BYTE_QUICK:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700835 case Instruction::IGET_BYTE:
Fred Shih37f05ef2014-07-16 18:38:08 -0700836 GenIGet(mir, opt_flags, kSignedByte, Primitive::kPrimByte, rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700837 break;
838
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800839 case Instruction::IPUT_WIDE_QUICK:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700840 case Instruction::IPUT_WIDE:
Mark Mendellca541342014-10-15 16:59:49 -0400841 GenIPut(mir, opt_flags, rl_src[0].fp ? kDouble : k64, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700842 break;
843
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800844 case Instruction::IPUT_OBJECT_QUICK:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700845 case Instruction::IPUT_OBJECT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700846 GenIPut(mir, opt_flags, kReference, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700847 break;
848
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800849 case Instruction::IPUT_QUICK:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700850 case Instruction::IPUT:
Mark Mendellca541342014-10-15 16:59:49 -0400851 GenIPut(mir, opt_flags, rl_src[0].fp ? kSingle : k32, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700852 break;
853
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800854 case Instruction::IPUT_BYTE_QUICK:
855 case Instruction::IPUT_BOOLEAN_QUICK:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700856 case Instruction::IPUT_BYTE:
Fred Shih37f05ef2014-07-16 18:38:08 -0700857 case Instruction::IPUT_BOOLEAN:
858 GenIPut(mir, opt_flags, kUnsignedByte, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700859 break;
860
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800861 case Instruction::IPUT_CHAR_QUICK:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700862 case Instruction::IPUT_CHAR:
Fred Shih37f05ef2014-07-16 18:38:08 -0700863 GenIPut(mir, opt_flags, kUnsignedHalf, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700864 break;
865
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800866 case Instruction::IPUT_SHORT_QUICK:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700867 case Instruction::IPUT_SHORT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700868 GenIPut(mir, opt_flags, kSignedHalf, rl_src[0], rl_src[1]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700869 break;
870
871 case Instruction::SGET_OBJECT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700872 GenSget(mir, rl_dest, kReference, Primitive::kPrimNot);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700873 break;
Fred Shih37f05ef2014-07-16 18:38:08 -0700874
Brian Carlstrom7940e442013-07-12 13:46:57 -0700875 case Instruction::SGET:
Mark Mendellca541342014-10-15 16:59:49 -0400876 GenSget(mir, rl_dest, rl_dest.fp ? kSingle : k32, Primitive::kPrimInt);
Fred Shih37f05ef2014-07-16 18:38:08 -0700877 break;
878
Brian Carlstrom7940e442013-07-12 13:46:57 -0700879 case Instruction::SGET_CHAR:
Fred Shih37f05ef2014-07-16 18:38:08 -0700880 GenSget(mir, rl_dest, kUnsignedHalf, Primitive::kPrimChar);
881 break;
882
Brian Carlstrom7940e442013-07-12 13:46:57 -0700883 case Instruction::SGET_SHORT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700884 GenSget(mir, rl_dest, kSignedHalf, Primitive::kPrimShort);
885 break;
886
887 case Instruction::SGET_BOOLEAN:
888 GenSget(mir, rl_dest, kUnsignedByte, Primitive::kPrimBoolean);
889 break;
890
891 case Instruction::SGET_BYTE:
892 GenSget(mir, rl_dest, kSignedByte, Primitive::kPrimByte);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700893 break;
894
895 case Instruction::SGET_WIDE:
Fred Shih37f05ef2014-07-16 18:38:08 -0700896 // kPrimLong and kPrimDouble share the same entrypoints.
Mark Mendellca541342014-10-15 16:59:49 -0400897 GenSget(mir, rl_dest, rl_dest.fp ? kDouble : k64, Primitive::kPrimDouble);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700898 break;
899
900 case Instruction::SPUT_OBJECT:
Fred Shih37f05ef2014-07-16 18:38:08 -0700901 GenSput(mir, rl_src[0], kReference);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700902 break;
903
904 case Instruction::SPUT:
Mark Mendellca541342014-10-15 16:59:49 -0400905 GenSput(mir, rl_src[0], rl_src[0].fp ? kSingle : k32);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700906 break;
907
Fred Shih37f05ef2014-07-16 18:38:08 -0700908 case Instruction::SPUT_BYTE:
909 case Instruction::SPUT_BOOLEAN:
910 GenSput(mir, rl_src[0], kUnsignedByte);
911 break;
912
913 case Instruction::SPUT_CHAR:
914 GenSput(mir, rl_src[0], kUnsignedHalf);
915 break;
916
917 case Instruction::SPUT_SHORT:
918 GenSput(mir, rl_src[0], kSignedHalf);
919 break;
920
921
Brian Carlstrom7940e442013-07-12 13:46:57 -0700922 case Instruction::SPUT_WIDE:
Mark Mendellca541342014-10-15 16:59:49 -0400923 GenSput(mir, rl_src[0], rl_src[0].fp ? kDouble : k64);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700924 break;
925
926 case Instruction::INVOKE_STATIC_RANGE:
927 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, true));
928 break;
929 case Instruction::INVOKE_STATIC:
930 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, false));
931 break;
932
933 case Instruction::INVOKE_DIRECT:
934 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, false));
935 break;
936 case Instruction::INVOKE_DIRECT_RANGE:
937 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, true));
938 break;
939
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800940 case Instruction::INVOKE_VIRTUAL_QUICK:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700941 case Instruction::INVOKE_VIRTUAL:
942 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, false));
943 break;
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800944
945 case Instruction::INVOKE_VIRTUAL_RANGE_QUICK:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700946 case Instruction::INVOKE_VIRTUAL_RANGE:
947 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, true));
948 break;
949
950 case Instruction::INVOKE_SUPER:
951 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, false));
952 break;
953 case Instruction::INVOKE_SUPER_RANGE:
954 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, true));
955 break;
956
957 case Instruction::INVOKE_INTERFACE:
958 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, false));
959 break;
960 case Instruction::INVOKE_INTERFACE_RANGE:
961 GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, true));
962 break;
963
964 case Instruction::NEG_INT:
965 case Instruction::NOT_INT:
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700966 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[0], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700967 break;
968
969 case Instruction::NEG_LONG:
970 case Instruction::NOT_LONG:
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -0700971 GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[0], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700972 break;
973
974 case Instruction::NEG_FLOAT:
975 GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[0]);
976 break;
977
978 case Instruction::NEG_DOUBLE:
979 GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[0]);
980 break;
981
982 case Instruction::INT_TO_LONG:
983 GenIntToLong(rl_dest, rl_src[0]);
984 break;
985
986 case Instruction::LONG_TO_INT:
Yevgeny Rouban6af82062014-11-26 18:11:54 +0600987 GenLongToInt(rl_dest, rl_src[0]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700988 break;
989
990 case Instruction::INT_TO_BYTE:
991 case Instruction::INT_TO_SHORT:
992 case Instruction::INT_TO_CHAR:
993 GenIntNarrowing(opcode, rl_dest, rl_src[0]);
994 break;
995
996 case Instruction::INT_TO_FLOAT:
997 case Instruction::INT_TO_DOUBLE:
998 case Instruction::LONG_TO_FLOAT:
999 case Instruction::LONG_TO_DOUBLE:
1000 case Instruction::FLOAT_TO_INT:
1001 case Instruction::FLOAT_TO_LONG:
1002 case Instruction::FLOAT_TO_DOUBLE:
1003 case Instruction::DOUBLE_TO_INT:
1004 case Instruction::DOUBLE_TO_LONG:
1005 case Instruction::DOUBLE_TO_FLOAT:
1006 GenConversion(opcode, rl_dest, rl_src[0]);
1007 break;
1008
1009
1010 case Instruction::ADD_INT:
1011 case Instruction::ADD_INT_2ADDR:
1012 case Instruction::MUL_INT:
1013 case Instruction::MUL_INT_2ADDR:
1014 case Instruction::AND_INT:
1015 case Instruction::AND_INT_2ADDR:
1016 case Instruction::OR_INT:
1017 case Instruction::OR_INT_2ADDR:
1018 case Instruction::XOR_INT:
1019 case Instruction::XOR_INT_2ADDR:
1020 if (rl_src[0].is_const &&
Matteo Franchinc763e352014-07-04 12:53:27 +01001021 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[0]), opcode)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001022 GenArithOpIntLit(opcode, rl_dest, rl_src[1],
1023 mir_graph_->ConstantValue(rl_src[0].orig_sreg));
1024 } else if (rl_src[1].is_const &&
Matteo Franchinc763e352014-07-04 12:53:27 +01001025 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]), opcode)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001026 GenArithOpIntLit(opcode, rl_dest, rl_src[0],
1027 mir_graph_->ConstantValue(rl_src[1].orig_sreg));
1028 } else {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001029 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001030 }
1031 break;
1032
1033 case Instruction::SUB_INT:
1034 case Instruction::SUB_INT_2ADDR:
1035 case Instruction::DIV_INT:
1036 case Instruction::DIV_INT_2ADDR:
1037 case Instruction::REM_INT:
1038 case Instruction::REM_INT_2ADDR:
1039 case Instruction::SHL_INT:
1040 case Instruction::SHL_INT_2ADDR:
1041 case Instruction::SHR_INT:
1042 case Instruction::SHR_INT_2ADDR:
1043 case Instruction::USHR_INT:
1044 case Instruction::USHR_INT_2ADDR:
1045 if (rl_src[1].is_const &&
Matteo Franchinc763e352014-07-04 12:53:27 +01001046 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]), opcode)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001047 GenArithOpIntLit(opcode, rl_dest, rl_src[0], mir_graph_->ConstantValue(rl_src[1]));
1048 } else {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001049 GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001050 }
1051 break;
1052
1053 case Instruction::ADD_LONG:
1054 case Instruction::SUB_LONG:
1055 case Instruction::AND_LONG:
1056 case Instruction::OR_LONG:
1057 case Instruction::XOR_LONG:
1058 case Instruction::ADD_LONG_2ADDR:
1059 case Instruction::SUB_LONG_2ADDR:
1060 case Instruction::AND_LONG_2ADDR:
1061 case Instruction::OR_LONG_2ADDR:
1062 case Instruction::XOR_LONG_2ADDR:
1063 if (rl_src[0].is_const || rl_src[1].is_const) {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001064 GenArithImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001065 break;
1066 }
Ian Rogersfc787ec2014-10-09 21:56:44 -07001067 FALLTHROUGH_INTENDED;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001068 case Instruction::MUL_LONG:
1069 case Instruction::DIV_LONG:
1070 case Instruction::REM_LONG:
1071 case Instruction::MUL_LONG_2ADDR:
1072 case Instruction::DIV_LONG_2ADDR:
1073 case Instruction::REM_LONG_2ADDR:
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001074 GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001075 break;
1076
1077 case Instruction::SHL_LONG:
1078 case Instruction::SHR_LONG:
1079 case Instruction::USHR_LONG:
1080 case Instruction::SHL_LONG_2ADDR:
1081 case Instruction::SHR_LONG_2ADDR:
1082 case Instruction::USHR_LONG_2ADDR:
1083 if (rl_src[1].is_const) {
Razvan A Lupusoru5c5676b2014-09-29 16:42:11 -07001084 GenShiftImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1], opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001085 } else {
1086 GenShiftOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
1087 }
1088 break;
1089
Ningsheng Jian675e09b2014-10-23 13:48:36 +08001090 case Instruction::DIV_FLOAT:
1091 case Instruction::DIV_FLOAT_2ADDR:
1092 if (HandleEasyFloatingPointDiv(rl_dest, rl_src[0], rl_src[1])) {
1093 break;
1094 }
1095 FALLTHROUGH_INTENDED;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001096 case Instruction::ADD_FLOAT:
1097 case Instruction::SUB_FLOAT:
1098 case Instruction::MUL_FLOAT:
Brian Carlstrom7940e442013-07-12 13:46:57 -07001099 case Instruction::REM_FLOAT:
1100 case Instruction::ADD_FLOAT_2ADDR:
1101 case Instruction::SUB_FLOAT_2ADDR:
1102 case Instruction::MUL_FLOAT_2ADDR:
Brian Carlstrom7940e442013-07-12 13:46:57 -07001103 case Instruction::REM_FLOAT_2ADDR:
1104 GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[1]);
1105 break;
1106
Ningsheng Jian675e09b2014-10-23 13:48:36 +08001107 case Instruction::DIV_DOUBLE:
1108 case Instruction::DIV_DOUBLE_2ADDR:
1109 if (HandleEasyFloatingPointDiv(rl_dest, rl_src[0], rl_src[1])) {
1110 break;
1111 }
1112 FALLTHROUGH_INTENDED;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001113 case Instruction::ADD_DOUBLE:
1114 case Instruction::SUB_DOUBLE:
1115 case Instruction::MUL_DOUBLE:
Brian Carlstrom7940e442013-07-12 13:46:57 -07001116 case Instruction::REM_DOUBLE:
1117 case Instruction::ADD_DOUBLE_2ADDR:
1118 case Instruction::SUB_DOUBLE_2ADDR:
1119 case Instruction::MUL_DOUBLE_2ADDR:
Brian Carlstrom7940e442013-07-12 13:46:57 -07001120 case Instruction::REM_DOUBLE_2ADDR:
1121 GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[1]);
1122 break;
1123
1124 case Instruction::RSUB_INT:
1125 case Instruction::ADD_INT_LIT16:
1126 case Instruction::MUL_INT_LIT16:
1127 case Instruction::DIV_INT_LIT16:
1128 case Instruction::REM_INT_LIT16:
1129 case Instruction::AND_INT_LIT16:
1130 case Instruction::OR_INT_LIT16:
1131 case Instruction::XOR_INT_LIT16:
1132 case Instruction::ADD_INT_LIT8:
1133 case Instruction::RSUB_INT_LIT8:
1134 case Instruction::MUL_INT_LIT8:
1135 case Instruction::DIV_INT_LIT8:
1136 case Instruction::REM_INT_LIT8:
1137 case Instruction::AND_INT_LIT8:
1138 case Instruction::OR_INT_LIT8:
1139 case Instruction::XOR_INT_LIT8:
1140 case Instruction::SHL_INT_LIT8:
1141 case Instruction::SHR_INT_LIT8:
1142 case Instruction::USHR_INT_LIT8:
1143 GenArithOpIntLit(opcode, rl_dest, rl_src[0], vC);
1144 break;
1145
1146 default:
1147 LOG(FATAL) << "Unexpected opcode: " << opcode;
1148 }
buzbee082833c2014-05-17 23:16:26 -07001149 DCHECK(CheckCorePoolSanity());
Brian Carlstrom1895ea32013-07-18 13:28:37 -07001150} // NOLINT(readability/fn_size)
Brian Carlstrom7940e442013-07-12 13:46:57 -07001151
1152// Process extended MIR instructions
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001153void Mir2Lir::HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001154 switch (static_cast<ExtendedMIROpcode>(mir->dalvikInsn.opcode)) {
1155 case kMirOpCopy: {
1156 RegLocation rl_src = mir_graph_->GetSrc(mir, 0);
1157 RegLocation rl_dest = mir_graph_->GetDest(mir);
1158 StoreValue(rl_dest, rl_src);
1159 break;
1160 }
1161 case kMirOpFusedCmplFloat:
Vladimir Marko8b858e12014-11-27 14:52:37 +00001162 if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
1163 GenSuspendTest(mir->optimization_flags);
1164 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001165 GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, false /*double*/);
1166 break;
1167 case kMirOpFusedCmpgFloat:
Vladimir Marko8b858e12014-11-27 14:52:37 +00001168 if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
1169 GenSuspendTest(mir->optimization_flags);
1170 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001171 GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, false /*double*/);
1172 break;
1173 case kMirOpFusedCmplDouble:
Vladimir Marko8b858e12014-11-27 14:52:37 +00001174 if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
1175 GenSuspendTest(mir->optimization_flags);
1176 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001177 GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, true /*double*/);
1178 break;
1179 case kMirOpFusedCmpgDouble:
Vladimir Marko8b858e12014-11-27 14:52:37 +00001180 if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
1181 GenSuspendTest(mir->optimization_flags);
1182 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001183 GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, true /*double*/);
1184 break;
1185 case kMirOpFusedCmpLong:
Vladimir Marko8b858e12014-11-27 14:52:37 +00001186 if (mir_graph_->IsBackEdge(bb, bb->taken) || mir_graph_->IsBackEdge(bb, bb->fall_through)) {
1187 GenSuspendTest(mir->optimization_flags);
1188 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001189 GenFusedLongCmpBranch(bb, mir);
1190 break;
1191 case kMirOpSelect:
1192 GenSelect(bb, mir);
1193 break;
Razvan A Lupusoru76423242014-08-04 09:38:46 -07001194 case kMirOpNullCheck: {
1195 RegLocation rl_obj = mir_graph_->GetSrc(mir, 0);
1196 rl_obj = LoadValue(rl_obj, kRefReg);
1197 // An explicit check is done because it is not expected that when this is used,
1198 // that it will actually trip up the implicit checks (since an invalid access
1199 // is needed on the null object).
1200 GenExplicitNullCheck(rl_obj.reg, mir->optimization_flags);
1201 break;
1202 }
Mark Mendelld65c51a2014-04-29 16:55:20 -04001203 case kMirOpPhi:
1204 case kMirOpNop:
Mark Mendelld65c51a2014-04-29 16:55:20 -04001205 case kMirOpRangeCheck:
1206 case kMirOpDivZeroCheck:
1207 case kMirOpCheck:
1208 case kMirOpCheckPart2:
1209 // Ignore these known opcodes
1210 break;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001211 default:
Mark Mendelld65c51a2014-04-29 16:55:20 -04001212 // Give the backends a chance to handle unknown extended MIR opcodes.
1213 GenMachineSpecificExtendedMethodMIR(bb, mir);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001214 break;
1215 }
1216}
1217
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -08001218void Mir2Lir::GenPrintLabel(MIR* mir) {
1219 // Mark the beginning of a Dalvik instruction for line tracking.
1220 if (cu_->verbose) {
1221 char* inst_str = mir_graph_->GetDalvikDisassembly(mir);
1222 MarkBoundary(mir->offset, inst_str);
1223 }
1224}
1225
// Handle the content in each basic block.
bool Mir2Lir::MethodBlockCodeGen(BasicBlock* bb) {
  if (bb->block_type == kDead) return false;
  current_dalvik_offset_ = bb->start_offset;
  MIR* mir;
  int block_id = bb->id;

  block_label_list_[block_id].operands[0] = bb->start_offset;

  // Insert the block label.
  block_label_list_[block_id].opcode = kPseudoNormalBlockLabel;
  block_label_list_[block_id].flags.fixup = kFixupLabel;
  AppendLIR(&block_label_list_[block_id]);

  LIR* head_lir = NULL;

  // If this is a catch block, export the start address.
  if (bb->catch_entry) {
    head_lir = NewLIR0(kPseudoExportedPC);
  }

  // Free temp registers and reset redundant store tracking.
  ClobberAllTemps();

  if (bb->block_type == kEntryBlock) {
    ResetRegPool();
    int start_vreg = mir_graph_->GetFirstInVR();
    AppendLIR(NewLIR0(kPseudoPrologueBegin));
    GenEntrySequence(&mir_graph_->reg_location_[start_vreg], mir_graph_->GetMethodLoc());
    AppendLIR(NewLIR0(kPseudoPrologueEnd));
    DCHECK_EQ(cfi_.GetCurrentCFAOffset(), frame_size_);
  } else if (bb->block_type == kExitBlock) {
    ResetRegPool();
    DCHECK_EQ(cfi_.GetCurrentCFAOffset(), frame_size_);
    AppendLIR(NewLIR0(kPseudoEpilogueBegin));
    GenExitSequence();
    AppendLIR(NewLIR0(kPseudoEpilogueEnd));
    DCHECK_EQ(cfi_.GetCurrentCFAOffset(), frame_size_);
  }

  for (mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
    ResetRegPool();
    if (cu_->disable_opt & (1 << kTrackLiveTemps)) {
      ClobberAllTemps();
      // Reset temp allocation to minimize differences when A/B testing.
      reg_pool_->ResetNextTemp();
    }

    if (cu_->disable_opt & (1 << kSuppressLoads)) {
      ResetDefTracking();
    }

    // Reset temp tracking sanity check.
    if (kIsDebugBuild) {
      live_sreg_ = INVALID_SREG;
    }

    current_dalvik_offset_ = mir->offset;
    current_mir_ = mir;
    int opcode = mir->dalvikInsn.opcode;

    GenPrintLabel(mir);

    // Remember the first LIR for this block.
    if (head_lir == NULL) {
      head_lir = &block_label_list_[bb->id];
      // Set the first label as a scheduling barrier.
      DCHECK(!head_lir->flags.use_def_invalid);
      head_lir->u.m.def_mask = &kEncodeAll;
    }

    if (opcode == kMirOpCheck) {
      // Combine check and work halves of throwing instruction.
      MIR* work_half = mir->meta.throw_insn;
      mir->dalvikInsn = work_half->dalvikInsn;
      mir->optimization_flags = work_half->optimization_flags;
      mir->meta = work_half->meta;  // Whatever the work_half had, we need to copy it.
      opcode = work_half->dalvikInsn.opcode;
      SSARepresentation* ssa_rep = work_half->ssa_rep;
      work_half->ssa_rep = mir->ssa_rep;
      mir->ssa_rep = ssa_rep;
      work_half->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpCheckPart2);
      work_half->meta.throw_insn = mir;
    }

    if (MIR::DecodedInstruction::IsPseudoMirOp(opcode)) {
      HandleExtendedMethodMIR(bb, mir);
      continue;
    }

    CompileDalvikInstruction(mir, bb, block_label_list_);
  }

  if (head_lir) {
    // Eliminate redundant loads/stores and delay stores into later slots.
    ApplyLocalOptimizations(head_lir, last_lir_insn_);
  }
  return false;
}

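// Try to compile the method as a special case described by |special| (a pattern recognized
// by the method inliner, e.g. an empty method or a simple getter/setter). Returns false if
// no DalvikByteCode block is found or the target declines to generate the special-case code.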
bool Mir2Lir::SpecialMIR2LIR(const InlineMethod& special) {
  cu_->NewTimingSplit("SpecialMIR2LIR");
  // Find the first DalvikByteCode block.
  DCHECK_EQ(mir_graph_->GetNumReachableBlocks(), mir_graph_->GetDfsOrder().size());
  BasicBlock* bb = NULL;
  for (BasicBlockId dfs_id : mir_graph_->GetDfsOrder()) {
    BasicBlock* candidate = mir_graph_->GetBasicBlock(dfs_id);
    if (candidate->block_type == kDalvikByteCode) {
      bb = candidate;
      break;
    }
  }
  if (bb == NULL) {
    return false;
  }
  DCHECK_EQ(bb->start_offset, 0);
  DCHECK(bb->first_mir_insn != NULL);

  // Get the first instruction.
  MIR* mir = bb->first_mir_insn;

  // Free temp registers and reset redundant store tracking.
  ResetRegPool();
  ResetDefTracking();
  ClobberAllTemps();

  return GenSpecialCase(bb, mir, special);
}

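// Lower the entire method: generate code for each basic block in DFS pre-order and insert an
// unconditional branch whenever a fall-through successor is not laid out immediately after
// its predecessor.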
void Mir2Lir::MethodMIR2LIR() {
  cu_->NewTimingSplit("MIR2LIR");

  // Hold the labels of each block.
  block_label_list_ = arena_->AllocArray<LIR>(mir_graph_->GetNumBlocks(), kArenaAllocLIR);

  PreOrderDfsIterator iter(mir_graph_);
  BasicBlock* curr_bb = iter.Next();
  BasicBlock* next_bb = iter.Next();
  while (curr_bb != NULL) {
    MethodBlockCodeGen(curr_bb);
    // If the fall_through block is no longer laid out consecutively, drop in a branch.
    BasicBlock* curr_bb_fall_through = mir_graph_->GetBasicBlock(curr_bb->fall_through);
    if ((curr_bb_fall_through != NULL) && (curr_bb_fall_through != next_bb)) {
      OpUnconditionalBranch(&block_label_list_[curr_bb->fall_through]);
    }
    curr_bb = next_bb;
    do {
      next_bb = iter.Next();
    } while ((next_bb != NULL) && (next_bb->block_type == kDead));
  }
  HandleSlowPaths();
}

//
// LIR Slow Path
//

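// Emit the label that starts a slow path: restore the dex pc and MIR context of the
// originating fast path, create the label LIR and point the fast-path branch at it.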
LIR* Mir2Lir::LIRSlowPath::GenerateTargetLabel(int opcode) {
  m2l_->SetCurrentDexPc(current_dex_pc_);
  m2l_->current_mir_ = current_mir_;
  LIR* target = m2l_->NewLIR0(opcode);
  fromfast_->target = target;
  return target;
}

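// Debug helper: verify that a RegStorage has the expected width, reference-ness and FP-ness.
// Depending on |fail| and |report| a mismatch either CHECK-fails or only logs a warning.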
void Mir2Lir::CheckRegStorageImpl(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp,
                                  bool fail, bool report) const {
  if (rs.Valid()) {
    if (ref == RefCheck::kCheckRef) {
      if (cu_->target64 && !rs.Is64Bit()) {
        if (fail) {
          CHECK(false) << "Reg storage not 64b for ref.";
        } else if (report) {
          LOG(WARNING) << "Reg storage not 64b for ref.";
        }
      }
    }
    if (wide == WidenessCheck::kCheckWide) {
      if (!rs.Is64Bit()) {
        if (fail) {
          CHECK(false) << "Reg storage not 64b for wide.";
        } else if (report) {
          LOG(WARNING) << "Reg storage not 64b for wide.";
        }
      }
    }
    // A tighter check would be nice, but for now soft-float will not check float at all.
    if (fp == FPCheck::kCheckFP && cu_->instruction_set != kArm) {
      if (!rs.IsFloat()) {
        if (fail) {
          CHECK(false) << "Reg storage not float for fp.";
        } else if (report) {
          LOG(WARNING) << "Reg storage not float for fp.";
        }
      }
    } else if (fp == FPCheck::kCheckNotFP) {
      if (rs.IsFloat()) {
        if (fail) {
          CHECK(false) << "Reg storage float for not-fp.";
        } else if (report) {
          LOG(WARNING) << "Reg storage float for not-fp.";
        }
      }
    }
  }
}

void Mir2Lir::CheckRegLocationImpl(RegLocation rl, bool fail, bool report) const {
  // Regrettably can't use the fp part of rl, as that is not really indicative of where a value
  // will be stored.
  CheckRegStorageImpl(rl.reg, rl.wide ? WidenessCheck::kCheckWide : WidenessCheck::kCheckNotWide,
                      rl.ref ? RefCheck::kCheckRef : RefCheck::kCheckNotRef, FPCheck::kIgnoreFP,
                      fail, report);
}

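// Default implementation aborts; targets that need per-instruction offsets override this.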
size_t Mir2Lir::GetInstructionOffset(LIR* lir) {
  UNUSED(lir);
  UNIMPLEMENTED(FATAL) << "Unsupported GetInstructionOffset()";
  UNREACHABLE();
}

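// Build the in-argument to register mapping: walk the shorty and ask the target-specific
// mapper for a register for each argument. Wide arguments occupy two virtual registers, so a
// filler entry keeps the mapping indexable by VR position; arguments that get no register are
// flagged as being passed on the stack.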
void Mir2Lir::InToRegStorageMapping::Initialize(ShortyIterator* shorty,
                                                InToRegStorageMapper* mapper) {
  DCHECK(mapper != nullptr);
  DCHECK(shorty != nullptr);
  DCHECK(!IsInitialized());
  DCHECK_EQ(end_mapped_in_, 0u);
  DCHECK(!has_arguments_on_stack_);
  while (shorty->Next()) {
    ShortyArg arg = shorty->GetArg();
    RegStorage reg = mapper->GetNextReg(arg);
    mapping_.emplace_back(arg, reg);
    if (arg.IsWide()) {
      mapping_.emplace_back(ShortyArg(kInvalidShorty), RegStorage::InvalidReg());
    }
    if (reg.Valid()) {
      end_mapped_in_ = mapping_.size();
      // If the VR is wide but wasn't mapped as wide then account for it.
      if (arg.IsWide() && !reg.Is64Bit()) {
        --end_mapped_in_;
      }
    } else {
      has_arguments_on_stack_ = true;
    }
  }
  initialized_ = true;
}

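// Return the register assigned to the in-argument at |in_position|, or an invalid RegStorage
// if that argument is passed on the stack.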
RegStorage Mir2Lir::InToRegStorageMapping::GetReg(size_t in_position) {
  DCHECK(IsInitialized());
  DCHECK_LT(in_position, mapping_.size());
  DCHECK_NE(mapping_[in_position].first.GetType(), kInvalidShorty);
  return mapping_[in_position].second;
}

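// Return the shorty descriptor recorded for the in-argument at |in_position|.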
Mir2Lir::ShortyArg Mir2Lir::InToRegStorageMapping::GetShorty(size_t in_position) {
  DCHECK(IsInitialized());
  DCHECK_LT(in_position, mapping_.size());
  DCHECK_NE(mapping_[in_position].first.GetType(), kInvalidShorty);
  return mapping_[in_position].first;
}

}  // namespace art