blob: 71cc0d9cd6f34b26f688bb5658f90a444ad22053 [file] [log] [blame]
Brian Carlstrom7940e442013-07-12 13:46:57 -07001/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "dex/compiler_ir.h"
18#include "dex/compiler_internals.h"
Brian Carlstrom60d7a652014-03-13 18:10:08 -070019#include "dex/quick/arm/arm_lir.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070020#include "dex/quick/mir_to_lir-inl.h"
Ian Rogers166db042013-07-26 12:05:57 -070021#include "entrypoints/quick/quick_entrypoints.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070022#include "mirror/array.h"
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -080023#include "mirror/object-inl.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070024#include "verifier/method_verifier.h"
Dave Allisonbcec6fb2014-01-17 12:52:22 -080025#include <functional>
Brian Carlstrom7940e442013-07-12 13:46:57 -070026
27namespace art {
28
/*
 * This source file contains "gen" codegen routines that should
 * be applicable to most targets.  Only mid-level support utilities
 * and "op" calls may be used here.
 */
34
35/*
buzbeeb48819d2013-09-14 16:15:25 -070036 * Generate a kPseudoBarrier marker to indicate the boundary of special
Brian Carlstrom7940e442013-07-12 13:46:57 -070037 * blocks.
38 */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -070039void Mir2Lir::GenBarrier() {
Brian Carlstrom7940e442013-07-12 13:46:57 -070040 LIR* barrier = NewLIR0(kPseudoBarrier);
41 /* Mark all resources as being clobbered */
buzbeeb48819d2013-09-14 16:15:25 -070042 DCHECK(!barrier->flags.use_def_invalid);
43 barrier->u.m.def_mask = ENCODE_ALL;
Brian Carlstrom7940e442013-07-12 13:46:57 -070044}
45
buzbee0d829482013-10-11 15:24:55 -070046// TODO: need to do some work to split out targets with
Brian Carlstrom7940e442013-07-12 13:46:57 -070047// condition codes and those without
Brian Carlstrom2ce745c2013-07-17 17:44:30 -070048LIR* Mir2Lir::GenCheck(ConditionCode c_code, ThrowKind kind) {
Brian Carlstrom7940e442013-07-12 13:46:57 -070049 DCHECK_NE(cu_->instruction_set, kMips);
50 LIR* tgt = RawLIR(0, kPseudoThrowTarget, kind, current_dalvik_offset_);
51 LIR* branch = OpCondBranch(c_code, tgt);
52 // Remember branch target - will process later
53 throw_launchpads_.Insert(tgt);
54 return branch;
55}
56
Brian Carlstrom2ce745c2013-07-17 17:44:30 -070057LIR* Mir2Lir::GenImmedCheck(ConditionCode c_code, int reg, int imm_val, ThrowKind kind) {
Brian Carlstrom7940e442013-07-12 13:46:57 -070058 LIR* tgt = RawLIR(0, kPseudoThrowTarget, kind, current_dalvik_offset_, reg, imm_val);
59 LIR* branch;
60 if (c_code == kCondAl) {
61 branch = OpUnconditionalBranch(tgt);
62 } else {
63 branch = OpCmpImmBranch(c_code, reg, imm_val, tgt);
64 }
65 // Remember branch target - will process later
66 throw_launchpads_.Insert(tgt);
67 return branch;
68}
69
Dave Allisonb373e092014-02-20 16:06:36 -080070
Brian Carlstrom7940e442013-07-12 13:46:57 -070071/* Perform null-check on a register. */
Dave Allisonb373e092014-02-20 16:06:36 -080072LIR* Mir2Lir::GenNullCheck(int m_reg, int opt_flags) {
73 if (Runtime::Current()->ExplicitNullChecks()) {
74 if (!(cu_->disable_opt & (1 << kNullCheckElimination)) && (opt_flags & MIR_IGNORE_NULL_CHECK)) {
75 return NULL;
76 }
77 return GenImmedCheck(kCondEq, m_reg, 0, kThrowNullPointer);
Brian Carlstrom7940e442013-07-12 13:46:57 -070078 }
Dave Allisonb373e092014-02-20 16:06:36 -080079 return nullptr;
80}
81
82void Mir2Lir::MarkPossibleNullPointerException(int opt_flags) {
83 if (!Runtime::Current()->ExplicitNullChecks()) {
84 if (!(cu_->disable_opt & (1 << kNullCheckElimination)) && (opt_flags & MIR_IGNORE_NULL_CHECK)) {
85 return;
86 }
87 MarkSafepointPC(last_lir_insn_);
88 }
89}
90
91void Mir2Lir::MarkPossibleStackOverflowException() {
92 if (!Runtime::Current()->ExplicitStackOverflowChecks()) {
93 MarkSafepointPC(last_lir_insn_);
94 }
95}
96
97void Mir2Lir::ForceImplicitNullCheck(int reg, int opt_flags) {
98 if (!Runtime::Current()->ExplicitNullChecks()) {
99 if (!(cu_->disable_opt & (1 << kNullCheckElimination)) && (opt_flags & MIR_IGNORE_NULL_CHECK)) {
100 return;
101 }
102 // Force an implicit null check by performing a memory operation (load) from the given
103 // register with offset 0. This will cause a signal if the register contains 0 (null).
104 int tmp = AllocTemp();
105 LIR* load = LoadWordDisp(reg, 0, tmp);
106 FreeTemp(tmp);
107 MarkSafepointPC(load);
108 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700109}
110
111/* Perform check on two registers */
112LIR* Mir2Lir::GenRegRegCheck(ConditionCode c_code, int reg1, int reg2,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700113 ThrowKind kind) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700114 LIR* tgt = RawLIR(0, kPseudoThrowTarget, kind, current_dalvik_offset_, reg1, reg2);
115 LIR* branch = OpCmpBranch(c_code, reg1, reg2, tgt);
116 // Remember branch target - will process later
117 throw_launchpads_.Insert(tgt);
118 return branch;
119}
120
121void Mir2Lir::GenCompareAndBranch(Instruction::Code opcode, RegLocation rl_src1,
122 RegLocation rl_src2, LIR* taken,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700123 LIR* fall_through) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700124 ConditionCode cond;
125 switch (opcode) {
126 case Instruction::IF_EQ:
127 cond = kCondEq;
128 break;
129 case Instruction::IF_NE:
130 cond = kCondNe;
131 break;
132 case Instruction::IF_LT:
133 cond = kCondLt;
134 break;
135 case Instruction::IF_GE:
136 cond = kCondGe;
137 break;
138 case Instruction::IF_GT:
139 cond = kCondGt;
140 break;
141 case Instruction::IF_LE:
142 cond = kCondLe;
143 break;
144 default:
145 cond = static_cast<ConditionCode>(0);
146 LOG(FATAL) << "Unexpected opcode " << opcode;
147 }
148
149 // Normalize such that if either operand is constant, src2 will be constant
150 if (rl_src1.is_const) {
151 RegLocation rl_temp = rl_src1;
152 rl_src1 = rl_src2;
153 rl_src2 = rl_temp;
154 cond = FlipComparisonOrder(cond);
155 }
156
157 rl_src1 = LoadValue(rl_src1, kCoreReg);
158 // Is this really an immediate comparison?
159 if (rl_src2.is_const) {
160 // If it's already live in a register or not easily materialized, just keep going
161 RegLocation rl_temp = UpdateLoc(rl_src2);
162 if ((rl_temp.location == kLocDalvikFrame) &&
163 InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src2))) {
164 // OK - convert this to a compare immediate and branch
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000165 OpCmpImmBranch(cond, rl_src1.reg.GetReg(), mir_graph_->ConstantValue(rl_src2), taken);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700166 return;
167 }
168 }
169 rl_src2 = LoadValue(rl_src2, kCoreReg);
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000170 OpCmpBranch(cond, rl_src1.reg.GetReg(), rl_src2.reg.GetReg(), taken);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700171}
172
173void Mir2Lir::GenCompareZeroAndBranch(Instruction::Code opcode, RegLocation rl_src, LIR* taken,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700174 LIR* fall_through) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700175 ConditionCode cond;
176 rl_src = LoadValue(rl_src, kCoreReg);
177 switch (opcode) {
178 case Instruction::IF_EQZ:
179 cond = kCondEq;
180 break;
181 case Instruction::IF_NEZ:
182 cond = kCondNe;
183 break;
184 case Instruction::IF_LTZ:
185 cond = kCondLt;
186 break;
187 case Instruction::IF_GEZ:
188 cond = kCondGe;
189 break;
190 case Instruction::IF_GTZ:
191 cond = kCondGt;
192 break;
193 case Instruction::IF_LEZ:
194 cond = kCondLe;
195 break;
196 default:
197 cond = static_cast<ConditionCode>(0);
198 LOG(FATAL) << "Unexpected opcode " << opcode;
199 }
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000200 OpCmpImmBranch(cond, rl_src.reg.GetReg(), 0, taken);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700201}
202
// Sign-extend a 32-bit int into a 64-bit long register pair.
void Mir2Lir::GenIntToLong(RegLocation rl_dest, RegLocation rl_src) {
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  // Get the source value into the low word of the result.
  if (rl_src.location == kLocPhysReg) {
    OpRegCopy(rl_result.reg.GetReg(), rl_src.reg.GetReg());
  } else {
    LoadValueDirect(rl_src, rl_result.reg.GetReg());
  }
  // Fill the high word with the sign: arithmetic shift right by 31.
  OpRegRegImm(kOpAsr, rl_result.reg.GetHighReg(), rl_result.reg.GetReg(), 31);
  StoreValueWide(rl_dest, rl_result);
}
213
214void Mir2Lir::GenIntNarrowing(Instruction::Code opcode, RegLocation rl_dest,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700215 RegLocation rl_src) {
Brian Carlstrom6f485c62013-07-18 15:35:35 -0700216 rl_src = LoadValue(rl_src, kCoreReg);
217 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
218 OpKind op = kOpInvalid;
219 switch (opcode) {
220 case Instruction::INT_TO_BYTE:
221 op = kOp2Byte;
222 break;
223 case Instruction::INT_TO_SHORT:
224 op = kOp2Short;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700225 break;
Brian Carlstrom6f485c62013-07-18 15:35:35 -0700226 case Instruction::INT_TO_CHAR:
227 op = kOp2Char;
228 break;
229 default:
230 LOG(ERROR) << "Bad int conversion type";
231 }
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000232 OpRegReg(op, rl_result.reg.GetReg(), rl_src.reg.GetReg());
Brian Carlstrom6f485c62013-07-18 15:35:35 -0700233 StoreValue(rl_dest, rl_result);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700234}
235
236/*
237 * Let helper function take care of everything. Will call
238 * Array::AllocFromCode(type_idx, method, count);
239 * Note: AllocFromCode will handle checks for errNegativeArraySize.
240 */
void Mir2Lir::GenNewArray(uint32_t type_idx, RegLocation rl_dest,
                          RegLocation rl_src) {
  FlushAllRegs();  /* Everything to home location */
  ThreadOffset func_offset(-1);  // Sentinel: -1 means "not yet chosen".
  const DexFile* dex_file = cu_->dex_file;
  CompilerDriver* driver = cu_->compiler_driver;
  if (cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx, *dex_file,
                                                       type_idx)) {
    bool is_type_initialized;  // Ignored as an array does not have an initializer.
    bool use_direct_type_ptr;
    uintptr_t direct_type_ptr;
    if (kEmbedClassInCode &&
        driver->CanEmbedTypeInCode(*dex_file, type_idx,
                                   &is_type_initialized, &use_direct_type_ptr, &direct_type_ptr)) {
      // The fast path: the class can be embedded in the generated code.
      if (!use_direct_type_ptr) {
        // Materialize the Class* into kArg0 and pass it by register.
        LoadClassType(type_idx, kArg0);
        func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocArrayResolved);
        CallRuntimeHelperRegMethodRegLocation(func_offset, TargetReg(kArg0), rl_src, true);
      } else {
        // Use the direct pointer as an immediate argument.
        func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocArrayResolved);
        CallRuntimeHelperImmMethodRegLocation(func_offset, direct_type_ptr, rl_src, true);
      }
    } else {
      // The slow path: pass the type index and let the runtime resolve.
      DCHECK_EQ(func_offset.Int32Value(), -1);
      func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocArray);
      CallRuntimeHelperImmMethodRegLocation(func_offset, type_idx, rl_src, true);
    }
    DCHECK_NE(func_offset.Int32Value(), -1);
  } else {
    // Access to the type cannot be verified at compile time; the helper
    // performs the access check as well as the allocation.
    func_offset= QUICK_ENTRYPOINT_OFFSET(pAllocArrayWithAccessCheck);
    CallRuntimeHelperImmMethodRegLocation(func_offset, type_idx, rl_src, true);
  }
  // The helper leaves the new array in the (core) return register.
  RegLocation rl_result = GetReturn(false);
  StoreValue(rl_dest, rl_result);
}
279
280/*
281 * Similar to GenNewArray, but with post-allocation initialization.
282 * Verifier guarantees we're dealing with an array class. Current
283 * code throws runtime exception "bad Filled array req" for 'D' and 'J'.
284 * Current code also throws internal unimp if not 'L', '[' or 'I'.
285 */
void Mir2Lir::GenFilledNewArray(CallInfo* info) {
  int elems = info->num_arg_words;
  int type_idx = info->index;
  FlushAllRegs();  /* Everything to home location */
  ThreadOffset func_offset(-1);
  // Pick the helper: with or without an access check on the type.
  if (cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx, *cu_->dex_file,
                                                       type_idx)) {
    func_offset = QUICK_ENTRYPOINT_OFFSET(pCheckAndAllocArray);
  } else {
    func_offset = QUICK_ENTRYPOINT_OFFSET(pCheckAndAllocArrayWithAccessCheck);
  }
  CallRuntimeHelperImmMethodImm(func_offset, type_idx, elems, true);
  FreeTemp(TargetReg(kArg2));
  FreeTemp(TargetReg(kArg1));
  /*
   * NOTE: the implicit target for Instruction::FILLED_NEW_ARRAY is the
   * return region.  Because AllocFromCode placed the new array
   * in kRet0, we'll just lock it into place.  When debugger support is
   * added, it may be necessary to additionally copy all return
   * values to a home location in thread-local storage
   */
  LockTemp(TargetReg(kRet0));

  // TODO: use the correct component size, currently all supported types
  // share array alignment with ints (see comment at head of function)
  size_t component_size = sizeof(int32_t);

  // Having a range of 0 is legal
  if (info->is_range && (elems > 0)) {
    /*
     * Bit of ugliness here.  We're going generate a mem copy loop
     * on the register range, but it is possible that some regs
     * in the range have been promoted.  This is unlikely, but
     * before generating the copy, we'll just force a flush
     * of any regs in the source range that have been promoted to
     * home location.
     */
    for (int i = 0; i < elems; i++) {
      RegLocation loc = UpdateLoc(info->args[i]);
      if (loc.location == kLocPhysReg) {
        StoreBaseDisp(TargetReg(kSp), SRegOffset(loc.s_reg_low),
                      loc.reg.GetReg(), kWord);
      }
    }
    /*
     * TUNING note: generated code here could be much improved, but
     * this is an uncommon operation and isn't especially performance
     * critical.
     */
    int r_src = AllocTemp();
    int r_dst = AllocTemp();
    int r_idx = AllocTemp();
    int r_val = INVALID_REG;
    // Source of the copied value differs per target (register pressure;
    // on x86 kRet0 is freed so another temp can be allocated).
    switch (cu_->instruction_set) {
      case kThumb2:
        r_val = TargetReg(kLr);
        break;
      case kX86:
        FreeTemp(TargetReg(kRet0));
        r_val = AllocTemp();
        break;
      case kMips:
        r_val = AllocTemp();
        break;
      default: LOG(FATAL) << "Unexpected instruction set: " << cu_->instruction_set;
    }
    // Set up source pointer
    RegLocation rl_first = info->args[0];
    OpRegRegImm(kOpAdd, r_src, TargetReg(kSp), SRegOffset(rl_first.s_reg_low));
    // Set up the target pointer
    OpRegRegImm(kOpAdd, r_dst, TargetReg(kRet0),
                mirror::Array::DataOffset(component_size).Int32Value());
    // Set up the loop counter (known to be > 0)
    LoadConstant(r_idx, elems - 1);
    // Generate the copy loop.  Going backwards for convenience
    LIR* target = NewLIR0(kPseudoTargetLabel);
    // Copy next element (scaled index: shift of 2 == 4-byte words).
    LoadBaseIndexed(r_src, r_idx, r_val, 2, kWord);
    StoreBaseIndexed(r_dst, r_idx, r_val, 2, kWord);
    FreeTemp(r_val);
    OpDecAndBranch(kCondGe, r_idx, target);
    if (cu_->instruction_set == kX86) {
      // Restore the target pointer (kRet0 was repurposed above on x86).
      OpRegRegImm(kOpAdd, TargetReg(kRet0), r_dst,
                  -mirror::Array::DataOffset(component_size).Int32Value());
    }
  } else if (!info->is_range) {
    // Non-range form: store each argument individually.
    // TUNING: interleave
    for (int i = 0; i < elems; i++) {
      RegLocation rl_arg = LoadValue(info->args[i], kCoreReg);
      StoreBaseDisp(TargetReg(kRet0),
                    mirror::Array::DataOffset(component_size).Int32Value() +
                    i * 4, rl_arg.reg.GetReg(), kWord);
      // If the LoadValue caused a temp to be allocated, free it
      if (IsTemp(rl_arg.reg.GetReg())) {
        FreeTemp(rl_arg.reg.GetReg());
      }
    }
  }
  if (info->result.location != kLocInvalid) {
    StoreValue(info->result, GetReturn(false /* not fp */));
  }
}
389
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800390//
391// Slow path to ensure a class is initialized for sget/sput.
392//
class StaticFieldSlowPath : public Mir2Lir::LIRSlowPath {
 public:
  // unresolved: branch taken when the storage class pointer is NULL.
  // uninit: branch taken when the class is resolved but its status is below
  //         kStatusInitialized (see the OpCmpMemImmBranch at the call sites).
  // cont: label to resume at once the class is initialized.
  // storage_index: index passed to pInitializeStaticStorage.
  // r_base: register that receives the storage base on the way out.
  StaticFieldSlowPath(Mir2Lir* m2l, LIR* unresolved, LIR* uninit, LIR* cont,
                      int storage_index, int r_base) :
      LIRSlowPath(m2l, m2l->GetCurrentDexPc(), unresolved, cont), uninit_(uninit), storage_index_(storage_index),
      r_base_(r_base) {
  }

  void Compile() {
    LIR* unresolved_target = GenerateTargetLabel();
    // Both entry branches (unresolved and uninitialized) land on this label.
    uninit_->target = unresolved_target;
    m2l_->CallRuntimeHelperImm(QUICK_ENTRYPOINT_OFFSET(pInitializeStaticStorage),
                               storage_index_, true);
    // Copy helper's result into r_base, a no-op on all but MIPS.
    m2l_->OpRegCopy(r_base_, m2l_->TargetReg(kRet0));

    m2l_->OpUnconditionalBranch(cont_);
  }

 private:
  LIR* const uninit_;
  const int storage_index_;
  const int r_base_;
};
417
// Generate code for a static field put (sput family).  Fast path stores
// directly through the resolved storage base; otherwise a runtime helper
// performs the store.
void Mir2Lir::GenSput(MIR* mir, RegLocation rl_src, bool is_long_or_double,
                      bool is_object) {
  const MirSFieldLoweringInfo& field_info = mir_graph_->GetSFieldLoweringInfo(mir);
  // Let the driver know whether this field took the fast path.
  cu_->compiler_driver->ProcessedStaticField(field_info.FastPut(), field_info.IsReferrersClass());
  if (field_info.FastPut() && !SLOW_FIELD_PATH) {
    DCHECK_GE(field_info.FieldOffset().Int32Value(), 0);
    int r_base;
    if (field_info.IsReferrersClass()) {
      // Fast path, static storage base is this method's class
      RegLocation rl_method = LoadCurrMethod();
      r_base = AllocTemp();
      LoadWordDisp(rl_method.reg.GetReg(),
                   mirror::ArtMethod::DeclaringClassOffset().Int32Value(), r_base);
      if (IsTemp(rl_method.reg.GetReg())) {
        FreeTemp(rl_method.reg.GetReg());
      }
    } else {
      // Medium path, static storage base in a different class which requires checks that the other
      // class is initialized.
      // TODO: remove initialized check now that we are initializing classes in the compiler driver.
      DCHECK_NE(field_info.StorageIndex(), DexFile::kDexNoIndex);
      // May do runtime call so everything to home locations.
      FlushAllRegs();
      // Using fixed register to sync with possible call to runtime support.
      int r_method = TargetReg(kArg1);
      LockTemp(r_method);
      LoadCurrMethodDirect(r_method);
      r_base = TargetReg(kArg0);
      LockTemp(r_base);
      // r_base <- method->dex_cache_resolved_types_
      LoadWordDisp(r_method,
                   mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(),
                   r_base);
      // r_base <- resolved_types[storage_index]
      LoadWordDisp(r_base, mirror::Array::DataOffset(sizeof(mirror::Object*)).Int32Value() +
                   sizeof(int32_t*) * field_info.StorageIndex(), r_base);
      // r_base now points at static storage (Class*) or NULL if the type is not yet resolved.
      if (!field_info.IsInitialized() &&
          (mir->optimization_flags & MIR_IGNORE_CLINIT_CHECK) == 0) {
        // Check if r_base is NULL or a not yet initialized class.

        // The slow path is invoked if the r_base is NULL or the class pointed
        // to by it is not initialized.
        LIR* unresolved_branch = OpCmpImmBranch(kCondEq, r_base, 0, NULL);
        int r_tmp = TargetReg(kArg2);
        LockTemp(r_tmp);
        LIR* uninit_branch = OpCmpMemImmBranch(kCondLt, r_tmp, r_base,
                                               mirror::Class::StatusOffset().Int32Value(),
                                               mirror::Class::kStatusInitialized, NULL);
        LIR* cont = NewLIR0(kPseudoTargetLabel);

        AddSlowPath(new (arena_) StaticFieldSlowPath(this,
                                                     unresolved_branch, uninit_branch, cont,
                                                     field_info.StorageIndex(), r_base));

        FreeTemp(r_tmp);
      }
      FreeTemp(r_method);
    }
    // rBase now holds static storage base
    if (is_long_or_double) {
      rl_src = LoadValueWide(rl_src, kAnyReg);
    } else {
      rl_src = LoadValue(rl_src, kAnyReg);
    }
    if (field_info.IsVolatile()) {
      // StoreStore barrier before a volatile store.
      GenMemBarrier(kStoreStore);
    }
    if (is_long_or_double) {
      StoreBaseDispWide(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg.GetReg(),
                        rl_src.reg.GetHighReg());
    } else {
      StoreWordDisp(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg.GetReg());
    }
    if (field_info.IsVolatile()) {
      // StoreLoad barrier after a volatile store.
      GenMemBarrier(kStoreLoad);
    }
    if (is_object && !mir_graph_->IsConstantNullRef(rl_src)) {
      // Reference store: dirty the GC card for the storage object.
      MarkGCCard(rl_src.reg.GetReg(), r_base);
    }
    FreeTemp(r_base);
  } else {
    FlushAllRegs();  // Everything to home locations
    // Slow path: pick the helper matching the field width/kind.
    ThreadOffset setter_offset =
        is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pSet64Static)
                          : (is_object ? QUICK_ENTRYPOINT_OFFSET(pSetObjStatic)
                                       : QUICK_ENTRYPOINT_OFFSET(pSet32Static));
    CallRuntimeHelperImmRegLocation(setter_offset, field_info.FieldIndex(), rl_src, true);
  }
}
506
// Generate code for a static field get (sget family).  Fast path loads
// directly through the resolved storage base; otherwise a runtime helper
// performs the load.
void Mir2Lir::GenSget(MIR* mir, RegLocation rl_dest,
                      bool is_long_or_double, bool is_object) {
  const MirSFieldLoweringInfo& field_info = mir_graph_->GetSFieldLoweringInfo(mir);
  // Let the driver know whether this field took the fast path.
  cu_->compiler_driver->ProcessedStaticField(field_info.FastGet(), field_info.IsReferrersClass());
  if (field_info.FastGet() && !SLOW_FIELD_PATH) {
    DCHECK_GE(field_info.FieldOffset().Int32Value(), 0);
    int r_base;
    if (field_info.IsReferrersClass()) {
      // Fast path, static storage base is this method's class
      RegLocation rl_method = LoadCurrMethod();
      r_base = AllocTemp();
      LoadWordDisp(rl_method.reg.GetReg(),
                   mirror::ArtMethod::DeclaringClassOffset().Int32Value(), r_base);
    } else {
      // Medium path, static storage base in a different class which requires checks that the other
      // class is initialized
      DCHECK_NE(field_info.StorageIndex(), DexFile::kDexNoIndex);
      // May do runtime call so everything to home locations.
      FlushAllRegs();
      // Using fixed register to sync with possible call to runtime support.
      int r_method = TargetReg(kArg1);
      LockTemp(r_method);
      LoadCurrMethodDirect(r_method);
      r_base = TargetReg(kArg0);
      LockTemp(r_base);
      // r_base <- method->dex_cache_resolved_types_
      LoadWordDisp(r_method,
                   mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(),
                   r_base);
      // r_base <- resolved_types[storage_index]
      LoadWordDisp(r_base, mirror::Array::DataOffset(sizeof(mirror::Object*)).Int32Value() +
                   sizeof(int32_t*) * field_info.StorageIndex(), r_base);
      // r_base now points at static storage (Class*) or NULL if the type is not yet resolved.
      if (!field_info.IsInitialized() &&
          (mir->optimization_flags & MIR_IGNORE_CLINIT_CHECK) == 0) {
        // Check if r_base is NULL or a not yet initialized class.

        // The slow path is invoked if the r_base is NULL or the class pointed
        // to by it is not initialized.
        LIR* unresolved_branch = OpCmpImmBranch(kCondEq, r_base, 0, NULL);
        int r_tmp = TargetReg(kArg2);
        LockTemp(r_tmp);
        LIR* uninit_branch = OpCmpMemImmBranch(kCondLt, r_tmp, r_base,
                                               mirror::Class::StatusOffset().Int32Value(),
                                               mirror::Class::kStatusInitialized, NULL);
        LIR* cont = NewLIR0(kPseudoTargetLabel);

        AddSlowPath(new (arena_) StaticFieldSlowPath(this,
                                                     unresolved_branch, uninit_branch, cont,
                                                     field_info.StorageIndex(), r_base));

        FreeTemp(r_tmp);
      }
      FreeTemp(r_method);
    }
    // r_base now holds static storage base
    RegLocation rl_result = EvalLoc(rl_dest, kAnyReg, true);
    if (field_info.IsVolatile()) {
      // LoadLoad barrier for a volatile read.
      GenMemBarrier(kLoadLoad);
    }
    if (is_long_or_double) {
      LoadBaseDispWide(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg.GetReg(),
                       rl_result.reg.GetHighReg(), INVALID_SREG);
    } else {
      LoadWordDisp(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg.GetReg());
    }
    FreeTemp(r_base);
    if (is_long_or_double) {
      StoreValueWide(rl_dest, rl_result);
    } else {
      StoreValue(rl_dest, rl_result);
    }
  } else {
    FlushAllRegs();  // Everything to home locations
    // Slow path: pick the helper matching the field width/kind.
    ThreadOffset getterOffset =
        is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pGet64Static)
            :(is_object ? QUICK_ENTRYPOINT_OFFSET(pGetObjStatic)
                : QUICK_ENTRYPOINT_OFFSET(pGet32Static));
    CallRuntimeHelperImm(getterOffset, field_info.FieldIndex(), true);
    // Helper leaves the value in the return register(s).
    if (is_long_or_double) {
      RegLocation rl_result = GetReturnWide(rl_dest.fp);
      StoreValueWide(rl_dest, rl_result);
    } else {
      RegLocation rl_result = GetReturn(rl_dest.fp);
      StoreValue(rl_dest, rl_result);
    }
  }
}
593
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800594// Generate code for all slow paths.
595void Mir2Lir::HandleSlowPaths() {
596 int n = slow_paths_.Size();
597 for (int i = 0; i < n; ++i) {
598 LIRSlowPath* slowpath = slow_paths_.Get(i);
599 slowpath->Compile();
600 }
601 slow_paths_.Reset();
602}
603
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700604void Mir2Lir::HandleSuspendLaunchPads() {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700605 int num_elems = suspend_launchpads_.Size();
Ian Rogers848871b2013-08-05 10:56:33 -0700606 ThreadOffset helper_offset = QUICK_ENTRYPOINT_OFFSET(pTestSuspend);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700607 for (int i = 0; i < num_elems; i++) {
608 ResetRegPool();
609 ResetDefTracking();
610 LIR* lab = suspend_launchpads_.Get(i);
buzbee0d829482013-10-11 15:24:55 -0700611 LIR* resume_lab = reinterpret_cast<LIR*>(UnwrapPointer(lab->operands[0]));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700612 current_dalvik_offset_ = lab->operands[1];
613 AppendLIR(lab);
614 int r_tgt = CallHelperSetup(helper_offset);
615 CallHelper(r_tgt, helper_offset, true /* MarkSafepointPC */);
616 OpUnconditionalBranch(resume_lab);
617 }
618}
619
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700620void Mir2Lir::HandleThrowLaunchPads() {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700621 int num_elems = throw_launchpads_.Size();
622 for (int i = 0; i < num_elems; i++) {
623 ResetRegPool();
624 ResetDefTracking();
625 LIR* lab = throw_launchpads_.Get(i);
626 current_dalvik_offset_ = lab->operands[1];
627 AppendLIR(lab);
Ian Rogers848871b2013-08-05 10:56:33 -0700628 ThreadOffset func_offset(-1);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700629 int v1 = lab->operands[2];
630 int v2 = lab->operands[3];
Brian Carlstrom60d7a652014-03-13 18:10:08 -0700631 const bool target_x86 = cu_->instruction_set == kX86;
632 const bool target_arm = cu_->instruction_set == kArm || cu_->instruction_set == kThumb2;
633 const bool target_mips = cu_->instruction_set == kMips;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700634 switch (lab->operands[0]) {
635 case kThrowNullPointer:
Ian Rogers848871b2013-08-05 10:56:33 -0700636 func_offset = QUICK_ENTRYPOINT_OFFSET(pThrowNullPointer);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700637 break;
Brian Carlstrom7934ac22013-07-26 10:54:15 -0700638 case kThrowConstantArrayBounds: // v1 is length reg (for Arm/Mips), v2 constant index
Brian Carlstrom7940e442013-07-12 13:46:57 -0700639 // v1 holds the constant array index. Mips/Arm uses v2 for length, x86 reloads.
640 if (target_x86) {
641 OpRegMem(kOpMov, TargetReg(kArg1), v1, mirror::Array::LengthOffset().Int32Value());
642 } else {
643 OpRegCopy(TargetReg(kArg1), v1);
644 }
645 // Make sure the following LoadConstant doesn't mess with kArg1.
646 LockTemp(TargetReg(kArg1));
647 LoadConstant(TargetReg(kArg0), v2);
Ian Rogers848871b2013-08-05 10:56:33 -0700648 func_offset = QUICK_ENTRYPOINT_OFFSET(pThrowArrayBounds);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700649 break;
650 case kThrowArrayBounds:
651 // Move v1 (array index) to kArg0 and v2 (array length) to kArg1
652 if (v2 != TargetReg(kArg0)) {
653 OpRegCopy(TargetReg(kArg0), v1);
654 if (target_x86) {
655 // x86 leaves the array pointer in v2, so load the array length that the handler expects
656 OpRegMem(kOpMov, TargetReg(kArg1), v2, mirror::Array::LengthOffset().Int32Value());
657 } else {
658 OpRegCopy(TargetReg(kArg1), v2);
659 }
660 } else {
661 if (v1 == TargetReg(kArg1)) {
662 // Swap v1 and v2, using kArg2 as a temp
663 OpRegCopy(TargetReg(kArg2), v1);
664 if (target_x86) {
665 // x86 leaves the array pointer in v2; load the array length that the handler expects
666 OpRegMem(kOpMov, TargetReg(kArg1), v2, mirror::Array::LengthOffset().Int32Value());
667 } else {
668 OpRegCopy(TargetReg(kArg1), v2);
669 }
670 OpRegCopy(TargetReg(kArg0), TargetReg(kArg2));
671 } else {
672 if (target_x86) {
673 // x86 leaves the array pointer in v2; load the array length that the handler expects
674 OpRegMem(kOpMov, TargetReg(kArg1), v2, mirror::Array::LengthOffset().Int32Value());
675 } else {
676 OpRegCopy(TargetReg(kArg1), v2);
677 }
678 OpRegCopy(TargetReg(kArg0), v1);
679 }
680 }
Ian Rogers848871b2013-08-05 10:56:33 -0700681 func_offset = QUICK_ENTRYPOINT_OFFSET(pThrowArrayBounds);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700682 break;
683 case kThrowDivZero:
Ian Rogers848871b2013-08-05 10:56:33 -0700684 func_offset = QUICK_ENTRYPOINT_OFFSET(pThrowDivZero);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700685 break;
686 case kThrowNoSuchMethod:
687 OpRegCopy(TargetReg(kArg0), v1);
688 func_offset =
Ian Rogers848871b2013-08-05 10:56:33 -0700689 QUICK_ENTRYPOINT_OFFSET(pThrowNoSuchMethod);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700690 break;
Brian Carlstrom60d7a652014-03-13 18:10:08 -0700691 case kThrowStackOverflow: {
Ian Rogers848871b2013-08-05 10:56:33 -0700692 func_offset = QUICK_ENTRYPOINT_OFFSET(pThrowStackOverflow);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700693 // Restore stack alignment
Brian Carlstrom60d7a652014-03-13 18:10:08 -0700694 int r_tgt = 0;
695 const int spill_size = (num_core_spills_ + num_fp_spills_) * 4;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700696 if (target_x86) {
Brian Carlstrom60d7a652014-03-13 18:10:08 -0700697 // - 4 to leave link register on stack.
698 OpRegImm(kOpAdd, TargetReg(kSp), frame_size_ - 4);
699 ClobberCallerSave();
700 } else if (target_arm) {
701 r_tgt = r12;
702 LoadWordDisp(TargetReg(kSp), spill_size - 4, TargetReg(kLr));
703 OpRegImm(kOpAdd, TargetReg(kSp), spill_size);
704 ClobberCallerSave();
705 LoadWordDisp(rARM_SELF, func_offset.Int32Value(), r_tgt);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700706 } else {
Brian Carlstrom60d7a652014-03-13 18:10:08 -0700707 DCHECK(target_mips);
708 DCHECK_EQ(num_fp_spills_, 0); // FP spills currently don't happen on mips.
709 // LR is offset 0 since we push in reverse order.
710 LoadWordDisp(TargetReg(kSp), 0, TargetReg(kLr));
711 OpRegImm(kOpAdd, TargetReg(kSp), spill_size);
712 ClobberCallerSave();
713 r_tgt = CallHelperSetup(func_offset); // Doesn't clobber LR.
714 DCHECK_NE(r_tgt, TargetReg(kLr));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700715 }
Brian Carlstrom60d7a652014-03-13 18:10:08 -0700716 CallHelper(r_tgt, func_offset, false /* MarkSafepointPC */, false /* UseLink */);
717 continue;
718 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700719 default:
720 LOG(FATAL) << "Unexpected throw kind: " << lab->operands[0];
721 }
Vladimir Marko31c2aac2013-12-09 16:31:19 +0000722 ClobberCallerSave();
Brian Carlstrom7940e442013-07-12 13:46:57 -0700723 int r_tgt = CallHelperSetup(func_offset);
Brian Carlstrom60d7a652014-03-13 18:10:08 -0700724 CallHelper(r_tgt, func_offset, true /* MarkSafepointPC */, true /* UseLink */);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700725 }
726}
727
Vladimir Markobe0e5462014-02-26 11:24:15 +0000728void Mir2Lir::GenIGet(MIR* mir, int opt_flags, OpSize size,
Brian Carlstrom7940e442013-07-12 13:46:57 -0700729 RegLocation rl_dest, RegLocation rl_obj, bool is_long_or_double,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700730 bool is_object) {
Vladimir Markobe0e5462014-02-26 11:24:15 +0000731 const MirIFieldLoweringInfo& field_info = mir_graph_->GetIFieldLoweringInfo(mir);
732 cu_->compiler_driver->ProcessedInstanceField(field_info.FastGet());
733 if (field_info.FastGet() && !SLOW_FIELD_PATH) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700734 RegLocation rl_result;
735 RegisterClass reg_class = oat_reg_class_by_size(size);
Vladimir Markobe0e5462014-02-26 11:24:15 +0000736 DCHECK_GE(field_info.FieldOffset().Int32Value(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700737 rl_obj = LoadValue(rl_obj, kCoreReg);
738 if (is_long_or_double) {
739 DCHECK(rl_dest.wide);
Dave Allisonb373e092014-02-20 16:06:36 -0800740 GenNullCheck(rl_obj.reg.GetReg(), opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700741 if (cu_->instruction_set == kX86) {
742 rl_result = EvalLoc(rl_dest, reg_class, true);
Dave Allisonb373e092014-02-20 16:06:36 -0800743 GenNullCheck(rl_obj.reg.GetReg(), opt_flags);
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000744 LoadBaseDispWide(rl_obj.reg.GetReg(), field_info.FieldOffset().Int32Value(),
Dave Allisonb373e092014-02-20 16:06:36 -0800745 rl_result.reg.GetReg(),
746 rl_result.reg.GetHighReg(), rl_obj.s_reg_low);
747 MarkPossibleNullPointerException(opt_flags);
Vladimir Markobe0e5462014-02-26 11:24:15 +0000748 if (field_info.IsVolatile()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700749 GenMemBarrier(kLoadLoad);
750 }
751 } else {
752 int reg_ptr = AllocTemp();
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000753 OpRegRegImm(kOpAdd, reg_ptr, rl_obj.reg.GetReg(), field_info.FieldOffset().Int32Value());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700754 rl_result = EvalLoc(rl_dest, reg_class, true);
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000755 LoadBaseDispWide(reg_ptr, 0, rl_result.reg.GetReg(), rl_result.reg.GetHighReg(),
756 INVALID_SREG);
Vladimir Markobe0e5462014-02-26 11:24:15 +0000757 if (field_info.IsVolatile()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700758 GenMemBarrier(kLoadLoad);
759 }
760 FreeTemp(reg_ptr);
761 }
762 StoreValueWide(rl_dest, rl_result);
763 } else {
764 rl_result = EvalLoc(rl_dest, reg_class, true);
Dave Allisonb373e092014-02-20 16:06:36 -0800765 GenNullCheck(rl_obj.reg.GetReg(), opt_flags);
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000766 LoadBaseDisp(rl_obj.reg.GetReg(), field_info.FieldOffset().Int32Value(),
767 rl_result.reg.GetReg(), kWord, rl_obj.s_reg_low);
Dave Allisonb373e092014-02-20 16:06:36 -0800768 MarkPossibleNullPointerException(opt_flags);
Vladimir Markobe0e5462014-02-26 11:24:15 +0000769 if (field_info.IsVolatile()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700770 GenMemBarrier(kLoadLoad);
771 }
772 StoreValue(rl_dest, rl_result);
773 }
774 } else {
Ian Rogers848871b2013-08-05 10:56:33 -0700775 ThreadOffset getterOffset =
776 is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pGet64Instance)
777 : (is_object ? QUICK_ENTRYPOINT_OFFSET(pGetObjInstance)
778 : QUICK_ENTRYPOINT_OFFSET(pGet32Instance));
Vladimir Markobe0e5462014-02-26 11:24:15 +0000779 CallRuntimeHelperImmRegLocation(getterOffset, field_info.FieldIndex(), rl_obj, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700780 if (is_long_or_double) {
781 RegLocation rl_result = GetReturnWide(rl_dest.fp);
782 StoreValueWide(rl_dest, rl_result);
783 } else {
784 RegLocation rl_result = GetReturn(rl_dest.fp);
785 StoreValue(rl_dest, rl_result);
786 }
787 }
788}
789
Vladimir Markobe0e5462014-02-26 11:24:15 +0000790void Mir2Lir::GenIPut(MIR* mir, int opt_flags, OpSize size,
Brian Carlstrom7940e442013-07-12 13:46:57 -0700791 RegLocation rl_src, RegLocation rl_obj, bool is_long_or_double,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700792 bool is_object) {
Vladimir Markobe0e5462014-02-26 11:24:15 +0000793 const MirIFieldLoweringInfo& field_info = mir_graph_->GetIFieldLoweringInfo(mir);
794 cu_->compiler_driver->ProcessedInstanceField(field_info.FastPut());
795 if (field_info.FastPut() && !SLOW_FIELD_PATH) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700796 RegisterClass reg_class = oat_reg_class_by_size(size);
Vladimir Markobe0e5462014-02-26 11:24:15 +0000797 DCHECK_GE(field_info.FieldOffset().Int32Value(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700798 rl_obj = LoadValue(rl_obj, kCoreReg);
799 if (is_long_or_double) {
800 int reg_ptr;
801 rl_src = LoadValueWide(rl_src, kAnyReg);
Dave Allisonb373e092014-02-20 16:06:36 -0800802 GenNullCheck(rl_obj.reg.GetReg(), opt_flags);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700803 reg_ptr = AllocTemp();
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000804 OpRegRegImm(kOpAdd, reg_ptr, rl_obj.reg.GetReg(), field_info.FieldOffset().Int32Value());
Vladimir Markobe0e5462014-02-26 11:24:15 +0000805 if (field_info.IsVolatile()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700806 GenMemBarrier(kStoreStore);
807 }
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000808 StoreBaseDispWide(reg_ptr, 0, rl_src.reg.GetReg(), rl_src.reg.GetHighReg());
Dave Allisonb373e092014-02-20 16:06:36 -0800809 MarkPossibleNullPointerException(opt_flags);
Vladimir Markobe0e5462014-02-26 11:24:15 +0000810 if (field_info.IsVolatile()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700811 GenMemBarrier(kLoadLoad);
812 }
813 FreeTemp(reg_ptr);
814 } else {
815 rl_src = LoadValue(rl_src, reg_class);
Dave Allisonb373e092014-02-20 16:06:36 -0800816 GenNullCheck(rl_obj.reg.GetReg(), opt_flags);
Vladimir Markobe0e5462014-02-26 11:24:15 +0000817 if (field_info.IsVolatile()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700818 GenMemBarrier(kStoreStore);
819 }
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000820 StoreBaseDisp(rl_obj.reg.GetReg(), field_info.FieldOffset().Int32Value(),
Dave Allisonb373e092014-02-20 16:06:36 -0800821 rl_src.reg.GetReg(), kWord);
822 MarkPossibleNullPointerException(opt_flags);
Vladimir Markobe0e5462014-02-26 11:24:15 +0000823 if (field_info.IsVolatile()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700824 GenMemBarrier(kLoadLoad);
825 }
826 if (is_object && !mir_graph_->IsConstantNullRef(rl_src)) {
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000827 MarkGCCard(rl_src.reg.GetReg(), rl_obj.reg.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700828 }
829 }
830 } else {
Ian Rogers848871b2013-08-05 10:56:33 -0700831 ThreadOffset setter_offset =
832 is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pSet64Instance)
833 : (is_object ? QUICK_ENTRYPOINT_OFFSET(pSetObjInstance)
834 : QUICK_ENTRYPOINT_OFFSET(pSet32Instance));
Vladimir Markobe0e5462014-02-26 11:24:15 +0000835 CallRuntimeHelperImmRegLocationRegLocation(setter_offset, field_info.FieldIndex(),
836 rl_obj, rl_src, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700837 }
838}
839
Ian Rogersa9a82542013-10-04 11:17:26 -0700840void Mir2Lir::GenArrayObjPut(int opt_flags, RegLocation rl_array, RegLocation rl_index,
841 RegLocation rl_src) {
842 bool needs_range_check = !(opt_flags & MIR_IGNORE_RANGE_CHECK);
843 bool needs_null_check = !((cu_->disable_opt & (1 << kNullCheckElimination)) &&
844 (opt_flags & MIR_IGNORE_NULL_CHECK));
845 ThreadOffset helper = needs_range_check
846 ? (needs_null_check ? QUICK_ENTRYPOINT_OFFSET(pAputObjectWithNullAndBoundCheck)
847 : QUICK_ENTRYPOINT_OFFSET(pAputObjectWithBoundCheck))
848 : QUICK_ENTRYPOINT_OFFSET(pAputObject);
849 CallRuntimeHelperRegLocationRegLocationRegLocation(helper, rl_array, rl_index, rl_src, true);
850}
851
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700852void Mir2Lir::GenConstClass(uint32_t type_idx, RegLocation rl_dest) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700853 RegLocation rl_method = LoadCurrMethod();
854 int res_reg = AllocTemp();
855 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
856 if (!cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx,
857 *cu_->dex_file,
858 type_idx)) {
859 // Call out to helper which resolves type and verifies access.
860 // Resolved type returned in kRet0.
Ian Rogers848871b2013-08-05 10:56:33 -0700861 CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(pInitializeTypeAndVerifyAccess),
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000862 type_idx, rl_method.reg.GetReg(), true);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700863 RegLocation rl_result = GetReturn(false);
864 StoreValue(rl_dest, rl_result);
865 } else {
866 // We're don't need access checks, load type from dex cache
867 int32_t dex_cache_offset =
Brian Carlstromea46f952013-07-30 01:26:50 -0700868 mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value();
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000869 LoadWordDisp(rl_method.reg.GetReg(), dex_cache_offset, res_reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700870 int32_t offset_of_type =
871 mirror::Array::DataOffset(sizeof(mirror::Class*)).Int32Value() + (sizeof(mirror::Class*)
872 * type_idx);
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000873 LoadWordDisp(res_reg, offset_of_type, rl_result.reg.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700874 if (!cu_->compiler_driver->CanAssumeTypeIsPresentInDexCache(*cu_->dex_file,
875 type_idx) || SLOW_TYPE_PATH) {
876 // Slow path, at runtime test if type is null and if so initialize
877 FlushAllRegs();
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000878 LIR* branch = OpCmpImmBranch(kCondEq, rl_result.reg.GetReg(), 0, NULL);
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800879 LIR* cont = NewLIR0(kPseudoTargetLabel);
880
881 // Object to generate the slow path for class resolution.
882 class SlowPath : public LIRSlowPath {
883 public:
884 SlowPath(Mir2Lir* m2l, LIR* fromfast, LIR* cont, const int type_idx,
885 const RegLocation& rl_method, const RegLocation& rl_result) :
886 LIRSlowPath(m2l, m2l->GetCurrentDexPc(), fromfast, cont), type_idx_(type_idx),
887 rl_method_(rl_method), rl_result_(rl_result) {
888 }
889
890 void Compile() {
891 GenerateTargetLabel();
892
893 m2l_->CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(pInitializeType), type_idx_,
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000894 rl_method_.reg.GetReg(), true);
895 m2l_->OpRegCopy(rl_result_.reg.GetReg(), m2l_->TargetReg(kRet0));
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800896
897 m2l_->OpUnconditionalBranch(cont_);
898 }
899
900 private:
901 const int type_idx_;
902 const RegLocation rl_method_;
903 const RegLocation rl_result_;
904 };
905
906 // Add to list for future.
907 AddSlowPath(new (arena_) SlowPath(this, branch, cont,
908 type_idx, rl_method, rl_result));
909
Brian Carlstrom7940e442013-07-12 13:46:57 -0700910 StoreValue(rl_dest, rl_result);
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800911 } else {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700912 // Fast path, we're done - just store result
913 StoreValue(rl_dest, rl_result);
914 }
915 }
916}
917
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700918void Mir2Lir::GenConstString(uint32_t string_idx, RegLocation rl_dest) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700919 /* NOTE: Most strings should be available at compile time */
920 int32_t offset_of_string = mirror::Array::DataOffset(sizeof(mirror::String*)).Int32Value() +
921 (sizeof(mirror::String*) * string_idx);
922 if (!cu_->compiler_driver->CanAssumeStringIsPresentInDexCache(
923 *cu_->dex_file, string_idx) || SLOW_STRING_PATH) {
924 // slow path, resolve string if not in dex cache
925 FlushAllRegs();
Brian Carlstrom7934ac22013-07-26 10:54:15 -0700926 LockCallTemps(); // Using explicit registers
Mark Mendell766e9292014-01-27 07:55:47 -0800927
928 // If the Method* is already in a register, we can save a copy.
929 RegLocation rl_method = mir_graph_->GetMethodLoc();
930 int r_method;
931 if (rl_method.location == kLocPhysReg) {
932 // A temp would conflict with register use below.
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000933 DCHECK(!IsTemp(rl_method.reg.GetReg()));
934 r_method = rl_method.reg.GetReg();
Mark Mendell766e9292014-01-27 07:55:47 -0800935 } else {
936 r_method = TargetReg(kArg2);
937 LoadCurrMethodDirect(r_method);
938 }
939 LoadWordDisp(r_method, mirror::ArtMethod::DexCacheStringsOffset().Int32Value(),
940 TargetReg(kArg0));
941
Brian Carlstrom7940e442013-07-12 13:46:57 -0700942 // Might call out to helper, which will return resolved string in kRet0
Brian Carlstrom7940e442013-07-12 13:46:57 -0700943 LoadWordDisp(TargetReg(kArg0), offset_of_string, TargetReg(kRet0));
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800944 if (cu_->instruction_set == kThumb2 ||
945 cu_->instruction_set == kMips) {
946 // OpRegImm(kOpCmp, TargetReg(kRet0), 0); // Is resolved?
Mark Mendell766e9292014-01-27 07:55:47 -0800947 LoadConstant(TargetReg(kArg1), string_idx);
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800948 LIR* fromfast = OpCmpImmBranch(kCondEq, TargetReg(kRet0), 0, NULL);
949 LIR* cont = NewLIR0(kPseudoTargetLabel);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700950 GenBarrier();
Mark Mendell766e9292014-01-27 07:55:47 -0800951
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800952 // Object to generate the slow path for string resolution.
953 class SlowPath : public LIRSlowPath {
954 public:
955 SlowPath(Mir2Lir* m2l, LIR* fromfast, LIR* cont, int r_method) :
956 LIRSlowPath(m2l, m2l->GetCurrentDexPc(), fromfast, cont), r_method_(r_method) {
957 }
958
959 void Compile() {
960 GenerateTargetLabel();
961
962 int r_tgt = m2l_->CallHelperSetup(QUICK_ENTRYPOINT_OFFSET(pResolveString));
963
964 m2l_->OpRegCopy(m2l_->TargetReg(kArg0), r_method_); // .eq
965 LIR* call_inst = m2l_->OpReg(kOpBlx, r_tgt);
966 m2l_->MarkSafepointPC(call_inst);
967 m2l_->FreeTemp(r_tgt);
968
969 m2l_->OpUnconditionalBranch(cont_);
970 }
971
972 private:
973 int r_method_;
974 };
975
976 // Add to list for future.
977 AddSlowPath(new (arena_) SlowPath(this, fromfast, cont, r_method));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700978 } else {
979 DCHECK_EQ(cu_->instruction_set, kX86);
Mark Mendell766e9292014-01-27 07:55:47 -0800980 LIR* branch = OpCmpImmBranch(kCondNe, TargetReg(kRet0), 0, NULL);
981 LoadConstant(TargetReg(kArg1), string_idx);
982 CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(pResolveString), r_method,
Ian Rogers7655f292013-07-29 11:07:13 -0700983 TargetReg(kArg1), true);
Mark Mendell766e9292014-01-27 07:55:47 -0800984 LIR* target = NewLIR0(kPseudoTargetLabel);
985 branch->target = target;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700986 }
987 GenBarrier();
988 StoreValue(rl_dest, GetReturn(false));
989 } else {
990 RegLocation rl_method = LoadCurrMethod();
991 int res_reg = AllocTemp();
992 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000993 LoadWordDisp(rl_method.reg.GetReg(),
Brian Carlstromea46f952013-07-30 01:26:50 -0700994 mirror::ArtMethod::DexCacheStringsOffset().Int32Value(), res_reg);
Bill Buzbee00e1ec62014-02-27 23:44:13 +0000995 LoadWordDisp(res_reg, offset_of_string, rl_result.reg.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700996 StoreValue(rl_dest, rl_result);
997 }
998}
999
1000/*
1001 * Let helper function take care of everything. Will
1002 * call Class::NewInstanceFromCode(type_idx, method);
1003 */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001004void Mir2Lir::GenNewInstance(uint32_t type_idx, RegLocation rl_dest) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001005 FlushAllRegs(); /* Everything to home location */
1006 // alloc will always check for resolution, do we also need to verify
1007 // access because the verifier was unable to?
Ian Rogers848871b2013-08-05 10:56:33 -07001008 ThreadOffset func_offset(-1);
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -08001009 const DexFile* dex_file = cu_->dex_file;
1010 CompilerDriver* driver = cu_->compiler_driver;
1011 if (driver->CanAccessInstantiableTypeWithoutChecks(
1012 cu_->method_idx, *dex_file, type_idx)) {
1013 bool is_type_initialized;
1014 bool use_direct_type_ptr;
1015 uintptr_t direct_type_ptr;
1016 if (kEmbedClassInCode &&
1017 driver->CanEmbedTypeInCode(*dex_file, type_idx,
1018 &is_type_initialized, &use_direct_type_ptr, &direct_type_ptr)) {
1019 // The fast path.
1020 if (!use_direct_type_ptr) {
Mark Mendell55d0eac2014-02-06 11:02:52 -08001021 LoadClassType(type_idx, kArg0);
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -08001022 if (!is_type_initialized) {
1023 func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocObjectResolved);
1024 CallRuntimeHelperRegMethod(func_offset, TargetReg(kArg0), true);
1025 } else {
1026 func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocObjectInitialized);
1027 CallRuntimeHelperRegMethod(func_offset, TargetReg(kArg0), true);
1028 }
1029 } else {
1030 // Use the direct pointer.
1031 if (!is_type_initialized) {
1032 func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocObjectResolved);
1033 CallRuntimeHelperImmMethod(func_offset, direct_type_ptr, true);
1034 } else {
1035 func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocObjectInitialized);
1036 CallRuntimeHelperImmMethod(func_offset, direct_type_ptr, true);
1037 }
1038 }
1039 } else {
1040 // The slow path.
1041 DCHECK_EQ(func_offset.Int32Value(), -1);
1042 func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocObject);
1043 CallRuntimeHelperImmMethod(func_offset, type_idx, true);
1044 }
1045 DCHECK_NE(func_offset.Int32Value(), -1);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001046 } else {
Ian Rogers848871b2013-08-05 10:56:33 -07001047 func_offset = QUICK_ENTRYPOINT_OFFSET(pAllocObjectWithAccessCheck);
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -08001048 CallRuntimeHelperImmMethod(func_offset, type_idx, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001049 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001050 RegLocation rl_result = GetReturn(false);
1051 StoreValue(rl_dest, rl_result);
1052}
1053
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001054void Mir2Lir::GenThrow(RegLocation rl_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001055 FlushAllRegs();
Ian Rogers7655f292013-07-29 11:07:13 -07001056 CallRuntimeHelperRegLocation(QUICK_ENTRYPOINT_OFFSET(pDeliverException), rl_src, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001057}
1058
1059// For final classes there are no sub-classes to check and so we can answer the instance-of
1060// question with simple comparisons.
1061void Mir2Lir::GenInstanceofFinal(bool use_declaring_class, uint32_t type_idx, RegLocation rl_dest,
1062 RegLocation rl_src) {
Mark Mendelldf8ee2e2014-01-27 16:37:47 -08001063 // X86 has its own implementation.
1064 DCHECK_NE(cu_->instruction_set, kX86);
1065
Brian Carlstrom7940e442013-07-12 13:46:57 -07001066 RegLocation object = LoadValue(rl_src, kCoreReg);
1067 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001068 int result_reg = rl_result.reg.GetReg();
1069 if (result_reg == object.reg.GetReg()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001070 result_reg = AllocTypedTemp(false, kCoreReg);
1071 }
1072 LoadConstant(result_reg, 0); // assume false
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001073 LIR* null_branchover = OpCmpImmBranch(kCondEq, object.reg.GetReg(), 0, NULL);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001074
1075 int check_class = AllocTypedTemp(false, kCoreReg);
1076 int object_class = AllocTypedTemp(false, kCoreReg);
1077
1078 LoadCurrMethodDirect(check_class);
1079 if (use_declaring_class) {
Brian Carlstromea46f952013-07-30 01:26:50 -07001080 LoadWordDisp(check_class, mirror::ArtMethod::DeclaringClassOffset().Int32Value(),
Brian Carlstrom7940e442013-07-12 13:46:57 -07001081 check_class);
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001082 LoadWordDisp(object.reg.GetReg(), mirror::Object::ClassOffset().Int32Value(), object_class);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001083 } else {
Brian Carlstromea46f952013-07-30 01:26:50 -07001084 LoadWordDisp(check_class, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(),
Brian Carlstrom7940e442013-07-12 13:46:57 -07001085 check_class);
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001086 LoadWordDisp(object.reg.GetReg(), mirror::Object::ClassOffset().Int32Value(), object_class);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001087 int32_t offset_of_type =
1088 mirror::Array::DataOffset(sizeof(mirror::Class*)).Int32Value() +
1089 (sizeof(mirror::Class*) * type_idx);
1090 LoadWordDisp(check_class, offset_of_type, check_class);
1091 }
1092
1093 LIR* ne_branchover = NULL;
1094 if (cu_->instruction_set == kThumb2) {
1095 OpRegReg(kOpCmp, check_class, object_class); // Same?
1096 OpIT(kCondEq, ""); // if-convert the test
1097 LoadConstant(result_reg, 1); // .eq case - load true
1098 } else {
1099 ne_branchover = OpCmpBranch(kCondNe, check_class, object_class, NULL);
1100 LoadConstant(result_reg, 1); // eq case - load true
1101 }
1102 LIR* target = NewLIR0(kPseudoTargetLabel);
1103 null_branchover->target = target;
1104 if (ne_branchover != NULL) {
1105 ne_branchover->target = target;
1106 }
1107 FreeTemp(object_class);
1108 FreeTemp(check_class);
1109 if (IsTemp(result_reg)) {
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001110 OpRegCopy(rl_result.reg.GetReg(), result_reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001111 FreeTemp(result_reg);
1112 }
1113 StoreValue(rl_dest, rl_result);
1114}
1115
/*
 * General instance-of: load the target Class* (via access-check helper,
 * declaring class, or dex cache with optional runtime-resolution hop), then
 * compare against the object's klass_, calling pInstanceofNonTrivial when a
 * simple pointer equality cannot decide.  A null reference yields false.
 */
void Mir2Lir::GenInstanceofCallingHelper(bool needs_access_check, bool type_known_final,
                                         bool type_known_abstract, bool use_declaring_class,
                                         bool can_assume_type_is_in_dex_cache,
                                         uint32_t type_idx, RegLocation rl_dest,
                                         RegLocation rl_src) {
  // X86 has its own implementation.
  DCHECK_NE(cu_->instruction_set, kX86);

  FlushAllRegs();
  // May generate a call - use explicit registers
  LockCallTemps();
  LoadCurrMethodDirect(TargetReg(kArg1));  // kArg1 <= current Method*
  int class_reg = TargetReg(kArg2);  // kArg2 will hold the Class*
  if (needs_access_check) {
    // Check we have access to type_idx and if not throw IllegalAccessError,
    // returns Class* in kArg0
    CallRuntimeHelperImm(QUICK_ENTRYPOINT_OFFSET(pInitializeTypeAndVerifyAccess),
                         type_idx, true);
    OpRegCopy(class_reg, TargetReg(kRet0));  // Align usage with fast path
    LoadValueDirectFixed(rl_src, TargetReg(kArg0));  // kArg0 <= ref
  } else if (use_declaring_class) {
    LoadValueDirectFixed(rl_src, TargetReg(kArg0));  // kArg0 <= ref
    LoadWordDisp(TargetReg(kArg1),
                 mirror::ArtMethod::DeclaringClassOffset().Int32Value(), class_reg);
  } else {
    // Load dex cache entry into class_reg (kArg2)
    LoadValueDirectFixed(rl_src, TargetReg(kArg0));  // kArg0 <= ref
    LoadWordDisp(TargetReg(kArg1),
                 mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(), class_reg);
    int32_t offset_of_type =
        mirror::Array::DataOffset(sizeof(mirror::Class*)).Int32Value() + (sizeof(mirror::Class*)
        * type_idx);
    LoadWordDisp(class_reg, offset_of_type, class_reg);
    if (!can_assume_type_is_in_dex_cache) {
      // Need to test presence of type in dex cache at runtime
      LIR* hop_branch = OpCmpImmBranch(kCondNe, class_reg, 0, NULL);
      // Not resolved
      // Call out to helper, which will return resolved type in kRet0
      CallRuntimeHelperImm(QUICK_ENTRYPOINT_OFFSET(pInitializeType), type_idx, true);
      OpRegCopy(TargetReg(kArg2), TargetReg(kRet0));  // Align usage with fast path
      LoadValueDirectFixed(rl_src, TargetReg(kArg0));  /* reload Ref */
      // Rejoin code paths
      LIR* hop_target = NewLIR0(kPseudoTargetLabel);
      hop_branch->target = hop_target;
    }
  }
  /* kArg0 is ref, kArg2 is class. If ref==null, use directly as bool result */
  RegLocation rl_result = GetReturn(false);
  if (cu_->instruction_set == kMips) {
    // On MIPS rArg0 != rl_result, place false in result if branch is taken.
    LoadConstant(rl_result.reg.GetReg(), 0);
  }
  LIR* branch1 = OpCmpImmBranch(kCondEq, TargetReg(kArg0), 0, NULL);

  /* load object->klass_ */
  DCHECK_EQ(mirror::Object::ClassOffset().Int32Value(), 0);
  LoadWordDisp(TargetReg(kArg0), mirror::Object::ClassOffset().Int32Value(), TargetReg(kArg1));
  /* kArg0 is ref, kArg1 is ref->klass_, kArg2 is class */
  LIR* branchover = NULL;
  if (type_known_final) {
    // Final type: pointer equality fully decides the question.
    // rl_result == ref == null == 0.
    if (cu_->instruction_set == kThumb2) {
      OpRegReg(kOpCmp, TargetReg(kArg1), TargetReg(kArg2));  // Same?
      OpIT(kCondEq, "E");   // if-convert the test
      LoadConstant(rl_result.reg.GetReg(), 1);     // .eq case - load true
      LoadConstant(rl_result.reg.GetReg(), 0);     // .ne case - load false
    } else {
      LoadConstant(rl_result.reg.GetReg(), 0);     // ne case - load false
      branchover = OpCmpBranch(kCondNe, TargetReg(kArg1), TargetReg(kArg2), NULL);
      LoadConstant(rl_result.reg.GetReg(), 1);     // eq case - load true
    }
  } else {
    // Non-final: equal classes are a quick "true"; otherwise ask the runtime.
    if (cu_->instruction_set == kThumb2) {
      int r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(pInstanceofNonTrivial));
      if (!type_known_abstract) {
        /* Uses conditional nullification */
        OpRegReg(kOpCmp, TargetReg(kArg1), TargetReg(kArg2));  // Same?
        OpIT(kCondEq, "EE");   // if-convert the test
        LoadConstant(TargetReg(kArg0), 1);     // .eq case - load true
      }
      OpRegCopy(TargetReg(kArg0), TargetReg(kArg2));    // .ne case - arg0 <= class
      OpReg(kOpBlx, r_tgt);    // .ne case: helper(class, ref->class)
      FreeTemp(r_tgt);
    } else {
      if (!type_known_abstract) {
        /* Uses branchovers */
        LoadConstant(rl_result.reg.GetReg(), 1);     // assume true
        branchover = OpCmpBranch(kCondEq, TargetReg(kArg1), TargetReg(kArg2), NULL);
      }
      int r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(pInstanceofNonTrivial));
      OpRegCopy(TargetReg(kArg0), TargetReg(kArg2));    // .ne case - arg0 <= class
      OpReg(kOpBlx, r_tgt);    // .ne case: helper(class, ref->class)
      FreeTemp(r_tgt);
    }
  }
  // TODO: only clobber when type isn't final?
  ClobberCallerSave();
  /* branch targets here */
  LIR* target = NewLIR0(kPseudoTargetLabel);
  StoreValue(rl_dest, rl_result);
  branch1->target = target;
  if (branchover != NULL) {
    branchover->target = target;
  }
}
1221
1222void Mir2Lir::GenInstanceof(uint32_t type_idx, RegLocation rl_dest, RegLocation rl_src) {
1223 bool type_known_final, type_known_abstract, use_declaring_class;
1224 bool needs_access_check = !cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx,
1225 *cu_->dex_file,
1226 type_idx,
1227 &type_known_final,
1228 &type_known_abstract,
1229 &use_declaring_class);
1230 bool can_assume_type_is_in_dex_cache = !needs_access_check &&
1231 cu_->compiler_driver->CanAssumeTypeIsPresentInDexCache(*cu_->dex_file, type_idx);
1232
1233 if ((use_declaring_class || can_assume_type_is_in_dex_cache) && type_known_final) {
1234 GenInstanceofFinal(use_declaring_class, type_idx, rl_dest, rl_src);
1235 } else {
1236 GenInstanceofCallingHelper(needs_access_check, type_known_final, type_known_abstract,
1237 use_declaring_class, can_assume_type_is_in_dex_cache,
1238 type_idx, rl_dest, rl_src);
1239 }
1240}
1241
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001242void Mir2Lir::GenCheckCast(uint32_t insn_idx, uint32_t type_idx, RegLocation rl_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001243 bool type_known_final, type_known_abstract, use_declaring_class;
1244 bool needs_access_check = !cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx,
1245 *cu_->dex_file,
1246 type_idx,
1247 &type_known_final,
1248 &type_known_abstract,
1249 &use_declaring_class);
1250 // Note: currently type_known_final is unused, as optimizing will only improve the performance
1251 // of the exception throw path.
1252 DexCompilationUnit* cu = mir_graph_->GetCurrentDexCompilationUnit();
Vladimir Marko2730db02014-01-27 11:15:17 +00001253 if (!needs_access_check && cu_->compiler_driver->IsSafeCast(cu, insn_idx)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001254 // Verifier type analysis proved this check cast would never cause an exception.
1255 return;
1256 }
1257 FlushAllRegs();
1258 // May generate a call - use explicit registers
1259 LockCallTemps();
1260 LoadCurrMethodDirect(TargetReg(kArg1)); // kArg1 <= current Method*
1261 int class_reg = TargetReg(kArg2); // kArg2 will hold the Class*
1262 if (needs_access_check) {
1263 // Check we have access to type_idx and if not throw IllegalAccessError,
1264 // returns Class* in kRet0
1265 // InitializeTypeAndVerifyAccess(idx, method)
Ian Rogers848871b2013-08-05 10:56:33 -07001266 CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(pInitializeTypeAndVerifyAccess),
Brian Carlstrom7940e442013-07-12 13:46:57 -07001267 type_idx, TargetReg(kArg1), true);
1268 OpRegCopy(class_reg, TargetReg(kRet0)); // Align usage with fast path
1269 } else if (use_declaring_class) {
1270 LoadWordDisp(TargetReg(kArg1),
Brian Carlstromea46f952013-07-30 01:26:50 -07001271 mirror::ArtMethod::DeclaringClassOffset().Int32Value(), class_reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001272 } else {
1273 // Load dex cache entry into class_reg (kArg2)
1274 LoadWordDisp(TargetReg(kArg1),
Brian Carlstromea46f952013-07-30 01:26:50 -07001275 mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(), class_reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001276 int32_t offset_of_type =
1277 mirror::Array::DataOffset(sizeof(mirror::Class*)).Int32Value() +
1278 (sizeof(mirror::Class*) * type_idx);
1279 LoadWordDisp(class_reg, offset_of_type, class_reg);
1280 if (!cu_->compiler_driver->CanAssumeTypeIsPresentInDexCache(*cu_->dex_file, type_idx)) {
1281 // Need to test presence of type in dex cache at runtime
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001282 LIR* hop_branch = OpCmpImmBranch(kCondEq, class_reg, 0, NULL);
1283 LIR* cont = NewLIR0(kPseudoTargetLabel);
1284
1285 // Slow path to initialize the type. Executed if the type is NULL.
1286 class SlowPath : public LIRSlowPath {
1287 public:
1288 SlowPath(Mir2Lir* m2l, LIR* fromfast, LIR* cont, const int type_idx,
1289 const int class_reg) :
1290 LIRSlowPath(m2l, m2l->GetCurrentDexPc(), fromfast, cont), type_idx_(type_idx),
1291 class_reg_(class_reg) {
1292 }
1293
1294 void Compile() {
1295 GenerateTargetLabel();
1296
1297 // Call out to helper, which will return resolved type in kArg0
1298 // InitializeTypeFromCode(idx, method)
1299 m2l_->CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(pInitializeType), type_idx_,
1300 m2l_->TargetReg(kArg1), true);
1301 m2l_->OpRegCopy(class_reg_, m2l_->TargetReg(kRet0)); // Align usage with fast path
1302 m2l_->OpUnconditionalBranch(cont_);
1303 }
1304 public:
1305 const int type_idx_;
1306 const int class_reg_;
1307 };
1308
1309 AddSlowPath(new (arena_) SlowPath(this, hop_branch, cont,
1310 type_idx, class_reg));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001311 }
1312 }
1313 // At this point, class_reg (kArg2) has class
1314 LoadValueDirectFixed(rl_src, TargetReg(kArg0)); // kArg0 <= ref
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001315
1316 // Slow path for the case where the classes are not equal. In this case we need
1317 // to call a helper function to do the check.
1318 class SlowPath : public LIRSlowPath {
1319 public:
1320 SlowPath(Mir2Lir* m2l, LIR* fromfast, LIR* cont, bool load):
1321 LIRSlowPath(m2l, m2l->GetCurrentDexPc(), fromfast, cont), load_(load) {
1322 }
1323
1324 void Compile() {
1325 GenerateTargetLabel();
1326
1327 if (load_) {
1328 m2l_->LoadWordDisp(m2l_->TargetReg(kArg0), mirror::Object::ClassOffset().Int32Value(),
1329 m2l_->TargetReg(kArg1));
1330 }
1331 m2l_->CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(pCheckCast), m2l_->TargetReg(kArg2),
1332 m2l_->TargetReg(kArg1), true);
1333
1334 m2l_->OpUnconditionalBranch(cont_);
1335 }
1336
1337 private:
1338 bool load_;
1339 };
1340
1341 if (type_known_abstract) {
1342 // Easier case, run slow path if target is non-null (slow path will load from target)
1343 LIR* branch = OpCmpImmBranch(kCondNe, TargetReg(kArg0), 0, NULL);
1344 LIR* cont = NewLIR0(kPseudoTargetLabel);
1345 AddSlowPath(new (arena_) SlowPath(this, branch, cont, true));
1346 } else {
1347 // Harder, more common case. We need to generate a forward branch over the load
1348 // if the target is null. If it's non-null we perform the load and branch to the
1349 // slow path if the classes are not equal.
1350
1351 /* Null is OK - continue */
1352 LIR* branch1 = OpCmpImmBranch(kCondEq, TargetReg(kArg0), 0, NULL);
1353 /* load object->klass_ */
1354 DCHECK_EQ(mirror::Object::ClassOffset().Int32Value(), 0);
1355 LoadWordDisp(TargetReg(kArg0), mirror::Object::ClassOffset().Int32Value(),
1356 TargetReg(kArg1));
1357
1358 LIR* branch2 = OpCmpBranch(kCondNe, TargetReg(kArg1), class_reg, NULL);
1359 LIR* cont = NewLIR0(kPseudoTargetLabel);
1360
1361 // Add the slow path that will not perform load since this is already done.
1362 AddSlowPath(new (arena_) SlowPath(this, branch2, cont, false));
1363
1364 // Set the null check to branch to the continuation.
1365 branch1->target = cont;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001366 }
1367}
1368
/*
 * Emit a 64-bit operation composed of two 32-bit ops: 'first_op' combines the
 * low words and 'second_op' the high words (e.g. add/adc, sub/sbc, and/and).
 * The emission order (low op, high op, copy) is deliberate and must not be
 * changed - see the overlap handling below.
 */
void Mir2Lir::GenLong3Addr(OpKind first_op, OpKind second_op, RegLocation rl_dest,
                           RegLocation rl_src1, RegLocation rl_src2) {
  RegLocation rl_result;
  if (cu_->instruction_set == kThumb2) {
    /*
     * NOTE: This is the one place in the code in which we might have
     * as many as six live temporary registers.  There are 5 in the normal
     * set for Arm.  Until we have spill capabilities, temporarily add
     * lr to the temp set.  It is safe to do this locally, but note that
     * lr is used explicitly elsewhere in the code generator and cannot
     * normally be used as a general temp register.
     */
    MarkTemp(TargetReg(kLr));   // Add lr to the temp pool
    FreeTemp(TargetReg(kLr));   // and make it available
  }
  rl_src1 = LoadValueWide(rl_src1, kCoreReg);
  rl_src2 = LoadValueWide(rl_src2, kCoreReg);
  rl_result = EvalLoc(rl_dest, kCoreReg, true);
  // The longs may overlap - use intermediate temp if so
  if ((rl_result.reg.GetReg() == rl_src1.reg.GetHighReg()) || (rl_result.reg.GetReg() == rl_src2.reg.GetHighReg())) {
    // Result's low reg aliases a source high reg: compute the low word into a
    // temp first so the high-word op still sees the original source values.
    int t_reg = AllocTemp();
    OpRegRegReg(first_op, t_reg, rl_src1.reg.GetReg(), rl_src2.reg.GetReg());
    OpRegRegReg(second_op, rl_result.reg.GetHighReg(), rl_src1.reg.GetHighReg(), rl_src2.reg.GetHighReg());
    OpRegCopy(rl_result.reg.GetReg(), t_reg);
    FreeTemp(t_reg);
  } else {
    // No destructive overlap - emit low then high directly into the result.
    OpRegRegReg(first_op, rl_result.reg.GetReg(), rl_src1.reg.GetReg(), rl_src2.reg.GetReg());
    OpRegRegReg(second_op, rl_result.reg.GetHighReg(), rl_src1.reg.GetHighReg(),
                rl_src2.reg.GetHighReg());
  }
  /*
   * NOTE: If rl_dest refers to a frame variable in a large frame, the
   * following StoreValueWide might need to allocate a temp register.
   * To further work around the lack of a spill capability, explicitly
   * free any temps from rl_src1 & rl_src2 that aren't still live in rl_result.
   * Remove when spill is functional.
   */
  FreeRegLocTemps(rl_result, rl_src1);
  FreeRegLocTemps(rl_result, rl_src2);
  StoreValueWide(rl_dest, rl_result);
  if (cu_->instruction_set == kThumb2) {
    Clobber(TargetReg(kLr));
    UnmarkTemp(TargetReg(kLr));  // Remove lr from the temp pool
  }
}
1414
1415
1416void Mir2Lir::GenShiftOpLong(Instruction::Code opcode, RegLocation rl_dest,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001417 RegLocation rl_src1, RegLocation rl_shift) {
Ian Rogers848871b2013-08-05 10:56:33 -07001418 ThreadOffset func_offset(-1);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001419
1420 switch (opcode) {
1421 case Instruction::SHL_LONG:
1422 case Instruction::SHL_LONG_2ADDR:
Ian Rogers7655f292013-07-29 11:07:13 -07001423 func_offset = QUICK_ENTRYPOINT_OFFSET(pShlLong);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001424 break;
1425 case Instruction::SHR_LONG:
1426 case Instruction::SHR_LONG_2ADDR:
Ian Rogers7655f292013-07-29 11:07:13 -07001427 func_offset = QUICK_ENTRYPOINT_OFFSET(pShrLong);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001428 break;
1429 case Instruction::USHR_LONG:
1430 case Instruction::USHR_LONG_2ADDR:
Ian Rogers7655f292013-07-29 11:07:13 -07001431 func_offset = QUICK_ENTRYPOINT_OFFSET(pUshrLong);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001432 break;
1433 default:
1434 LOG(FATAL) << "Unexpected case";
1435 }
1436 FlushAllRegs(); /* Send everything to home location */
1437 CallRuntimeHelperRegLocationRegLocation(func_offset, rl_src1, rl_shift, false);
1438 RegLocation rl_result = GetReturnWide(false);
1439 StoreValueWide(rl_dest, rl_result);
1440}
1441
1442
/*
 * Generate code for a 32-bit integer arithmetic operation (unary, binary,
 * shift, div/rem).  Simple ops are emitted inline; div/rem uses a hardware
 * divide where available (Mips, or Thumb2 with SDIV) and otherwise calls the
 * pIdivmod runtime helper.  Not used on x86 (see DCHECK), which has its own
 * implementation.
 */
void Mir2Lir::GenArithOpInt(Instruction::Code opcode, RegLocation rl_dest,
                            RegLocation rl_src1, RegLocation rl_src2) {
  DCHECK_NE(cu_->instruction_set, kX86);
  OpKind op = kOpBkpt;
  bool is_div_rem = false;
  bool check_zero = false;   // true => emit a divide-by-zero check on rl_src2
  bool unary = false;
  RegLocation rl_result;
  bool shift_op = false;
  // Map the dex opcode onto a machine-independent OpKind and flags.
  switch (opcode) {
    case Instruction::NEG_INT:
      op = kOpNeg;
      unary = true;
      break;
    case Instruction::NOT_INT:
      op = kOpMvn;
      unary = true;
      break;
    case Instruction::ADD_INT:
    case Instruction::ADD_INT_2ADDR:
      op = kOpAdd;
      break;
    case Instruction::SUB_INT:
    case Instruction::SUB_INT_2ADDR:
      op = kOpSub;
      break;
    case Instruction::MUL_INT:
    case Instruction::MUL_INT_2ADDR:
      op = kOpMul;
      break;
    case Instruction::DIV_INT:
    case Instruction::DIV_INT_2ADDR:
      check_zero = true;
      op = kOpDiv;
      is_div_rem = true;
      break;
    /* NOTE: returns in kArg1 */
    case Instruction::REM_INT:
    case Instruction::REM_INT_2ADDR:
      check_zero = true;
      op = kOpRem;
      is_div_rem = true;
      break;
    case Instruction::AND_INT:
    case Instruction::AND_INT_2ADDR:
      op = kOpAnd;
      break;
    case Instruction::OR_INT:
    case Instruction::OR_INT_2ADDR:
      op = kOpOr;
      break;
    case Instruction::XOR_INT:
    case Instruction::XOR_INT_2ADDR:
      op = kOpXor;
      break;
    case Instruction::SHL_INT:
    case Instruction::SHL_INT_2ADDR:
      shift_op = true;
      op = kOpLsl;
      break;
    case Instruction::SHR_INT:
    case Instruction::SHR_INT_2ADDR:
      shift_op = true;
      op = kOpAsr;
      break;
    case Instruction::USHR_INT:
    case Instruction::USHR_INT_2ADDR:
      shift_op = true;
      op = kOpLsr;
      break;
    default:
      LOG(FATAL) << "Invalid word arith op: " << opcode;
  }
  if (!is_div_rem) {
    if (unary) {
      rl_src1 = LoadValue(rl_src1, kCoreReg);
      rl_result = EvalLoc(rl_dest, kCoreReg, true);
      OpRegReg(op, rl_result.reg.GetReg(), rl_src1.reg.GetReg());
    } else {
      if (shift_op) {
        // Dalvik shifts use only the low 5 bits of the shift amount.
        int t_reg = INVALID_REG;
        rl_src2 = LoadValue(rl_src2, kCoreReg);
        t_reg = AllocTemp();
        OpRegRegImm(kOpAnd, t_reg, rl_src2.reg.GetReg(), 31);
        rl_src1 = LoadValue(rl_src1, kCoreReg);
        rl_result = EvalLoc(rl_dest, kCoreReg, true);
        OpRegRegReg(op, rl_result.reg.GetReg(), rl_src1.reg.GetReg(), t_reg);
        FreeTemp(t_reg);
      } else {
        rl_src1 = LoadValue(rl_src1, kCoreReg);
        rl_src2 = LoadValue(rl_src2, kCoreReg);
        rl_result = EvalLoc(rl_dest, kCoreReg, true);
        OpRegRegReg(op, rl_result.reg.GetReg(), rl_src1.reg.GetReg(), rl_src2.reg.GetReg());
      }
    }
    StoreValue(rl_dest, rl_result);
  } else {
    bool done = false;      // Set to true if we happen to find a way to use a real instruction.
    if (cu_->instruction_set == kMips) {
      rl_src1 = LoadValue(rl_src1, kCoreReg);
      rl_src2 = LoadValue(rl_src2, kCoreReg);
      if (check_zero) {
          GenImmedCheck(kCondEq, rl_src2.reg.GetReg(), 0, kThrowDivZero);
      }
      rl_result = GenDivRem(rl_dest, rl_src1.reg.GetReg(), rl_src2.reg.GetReg(), op == kOpDiv);
      done = true;
    } else if (cu_->instruction_set == kThumb2) {
      if (cu_->GetInstructionSetFeatures().HasDivideInstruction()) {
        // Use ARM SDIV instruction for division.  For remainder we also need to
        // calculate using a MUL and subtract.
        rl_src1 = LoadValue(rl_src1, kCoreReg);
        rl_src2 = LoadValue(rl_src2, kCoreReg);
        if (check_zero) {
            GenImmedCheck(kCondEq, rl_src2.reg.GetReg(), 0, kThrowDivZero);
        }
        rl_result = GenDivRem(rl_dest, rl_src1.reg.GetReg(), rl_src2.reg.GetReg(), op == kOpDiv);
        done = true;
      }
    }

    // If we haven't already generated the code use the callout function.
    if (!done) {
      ThreadOffset func_offset = QUICK_ENTRYPOINT_OFFSET(pIdivmod);
      FlushAllRegs();   /* Send everything to home location */
      LoadValueDirectFixed(rl_src2, TargetReg(kArg1));
      int r_tgt = CallHelperSetup(func_offset);
      LoadValueDirectFixed(rl_src1, TargetReg(kArg0));
      if (check_zero) {
        GenImmedCheck(kCondEq, TargetReg(kArg1), 0, kThrowDivZero);
      }
      // NOTE: callout here is not a safepoint.
      CallHelper(r_tgt, func_offset, false /* not a safepoint */);
      if (op == kOpDiv)
        rl_result = GetReturn(false);
      else
        rl_result = GetReturnAlt();
    }
    StoreValue(rl_dest, rl_result);
  }
}
1583
1584/*
1585 * The following are the first-level codegen routines that analyze the format
1586 * of each bytecode then either dispatch special purpose codegen routines
1587 * or produce corresponding Thumb instructions directly.
1588 */
1589
// Reports whether 'x' has at most two set bits (popcount(x) <= 2).
static bool IsPopCountLE2(unsigned int x) {
  // Strip set bits one at a time (x &= x - 1 clears the lowest set bit);
  // fail as soon as a third bit is found.
  int bits_seen = 0;
  while (x != 0) {
    x &= x - 1;
    if (++bits_seen > 2) {
      return false;
    }
  }
  return true;
}
1595
// Returns true if it added instructions to 'cu' to divide 'rl_src' by 'lit'
// and store the result in 'rl_dest'.  Handles power-of-two divisors with a
// shift-based sequence (and, on Thumb2, other small literals via
// SmallLiteralDivRem).  Returns false when the caller must fall back to the
// general div/rem path.
bool Mir2Lir::HandleEasyDivRem(Instruction::Code dalvik_opcode, bool is_div,
                               RegLocation rl_src, RegLocation rl_dest, int lit) {
  // Only literals >= 2 are handled; non-powers-of-two only on Thumb2.
  if ((lit < 2) || ((cu_->instruction_set != kThumb2) && !IsPowerOfTwo(lit))) {
    return false;
  }
  // No divide instruction for Arm, so check for more special cases
  if ((cu_->instruction_set == kThumb2) && !IsPowerOfTwo(lit)) {
    return SmallLiteralDivRem(dalvik_opcode, is_div, rl_src, rl_dest, lit);
  }
  int k = LowestSetBit(lit);  // lit == 2^k here.
  if (k >= 30) {
    // Avoid special cases.
    return false;
  }
  rl_src = LoadValue(rl_src, kCoreReg);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  if (is_div) {
    int t_reg = AllocTemp();
    if (lit == 2) {
      // Division by 2 is by far the most common division by constant.
      OpRegRegImm(kOpLsr, t_reg, rl_src.reg.GetReg(), 32 - k);
      OpRegRegReg(kOpAdd, t_reg, t_reg, rl_src.reg.GetReg());
      OpRegRegImm(kOpAsr, rl_result.reg.GetReg(), t_reg, k);
    } else {
      // Round toward zero: add (2^k - 1) to negative dividends before the
      // arithmetic shift (bias derived from the sign via asr/lsr).
      OpRegRegImm(kOpAsr, t_reg, rl_src.reg.GetReg(), 31);
      OpRegRegImm(kOpLsr, t_reg, t_reg, 32 - k);
      OpRegRegReg(kOpAdd, t_reg, t_reg, rl_src.reg.GetReg());
      OpRegRegImm(kOpAsr, rl_result.reg.GetReg(), t_reg, k);
    }
  } else {
    // Remainder: bias, mask with (lit - 1), then remove the bias.
    int t_reg1 = AllocTemp();
    int t_reg2 = AllocTemp();
    if (lit == 2) {
      OpRegRegImm(kOpLsr, t_reg1, rl_src.reg.GetReg(), 32 - k);
      OpRegRegReg(kOpAdd, t_reg2, t_reg1, rl_src.reg.GetReg());
      OpRegRegImm(kOpAnd, t_reg2, t_reg2, lit -1);
      OpRegRegReg(kOpSub, rl_result.reg.GetReg(), t_reg2, t_reg1);
    } else {
      OpRegRegImm(kOpAsr, t_reg1, rl_src.reg.GetReg(), 31);
      OpRegRegImm(kOpLsr, t_reg1, t_reg1, 32 - k);
      OpRegRegReg(kOpAdd, t_reg2, t_reg1, rl_src.reg.GetReg());
      OpRegRegImm(kOpAnd, t_reg2, t_reg2, lit - 1);
      OpRegRegReg(kOpSub, rl_result.reg.GetReg(), t_reg2, t_reg1);
    }
  }
  StoreValue(rl_dest, rl_result);
  return true;
}
1646
1647// Returns true if it added instructions to 'cu' to multiply 'rl_src' by 'lit'
1648// and store the result in 'rl_dest'.
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001649bool Mir2Lir::HandleEasyMultiply(RegLocation rl_src, RegLocation rl_dest, int lit) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001650 // Can we simplify this multiplication?
1651 bool power_of_two = false;
1652 bool pop_count_le2 = false;
1653 bool power_of_two_minus_one = false;
1654 if (lit < 2) {
1655 // Avoid special cases.
1656 return false;
1657 } else if (IsPowerOfTwo(lit)) {
1658 power_of_two = true;
1659 } else if (IsPopCountLE2(lit)) {
1660 pop_count_le2 = true;
1661 } else if (IsPowerOfTwo(lit + 1)) {
1662 power_of_two_minus_one = true;
1663 } else {
1664 return false;
1665 }
1666 rl_src = LoadValue(rl_src, kCoreReg);
1667 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
1668 if (power_of_two) {
1669 // Shift.
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001670 OpRegRegImm(kOpLsl, rl_result.reg.GetReg(), rl_src.reg.GetReg(), LowestSetBit(lit));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001671 } else if (pop_count_le2) {
1672 // Shift and add and shift.
1673 int first_bit = LowestSetBit(lit);
1674 int second_bit = LowestSetBit(lit ^ (1 << first_bit));
1675 GenMultiplyByTwoBitMultiplier(rl_src, rl_result, lit, first_bit, second_bit);
1676 } else {
1677 // Reverse subtract: (src << (shift + 1)) - src.
1678 DCHECK(power_of_two_minus_one);
1679 // TUNING: rsb dst, src, src lsl#LowestSetBit(lit + 1)
1680 int t_reg = AllocTemp();
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001681 OpRegRegImm(kOpLsl, t_reg, rl_src.reg.GetReg(), LowestSetBit(lit + 1));
1682 OpRegRegReg(kOpSub, rl_result.reg.GetReg(), t_reg, rl_src.reg.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001683 }
1684 StoreValue(rl_dest, rl_result);
1685 return true;
1686}
1687
1688void Mir2Lir::GenArithOpIntLit(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001689 int lit) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001690 RegLocation rl_result;
1691 OpKind op = static_cast<OpKind>(0); /* Make gcc happy */
1692 int shift_op = false;
1693 bool is_div = false;
1694
1695 switch (opcode) {
1696 case Instruction::RSUB_INT_LIT8:
1697 case Instruction::RSUB_INT: {
1698 rl_src = LoadValue(rl_src, kCoreReg);
1699 rl_result = EvalLoc(rl_dest, kCoreReg, true);
1700 if (cu_->instruction_set == kThumb2) {
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001701 OpRegRegImm(kOpRsub, rl_result.reg.GetReg(), rl_src.reg.GetReg(), lit);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001702 } else {
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001703 OpRegReg(kOpNeg, rl_result.reg.GetReg(), rl_src.reg.GetReg());
1704 OpRegImm(kOpAdd, rl_result.reg.GetReg(), lit);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001705 }
1706 StoreValue(rl_dest, rl_result);
1707 return;
1708 }
1709
1710 case Instruction::SUB_INT:
1711 case Instruction::SUB_INT_2ADDR:
1712 lit = -lit;
1713 // Intended fallthrough
1714 case Instruction::ADD_INT:
1715 case Instruction::ADD_INT_2ADDR:
1716 case Instruction::ADD_INT_LIT8:
1717 case Instruction::ADD_INT_LIT16:
1718 op = kOpAdd;
1719 break;
1720 case Instruction::MUL_INT:
1721 case Instruction::MUL_INT_2ADDR:
1722 case Instruction::MUL_INT_LIT8:
1723 case Instruction::MUL_INT_LIT16: {
1724 if (HandleEasyMultiply(rl_src, rl_dest, lit)) {
1725 return;
1726 }
1727 op = kOpMul;
1728 break;
1729 }
1730 case Instruction::AND_INT:
1731 case Instruction::AND_INT_2ADDR:
1732 case Instruction::AND_INT_LIT8:
1733 case Instruction::AND_INT_LIT16:
1734 op = kOpAnd;
1735 break;
1736 case Instruction::OR_INT:
1737 case Instruction::OR_INT_2ADDR:
1738 case Instruction::OR_INT_LIT8:
1739 case Instruction::OR_INT_LIT16:
1740 op = kOpOr;
1741 break;
1742 case Instruction::XOR_INT:
1743 case Instruction::XOR_INT_2ADDR:
1744 case Instruction::XOR_INT_LIT8:
1745 case Instruction::XOR_INT_LIT16:
1746 op = kOpXor;
1747 break;
1748 case Instruction::SHL_INT_LIT8:
1749 case Instruction::SHL_INT:
1750 case Instruction::SHL_INT_2ADDR:
1751 lit &= 31;
1752 shift_op = true;
1753 op = kOpLsl;
1754 break;
1755 case Instruction::SHR_INT_LIT8:
1756 case Instruction::SHR_INT:
1757 case Instruction::SHR_INT_2ADDR:
1758 lit &= 31;
1759 shift_op = true;
1760 op = kOpAsr;
1761 break;
1762 case Instruction::USHR_INT_LIT8:
1763 case Instruction::USHR_INT:
1764 case Instruction::USHR_INT_2ADDR:
1765 lit &= 31;
1766 shift_op = true;
1767 op = kOpLsr;
1768 break;
1769
1770 case Instruction::DIV_INT:
1771 case Instruction::DIV_INT_2ADDR:
1772 case Instruction::DIV_INT_LIT8:
1773 case Instruction::DIV_INT_LIT16:
1774 case Instruction::REM_INT:
1775 case Instruction::REM_INT_2ADDR:
1776 case Instruction::REM_INT_LIT8:
1777 case Instruction::REM_INT_LIT16: {
1778 if (lit == 0) {
1779 GenImmedCheck(kCondAl, 0, 0, kThrowDivZero);
1780 return;
1781 }
buzbee11b63d12013-08-27 07:34:17 -07001782 if ((opcode == Instruction::DIV_INT) ||
Brian Carlstrom7940e442013-07-12 13:46:57 -07001783 (opcode == Instruction::DIV_INT_2ADDR) ||
buzbee11b63d12013-08-27 07:34:17 -07001784 (opcode == Instruction::DIV_INT_LIT8) ||
Brian Carlstrom7940e442013-07-12 13:46:57 -07001785 (opcode == Instruction::DIV_INT_LIT16)) {
1786 is_div = true;
1787 } else {
1788 is_div = false;
1789 }
buzbee11b63d12013-08-27 07:34:17 -07001790 if (HandleEasyDivRem(opcode, is_div, rl_src, rl_dest, lit)) {
1791 return;
1792 }
Dave Allison70202782013-10-22 17:52:19 -07001793
1794 bool done = false;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001795 if (cu_->instruction_set == kMips) {
1796 rl_src = LoadValue(rl_src, kCoreReg);
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001797 rl_result = GenDivRemLit(rl_dest, rl_src.reg.GetReg(), lit, is_div);
Dave Allison70202782013-10-22 17:52:19 -07001798 done = true;
Mark Mendell2bf31e62014-01-23 12:13:40 -08001799 } else if (cu_->instruction_set == kX86) {
1800 rl_result = GenDivRemLit(rl_dest, rl_src, lit, is_div);
1801 done = true;
Dave Allison70202782013-10-22 17:52:19 -07001802 } else if (cu_->instruction_set == kThumb2) {
1803 if (cu_->GetInstructionSetFeatures().HasDivideInstruction()) {
1804 // Use ARM SDIV instruction for division. For remainder we also need to
1805 // calculate using a MUL and subtract.
1806 rl_src = LoadValue(rl_src, kCoreReg);
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001807 rl_result = GenDivRemLit(rl_dest, rl_src.reg.GetReg(), lit, is_div);
Dave Allison70202782013-10-22 17:52:19 -07001808 done = true;
1809 }
1810 }
1811
1812 if (!done) {
1813 FlushAllRegs(); /* Everything to home location. */
Brian Carlstrom7940e442013-07-12 13:46:57 -07001814 LoadValueDirectFixed(rl_src, TargetReg(kArg0));
1815 Clobber(TargetReg(kArg0));
Ian Rogers848871b2013-08-05 10:56:33 -07001816 ThreadOffset func_offset = QUICK_ENTRYPOINT_OFFSET(pIdivmod);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001817 CallRuntimeHelperRegImm(func_offset, TargetReg(kArg0), lit, false);
1818 if (is_div)
1819 rl_result = GetReturn(false);
1820 else
1821 rl_result = GetReturnAlt();
1822 }
1823 StoreValue(rl_dest, rl_result);
1824 return;
1825 }
1826 default:
1827 LOG(FATAL) << "Unexpected opcode " << opcode;
1828 }
1829 rl_src = LoadValue(rl_src, kCoreReg);
1830 rl_result = EvalLoc(rl_dest, kCoreReg, true);
Dave Allison70202782013-10-22 17:52:19 -07001831 // Avoid shifts by literal 0 - no support in Thumb. Change to copy.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001832 if (shift_op && (lit == 0)) {
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001833 OpRegCopy(rl_result.reg.GetReg(), rl_src.reg.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001834 } else {
Bill Buzbee00e1ec62014-02-27 23:44:13 +00001835 OpRegRegImm(op, rl_result.reg.GetReg(), rl_src.reg.GetReg(), lit);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001836 }
1837 StoreValue(rl_dest, rl_result);
1838}
1839
/*
 * Generate code for a 64-bit arithmetic operation.  NOT_LONG is handled
 * inline; per-target specializations (GenAddLong, GenMulLong, ...) are
 * dispatched early and return; the remaining ops either go through
 * GenLong3Addr (pairwise 32-bit ops) or call out to a runtime helper
 * (mul on Mips, div/rem everywhere).
 */
void Mir2Lir::GenArithOpLong(Instruction::Code opcode, RegLocation rl_dest,
                             RegLocation rl_src1, RegLocation rl_src2) {
  RegLocation rl_result;
  OpKind first_op = kOpBkpt;    // Op combining the low words.
  OpKind second_op = kOpBkpt;   // Op combining the high words.
  bool call_out = false;        // true => use a runtime helper.
  bool check_zero = false;      // true => emit divide-by-zero check.
  ThreadOffset func_offset(-1);
  int ret_reg = TargetReg(kRet0);

  switch (opcode) {
    case Instruction::NOT_LONG:
      rl_src2 = LoadValueWide(rl_src2, kCoreReg);
      rl_result = EvalLoc(rl_dest, kCoreReg, true);
      // Check for destructive overlap
      if (rl_result.reg.GetReg() == rl_src2.reg.GetHighReg()) {
        // Result low reg aliases source high reg: save the high word first.
        int t_reg = AllocTemp();
        OpRegCopy(t_reg, rl_src2.reg.GetHighReg());
        OpRegReg(kOpMvn, rl_result.reg.GetReg(), rl_src2.reg.GetReg());
        OpRegReg(kOpMvn, rl_result.reg.GetHighReg(), t_reg);
        FreeTemp(t_reg);
      } else {
        OpRegReg(kOpMvn, rl_result.reg.GetReg(), rl_src2.reg.GetReg());
        OpRegReg(kOpMvn, rl_result.reg.GetHighReg(), rl_src2.reg.GetHighReg());
      }
      StoreValueWide(rl_dest, rl_result);
      return;
    case Instruction::ADD_LONG:
    case Instruction::ADD_LONG_2ADDR:
      if (cu_->instruction_set != kThumb2) {
        GenAddLong(opcode, rl_dest, rl_src1, rl_src2);
        return;
      }
      first_op = kOpAdd;
      second_op = kOpAdc;
      break;
    case Instruction::SUB_LONG:
    case Instruction::SUB_LONG_2ADDR:
      if (cu_->instruction_set != kThumb2) {
        GenSubLong(opcode, rl_dest, rl_src1, rl_src2);
        return;
      }
      first_op = kOpSub;
      second_op = kOpSbc;
      break;
    case Instruction::MUL_LONG:
    case Instruction::MUL_LONG_2ADDR:
      if (cu_->instruction_set != kMips) {
        GenMulLong(opcode, rl_dest, rl_src1, rl_src2);
        return;
      } else {
        call_out = true;
        ret_reg = TargetReg(kRet0);
        func_offset = QUICK_ENTRYPOINT_OFFSET(pLmul);
      }
      break;
    case Instruction::DIV_LONG:
    case Instruction::DIV_LONG_2ADDR:
      call_out = true;
      check_zero = true;
      ret_reg = TargetReg(kRet0);
      func_offset = QUICK_ENTRYPOINT_OFFSET(pLdiv);
      break;
    case Instruction::REM_LONG:
    case Instruction::REM_LONG_2ADDR:
      call_out = true;
      check_zero = true;
      func_offset = QUICK_ENTRYPOINT_OFFSET(pLmod);
      /* NOTE - for Arm, result is in kArg2/kArg3 instead of kRet0/kRet1 */
      ret_reg = (cu_->instruction_set == kThumb2) ? TargetReg(kArg2) : TargetReg(kRet0);
      break;
    case Instruction::AND_LONG_2ADDR:
    case Instruction::AND_LONG:
      if (cu_->instruction_set == kX86) {
        return GenAndLong(opcode, rl_dest, rl_src1, rl_src2);
      }
      first_op = kOpAnd;
      second_op = kOpAnd;
      break;
    case Instruction::OR_LONG:
    case Instruction::OR_LONG_2ADDR:
      if (cu_->instruction_set == kX86) {
        GenOrLong(opcode, rl_dest, rl_src1, rl_src2);
        return;
      }
      first_op = kOpOr;
      second_op = kOpOr;
      break;
    case Instruction::XOR_LONG:
    case Instruction::XOR_LONG_2ADDR:
      if (cu_->instruction_set == kX86) {
        GenXorLong(opcode, rl_dest, rl_src1, rl_src2);
        return;
      }
      first_op = kOpXor;
      second_op = kOpXor;
      break;
    case Instruction::NEG_LONG: {
      GenNegLong(rl_dest, rl_src2);
      return;
    }
    default:
      LOG(FATAL) << "Invalid long arith op";
  }
  if (!call_out) {
    GenLong3Addr(first_op, second_op, rl_dest, rl_src1, rl_src2);
  } else {
    FlushAllRegs();   /* Send everything to home location */
    if (check_zero) {
      // Load divisor first so the zero check precedes loading the dividend.
      LoadValueDirectWideFixed(rl_src2, TargetReg(kArg2), TargetReg(kArg3));
      int r_tgt = CallHelperSetup(func_offset);
      GenDivZeroCheck(TargetReg(kArg2), TargetReg(kArg3));
      LoadValueDirectWideFixed(rl_src1, TargetReg(kArg0), TargetReg(kArg1));
      // NOTE: callout here is not a safepoint
      CallHelper(r_tgt, func_offset, false /* not safepoint */);
    } else {
      CallRuntimeHelperRegLocationRegLocation(func_offset, rl_src1, rl_src2, false);
    }
    // Adjust return regs in to handle case of rem returning kArg2/kArg3
    if (ret_reg == TargetReg(kRet0))
      rl_result = GetReturnWide(false);
    else
      rl_result = GetReturnWideAlt();
    StoreValueWide(rl_dest, rl_result);
  }
}
1966
Ian Rogers848871b2013-08-05 10:56:33 -07001967void Mir2Lir::GenConversionCall(ThreadOffset func_offset,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001968 RegLocation rl_dest, RegLocation rl_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001969 /*
1970 * Don't optimize the register usage since it calls out to support
1971 * functions
1972 */
1973 FlushAllRegs(); /* Send everything to home location */
Brian Carlstrom7940e442013-07-12 13:46:57 -07001974 CallRuntimeHelperRegLocation(func_offset, rl_src, false);
1975 if (rl_dest.wide) {
1976 RegLocation rl_result;
1977 rl_result = GetReturnWide(rl_dest.fp);
1978 StoreValueWide(rl_dest, rl_result);
1979 } else {
1980 RegLocation rl_result;
1981 rl_result = GetReturn(rl_dest.fp);
1982 StoreValue(rl_dest, rl_result);
1983 }
1984}
1985
1986/* Check if we need to check for pending suspend request */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001987void Mir2Lir::GenSuspendTest(int opt_flags) {
Dave Allisonb373e092014-02-20 16:06:36 -08001988 if (Runtime::Current()->ExplicitSuspendChecks()) {
1989 if (NO_SUSPEND || (opt_flags & MIR_IGNORE_SUSPEND_CHECK)) {
1990 return;
1991 }
1992 FlushAllRegs();
1993 LIR* branch = OpTestSuspend(NULL);
1994 LIR* ret_lab = NewLIR0(kPseudoTargetLabel);
1995 LIR* target = RawLIR(current_dalvik_offset_, kPseudoSuspendTarget, WrapPointer(ret_lab),
1996 current_dalvik_offset_);
1997 branch->target = target;
1998 suspend_launchpads_.Insert(target);
1999 } else {
2000 if (NO_SUSPEND || (opt_flags & MIR_IGNORE_SUSPEND_CHECK)) {
2001 return;
2002 }
2003 FlushAllRegs(); // TODO: needed?
2004 LIR* inst = CheckSuspendUsingLoad();
2005 MarkSafepointPC(inst);
Brian Carlstrom7940e442013-07-12 13:46:57 -07002006 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07002007}
2008
2009/* Check if we need to check for pending suspend request */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07002010void Mir2Lir::GenSuspendTestAndBranch(int opt_flags, LIR* target) {
Dave Allisonb373e092014-02-20 16:06:36 -08002011 if (Runtime::Current()->ExplicitSuspendChecks()) {
2012 if (NO_SUSPEND || (opt_flags & MIR_IGNORE_SUSPEND_CHECK)) {
2013 OpUnconditionalBranch(target);
2014 return;
2015 }
2016 OpTestSuspend(target);
2017 LIR* launch_pad =
2018 RawLIR(current_dalvik_offset_, kPseudoSuspendTarget, WrapPointer(target),
2019 current_dalvik_offset_);
2020 FlushAllRegs();
2021 OpUnconditionalBranch(launch_pad);
2022 suspend_launchpads_.Insert(launch_pad);
2023 } else {
2024 // For the implicit suspend check, just perform the trigger
2025 // load and branch to the target.
2026 if (NO_SUSPEND || (opt_flags & MIR_IGNORE_SUSPEND_CHECK)) {
2027 OpUnconditionalBranch(target);
2028 return;
2029 }
2030 FlushAllRegs();
2031 LIR* inst = CheckSuspendUsingLoad();
2032 MarkSafepointPC(inst);
Brian Carlstrom7940e442013-07-12 13:46:57 -07002033 OpUnconditionalBranch(target);
Brian Carlstrom7940e442013-07-12 13:46:57 -07002034 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07002035}
2036
/* Call out to helper assembly routine that will null check obj and then lock it. */
// rl_src holds the object reference. opt_flags is accepted but not consulted
// in this generic path (no visible use in the body).
void Mir2Lir::GenMonitorEnter(int opt_flags, RegLocation rl_src) {
  FlushAllRegs();  // Helper call clobbers registers; send values home first.
  // Last arg true — presumably marks the call as a safepoint (cf. the
  // CallHelper safepoint flag used elsewhere in this file); confirm.
  CallRuntimeHelperRegLocation(QUICK_ENTRYPOINT_OFFSET(pLockObject), rl_src, true);
}
2042
/* Call out to helper assembly routine that will null check obj and then unlock it. */
// rl_src holds the object reference. opt_flags is accepted but not consulted
// in this generic path (no visible use in the body).
void Mir2Lir::GenMonitorExit(int opt_flags, RegLocation rl_src) {
  FlushAllRegs();  // Helper call clobbers registers; send values home first.
  // Last arg true — presumably marks the call as a safepoint (cf. the
  // CallHelper safepoint flag used elsewhere in this file); confirm.
  CallRuntimeHelperRegLocation(QUICK_ENTRYPOINT_OFFSET(pUnlockObject), rl_src, true);
}
2048
/* Generic code for generating a wide constant into a VR. */
void Mir2Lir::GenConstWide(RegLocation rl_dest, int64_t value) {
  // Evaluate the destination into a register pair (low via GetReg, high via
  // GetHighReg), materialize the 64-bit immediate there, then store the
  // result back to the wide Dalvik VR.
  RegLocation rl_result = EvalLoc(rl_dest, kAnyReg, true);
  LoadConstantWide(rl_result.reg.GetReg(), rl_result.reg.GetHighReg(), value);
  StoreValueWide(rl_dest, rl_result);
}
2055
Brian Carlstrom7940e442013-07-12 13:46:57 -07002056} // namespace art