/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "dex/compiler_ir.h"
#include "dex/compiler_internals.h"
#include "dex/quick/arm/arm_lir.h"
#include "dex/quick/mir_to_lir-inl.h"
#include "driver/compiler_options.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "mirror/array.h"
#include "mirror/object-inl.h"
#include "verifier/method_verifier.h"
#include <functional>

namespace art {

/*
 * This source file contains "gen" codegen routines that should
 * be applicable to most targets.  Only mid-level support utilities
 * and "op" calls may be used here.
 */

/*
 * Generate a kPseudoBarrier marker to indicate the boundary of special
 * blocks.
 */
void Mir2Lir::GenBarrier() {
  LIR* barrier = NewLIR0(kPseudoBarrier);
  /* Mark all resources as being clobbered */
  DCHECK(!barrier->flags.use_def_invalid);
  barrier->u.m.def_mask = ENCODE_ALL;
}

// TODO: need to do some work to split out targets with
// condition codes and those without
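// Generate a conditional branch to a throw launchpad of the given kind.  The launchpad target is
// only recorded in throw_launchpads_ here; its code is emitted later by HandleThrowLaunchPads().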
LIR* Mir2Lir::GenCheck(ConditionCode c_code, ThrowKind kind) {
  DCHECK_NE(cu_->instruction_set, kMips);
  LIR* tgt = RawLIR(0, kPseudoThrowTarget, kind, current_dalvik_offset_);
  LIR* branch = OpCondBranch(c_code, tgt);
  // Remember branch target - will process later
  throw_launchpads_.Insert(tgt);
  return branch;
}

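// Compare a register against an immediate and branch to a new throw launchpad if the condition
// holds.  kCondAl emits an unconditional branch to the launchpad instead of a compare.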
LIR* Mir2Lir::GenImmedCheck(ConditionCode c_code, RegStorage reg, int imm_val, ThrowKind kind) {
  LIR* tgt;
  LIR* branch;
  if (c_code == kCondAl) {
    tgt = RawLIR(0, kPseudoThrowTarget, kind, current_dalvik_offset_, RegStorage::kInvalidRegVal,
                 imm_val);
    branch = OpUnconditionalBranch(tgt);
  } else {
    tgt = RawLIR(0, kPseudoThrowTarget, kind, current_dalvik_offset_, reg.GetReg(), imm_val);
    branch = OpCmpImmBranch(c_code, reg, imm_val, tgt);
  }
  // Remember branch target - will process later
  throw_launchpads_.Insert(tgt);
  return branch;
}


/* Perform null-check on a register. */
LIR* Mir2Lir::GenNullCheck(RegStorage m_reg, int opt_flags) {
  if (Runtime::Current()->ExplicitNullChecks()) {
    return GenExplicitNullCheck(m_reg, opt_flags);
  }
  return nullptr;
}

/* Perform an explicit null-check on a register. */
LIR* Mir2Lir::GenExplicitNullCheck(RegStorage m_reg, int opt_flags) {
  if (!(cu_->disable_opt & (1 << kNullCheckElimination)) && (opt_flags & MIR_IGNORE_NULL_CHECK)) {
    return NULL;
  }
  return GenImmedCheck(kCondEq, m_reg, 0, kThrowNullPointer);
}

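// When implicit (signal-based) null checks are in use, record a safepoint at the last emitted
// instruction so a fault on it can be mapped back to this dex pc.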
void Mir2Lir::MarkPossibleNullPointerException(int opt_flags) {
  if (!Runtime::Current()->ExplicitNullChecks()) {
    if (!(cu_->disable_opt & (1 << kNullCheckElimination)) && (opt_flags & MIR_IGNORE_NULL_CHECK)) {
      return;
    }
    MarkSafepointPC(last_lir_insn_);
  }
}

void Mir2Lir::MarkPossibleStackOverflowException() {
  if (!Runtime::Current()->ExplicitStackOverflowChecks()) {
    MarkSafepointPC(last_lir_insn_);
  }
}

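// Emit a throwaway load through 'reg' so that a null reference faults here even when this
// bytecode would not otherwise touch memory.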
void Mir2Lir::ForceImplicitNullCheck(RegStorage reg, int opt_flags) {
  if (!Runtime::Current()->ExplicitNullChecks()) {
    if (!(cu_->disable_opt & (1 << kNullCheckElimination)) && (opt_flags & MIR_IGNORE_NULL_CHECK)) {
      return;
    }
    // Force an implicit null check by performing a memory operation (load) from the given
    // register with offset 0.  This will cause a signal if the register contains 0 (null).
    RegStorage tmp = AllocTemp();
    // TODO: for Mips, would be best to use rZERO as the bogus register target.
    LIR* load = LoadWordDisp(reg, 0, tmp);
    FreeTemp(tmp);
    MarkSafepointPC(load);
  }
}

/* Perform check on two registers */
LIR* Mir2Lir::GenRegRegCheck(ConditionCode c_code, RegStorage reg1, RegStorage reg2,
                             ThrowKind kind) {
  LIR* tgt = RawLIR(0, kPseudoThrowTarget, kind, current_dalvik_offset_, reg1.GetReg(),
                    reg2.GetReg());
  LIR* branch = OpCmpBranch(c_code, reg1, reg2, tgt);
  // Remember branch target - will process later
  throw_launchpads_.Insert(tgt);
  return branch;
}

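// Lower a two-register IF_* compare-and-branch.  Constant operands are normalized into rl_src2
// and, when cheap to materialize, folded into a compare-immediate branch.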
void Mir2Lir::GenCompareAndBranch(Instruction::Code opcode, RegLocation rl_src1,
                                  RegLocation rl_src2, LIR* taken,
                                  LIR* fall_through) {
  ConditionCode cond;
  switch (opcode) {
    case Instruction::IF_EQ:
      cond = kCondEq;
      break;
    case Instruction::IF_NE:
      cond = kCondNe;
      break;
    case Instruction::IF_LT:
      cond = kCondLt;
      break;
    case Instruction::IF_GE:
      cond = kCondGe;
      break;
    case Instruction::IF_GT:
      cond = kCondGt;
      break;
    case Instruction::IF_LE:
      cond = kCondLe;
      break;
    default:
      cond = static_cast<ConditionCode>(0);
      LOG(FATAL) << "Unexpected opcode " << opcode;
  }

  // Normalize such that if either operand is constant, src2 will be constant
  if (rl_src1.is_const) {
    RegLocation rl_temp = rl_src1;
    rl_src1 = rl_src2;
    rl_src2 = rl_temp;
    cond = FlipComparisonOrder(cond);
  }

  rl_src1 = LoadValue(rl_src1, kCoreReg);
  // Is this really an immediate comparison?
  if (rl_src2.is_const) {
    // If it's already live in a register or not easily materialized, just keep going
    RegLocation rl_temp = UpdateLoc(rl_src2);
    if ((rl_temp.location == kLocDalvikFrame) &&
        InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src2))) {
      // OK - convert this to a compare immediate and branch
      OpCmpImmBranch(cond, rl_src1.reg, mir_graph_->ConstantValue(rl_src2), taken);
      return;
    }
  }
  rl_src2 = LoadValue(rl_src2, kCoreReg);
  OpCmpBranch(cond, rl_src1.reg, rl_src2.reg, taken);
}

void Mir2Lir::GenCompareZeroAndBranch(Instruction::Code opcode, RegLocation rl_src, LIR* taken,
                                      LIR* fall_through) {
  ConditionCode cond;
  rl_src = LoadValue(rl_src, kCoreReg);
  switch (opcode) {
    case Instruction::IF_EQZ:
      cond = kCondEq;
      break;
    case Instruction::IF_NEZ:
      cond = kCondNe;
      break;
    case Instruction::IF_LTZ:
      cond = kCondLt;
      break;
    case Instruction::IF_GEZ:
      cond = kCondGe;
      break;
    case Instruction::IF_GTZ:
      cond = kCondGt;
      break;
    case Instruction::IF_LEZ:
      cond = kCondLe;
      break;
    default:
      cond = static_cast<ConditionCode>(0);
      LOG(FATAL) << "Unexpected opcode " << opcode;
  }
  OpCmpImmBranch(cond, rl_src.reg, 0, taken);
}

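// Sign-extend a 32-bit value into a wide pair: the low word is the source, the high word is the
// low word arithmetically shifted right by 31.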
void Mir2Lir::GenIntToLong(RegLocation rl_dest, RegLocation rl_src) {
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  if (rl_src.location == kLocPhysReg) {
    OpRegCopy(rl_result.reg, rl_src.reg);
  } else {
    LoadValueDirect(rl_src, rl_result.reg.GetLow());
  }
  OpRegRegImm(kOpAsr, rl_result.reg.GetHigh(), rl_result.reg.GetLow(), 31);
  StoreValueWide(rl_dest, rl_result);
}

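// Lower int-to-byte/short/char by emitting the target's narrowing register-to-register op.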
void Mir2Lir::GenIntNarrowing(Instruction::Code opcode, RegLocation rl_dest,
                              RegLocation rl_src) {
  rl_src = LoadValue(rl_src, kCoreReg);
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  OpKind op = kOpInvalid;
  switch (opcode) {
    case Instruction::INT_TO_BYTE:
      op = kOp2Byte;
      break;
    case Instruction::INT_TO_SHORT:
      op = kOp2Short;
      break;
    case Instruction::INT_TO_CHAR:
      op = kOp2Char;
      break;
    default:
      LOG(ERROR) << "Bad int conversion type";
  }
  OpRegReg(op, rl_result.reg, rl_src.reg);
  StoreValue(rl_dest, rl_result);
}

/*
 * Let helper function take care of everything.  Will call
 * Array::AllocFromCode(type_idx, method, count);
 * Note: AllocFromCode will handle checks for errNegativeArraySize.
 */
void Mir2Lir::GenNewArray(uint32_t type_idx, RegLocation rl_dest,
                          RegLocation rl_src) {
  FlushAllRegs();  /* Everything to home location */
  ThreadOffset<4> func_offset(-1);
  const DexFile* dex_file = cu_->dex_file;
  CompilerDriver* driver = cu_->compiler_driver;
  if (cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx, *dex_file,
                                                       type_idx)) {
    bool is_type_initialized;  // Ignored as an array does not have an initializer.
    bool use_direct_type_ptr;
    uintptr_t direct_type_ptr;
    if (kEmbedClassInCode &&
        driver->CanEmbedTypeInCode(*dex_file, type_idx,
                                   &is_type_initialized, &use_direct_type_ptr, &direct_type_ptr)) {
      // The fast path.
      if (!use_direct_type_ptr) {
        LoadClassType(type_idx, kArg0);
        func_offset = QUICK_ENTRYPOINT_OFFSET(4, pAllocArrayResolved);
        CallRuntimeHelperRegMethodRegLocation(func_offset, TargetReg(kArg0), rl_src, true);
      } else {
        // Use the direct pointer.
        func_offset = QUICK_ENTRYPOINT_OFFSET(4, pAllocArrayResolved);
        CallRuntimeHelperImmMethodRegLocation(func_offset, direct_type_ptr, rl_src, true);
      }
    } else {
      // The slow path.
      DCHECK_EQ(func_offset.Int32Value(), -1);
      func_offset = QUICK_ENTRYPOINT_OFFSET(4, pAllocArray);
      CallRuntimeHelperImmMethodRegLocation(func_offset, type_idx, rl_src, true);
    }
    DCHECK_NE(func_offset.Int32Value(), -1);
  } else {
    func_offset = QUICK_ENTRYPOINT_OFFSET(4, pAllocArrayWithAccessCheck);
    CallRuntimeHelperImmMethodRegLocation(func_offset, type_idx, rl_src, true);
  }
  RegLocation rl_result = GetReturn(false);
  StoreValue(rl_dest, rl_result);
}

/*
 * Similar to GenNewArray, but with post-allocation initialization.
 * Verifier guarantees we're dealing with an array class.  Current
 * code throws runtime exception "bad Filled array req" for 'D' and 'J'.
 * Current code also throws internal unimp if not 'L', '[' or 'I'.
 */
void Mir2Lir::GenFilledNewArray(CallInfo* info) {
  int elems = info->num_arg_words;
  int type_idx = info->index;
  FlushAllRegs();  /* Everything to home location */
  ThreadOffset<4> func_offset(-1);
  if (cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx, *cu_->dex_file,
                                                       type_idx)) {
    func_offset = QUICK_ENTRYPOINT_OFFSET(4, pCheckAndAllocArray);
  } else {
    func_offset = QUICK_ENTRYPOINT_OFFSET(4, pCheckAndAllocArrayWithAccessCheck);
  }
  CallRuntimeHelperImmMethodImm(func_offset, type_idx, elems, true);
  FreeTemp(TargetReg(kArg2));
  FreeTemp(TargetReg(kArg1));
  /*
   * NOTE: the implicit target for Instruction::FILLED_NEW_ARRAY is the
   * return region.  Because AllocFromCode placed the new array
   * in kRet0, we'll just lock it into place.  When debugger support is
   * added, it may be necessary to additionally copy all return
   * values to a home location in thread-local storage
   */
  LockTemp(TargetReg(kRet0));

  // TODO: use the correct component size, currently all supported types
  // share array alignment with ints (see comment at head of function)
  size_t component_size = sizeof(int32_t);

  // Having a range of 0 is legal
  if (info->is_range && (elems > 0)) {
    /*
     * Bit of ugliness here.  We're going to generate a mem copy loop
     * on the register range, but it is possible that some regs
     * in the range have been promoted.  This is unlikely, but
     * before generating the copy, we'll just force a flush
     * of any regs in the source range that have been promoted to
     * home location.
     */
    for (int i = 0; i < elems; i++) {
      RegLocation loc = UpdateLoc(info->args[i]);
      if (loc.location == kLocPhysReg) {
        StoreBaseDisp(TargetReg(kSp), SRegOffset(loc.s_reg_low), loc.reg, kWord);
      }
    }
    /*
     * TUNING note: generated code here could be much improved, but
     * this is an uncommon operation and isn't especially performance
     * critical.
     */
    RegStorage r_src = AllocTemp();
    RegStorage r_dst = AllocTemp();
    RegStorage r_idx = AllocTemp();
    RegStorage r_val;
    switch (cu_->instruction_set) {
      case kThumb2:
        r_val = TargetReg(kLr);
        break;
      case kX86:
      case kX86_64:
        FreeTemp(TargetReg(kRet0));
        r_val = AllocTemp();
        break;
      case kMips:
        r_val = AllocTemp();
        break;
      default: LOG(FATAL) << "Unexpected instruction set: " << cu_->instruction_set;
    }
    // Set up source pointer
    RegLocation rl_first = info->args[0];
    OpRegRegImm(kOpAdd, r_src, TargetReg(kSp), SRegOffset(rl_first.s_reg_low));
    // Set up the target pointer
    OpRegRegImm(kOpAdd, r_dst, TargetReg(kRet0),
                mirror::Array::DataOffset(component_size).Int32Value());
    // Set up the loop counter (known to be > 0)
    LoadConstant(r_idx, elems - 1);
    // Generate the copy loop.  Going backwards for convenience
    LIR* target = NewLIR0(kPseudoTargetLabel);
    // Copy next element
    LoadBaseIndexed(r_src, r_idx, r_val, 2, kWord);
    StoreBaseIndexed(r_dst, r_idx, r_val, 2, kWord);
    FreeTemp(r_val);
    OpDecAndBranch(kCondGe, r_idx, target);
    if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
      // Restore the target pointer
      OpRegRegImm(kOpAdd, TargetReg(kRet0), r_dst,
                  -mirror::Array::DataOffset(component_size).Int32Value());
    }
  } else if (!info->is_range) {
    // TUNING: interleave
    for (int i = 0; i < elems; i++) {
      RegLocation rl_arg = LoadValue(info->args[i], kCoreReg);
      StoreBaseDisp(TargetReg(kRet0),
                    mirror::Array::DataOffset(component_size).Int32Value() + i * 4,
                    rl_arg.reg, kWord);
      // If the LoadValue caused a temp to be allocated, free it
      if (IsTemp(rl_arg.reg)) {
        FreeTemp(rl_arg.reg);
      }
    }
  }
  if (info->result.location != kLocInvalid) {
    StoreValue(info->result, GetReturn(false /* not fp */));
  }
}

//
// Slow path to ensure a class is initialized for sget/sput.
//
class StaticFieldSlowPath : public Mir2Lir::LIRSlowPath {
 public:
  StaticFieldSlowPath(Mir2Lir* m2l, LIR* unresolved, LIR* uninit, LIR* cont, int storage_index,
                      RegStorage r_base) :
      LIRSlowPath(m2l, m2l->GetCurrentDexPc(), unresolved, cont), uninit_(uninit),
      storage_index_(storage_index), r_base_(r_base) {
  }

  void Compile() {
    LIR* unresolved_target = GenerateTargetLabel();
    uninit_->target = unresolved_target;
    m2l_->CallRuntimeHelperImm(QUICK_ENTRYPOINT_OFFSET(4, pInitializeStaticStorage),
                               storage_index_, true);
    // Copy helper's result into r_base, a no-op on all but MIPS.
    m2l_->OpRegCopy(r_base_, m2l_->TargetReg(kRet0));

    m2l_->OpUnconditionalBranch(cont_);
  }

 private:
  LIR* const uninit_;
  const int storage_index_;
  const RegStorage r_base_;
};

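// Generate code for a static field store (sput).  The fast path computes the storage base from
// the referrer's class or the dex cache, adding a class-initialization slow path when needed;
// otherwise the store is delegated to a pSet*Static runtime helper.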
void Mir2Lir::GenSput(MIR* mir, RegLocation rl_src, bool is_long_or_double,
                      bool is_object) {
  const MirSFieldLoweringInfo& field_info = mir_graph_->GetSFieldLoweringInfo(mir);
  cu_->compiler_driver->ProcessedStaticField(field_info.FastPut(), field_info.IsReferrersClass());
  if (field_info.FastPut() && !SLOW_FIELD_PATH) {
    DCHECK_GE(field_info.FieldOffset().Int32Value(), 0);
    RegStorage r_base;
    if (field_info.IsReferrersClass()) {
      // Fast path, static storage base is this method's class
      RegLocation rl_method = LoadCurrMethod();
      r_base = AllocTemp();
      LoadWordDisp(rl_method.reg, mirror::ArtMethod::DeclaringClassOffset().Int32Value(), r_base);
      if (IsTemp(rl_method.reg)) {
        FreeTemp(rl_method.reg);
      }
    } else {
      // Medium path, static storage base in a different class which requires checks that the other
      // class is initialized.
      // TODO: remove initialized check now that we are initializing classes in the compiler driver.
      DCHECK_NE(field_info.StorageIndex(), DexFile::kDexNoIndex);
      // May do runtime call so everything to home locations.
      FlushAllRegs();
      // Using fixed register to sync with possible call to runtime support.
      RegStorage r_method = TargetReg(kArg1);
      LockTemp(r_method);
      LoadCurrMethodDirect(r_method);
      r_base = TargetReg(kArg0);
      LockTemp(r_base);
      LoadWordDisp(r_method, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(), r_base);
      LoadWordDisp(r_base, mirror::Array::DataOffset(sizeof(mirror::Object*)).Int32Value() +
                   sizeof(int32_t*) * field_info.StorageIndex(), r_base);
      // r_base now points at static storage (Class*) or NULL if the type is not yet resolved.
      if (!field_info.IsInitialized() &&
          (mir->optimization_flags & MIR_IGNORE_CLINIT_CHECK) == 0) {
        // Check if r_base is NULL or a not yet initialized class.

        // The slow path is invoked if the r_base is NULL or the class pointed
        // to by it is not initialized.
        LIR* unresolved_branch = OpCmpImmBranch(kCondEq, r_base, 0, NULL);
        RegStorage r_tmp = TargetReg(kArg2);
        LockTemp(r_tmp);
        LIR* uninit_branch = OpCmpMemImmBranch(kCondLt, r_tmp, r_base,
                                               mirror::Class::StatusOffset().Int32Value(),
                                               mirror::Class::kStatusInitialized, NULL);
        LIR* cont = NewLIR0(kPseudoTargetLabel);

        AddSlowPath(new (arena_) StaticFieldSlowPath(this, unresolved_branch, uninit_branch, cont,
                                                     field_info.StorageIndex(), r_base));

        FreeTemp(r_tmp);
      }
      FreeTemp(r_method);
    }
    // r_base now holds static storage base
    if (is_long_or_double) {
      rl_src = LoadValueWide(rl_src, kAnyReg);
    } else {
      rl_src = LoadValue(rl_src, kAnyReg);
    }
    if (field_info.IsVolatile()) {
      // There might have been a store before this volatile one so insert StoreStore barrier.
      GenMemBarrier(kStoreStore);
    }
    if (is_long_or_double) {
      StoreBaseDispWide(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg);
    } else {
      StoreWordDisp(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg);
    }
    if (field_info.IsVolatile()) {
      // A load might follow the volatile store so insert a StoreLoad barrier.
      GenMemBarrier(kStoreLoad);
    }
    if (is_object && !mir_graph_->IsConstantNullRef(rl_src)) {
      MarkGCCard(rl_src.reg, r_base);
    }
    FreeTemp(r_base);
  } else {
    FlushAllRegs();  // Everything to home locations
    ThreadOffset<4> setter_offset =
        is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(4, pSet64Static)
                          : (is_object ? QUICK_ENTRYPOINT_OFFSET(4, pSetObjStatic)
                                       : QUICK_ENTRYPOINT_OFFSET(4, pSet32Static));
    CallRuntimeHelperImmRegLocation(setter_offset, field_info.FieldIndex(), rl_src, true);
  }
}

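// Generate code for a static field load (sget).  Mirrors GenSput: resolve the storage base on
// the fast path, or fall back to the pGet*Static runtime helpers.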
void Mir2Lir::GenSget(MIR* mir, RegLocation rl_dest,
                      bool is_long_or_double, bool is_object) {
  const MirSFieldLoweringInfo& field_info = mir_graph_->GetSFieldLoweringInfo(mir);
  cu_->compiler_driver->ProcessedStaticField(field_info.FastGet(), field_info.IsReferrersClass());
  if (field_info.FastGet() && !SLOW_FIELD_PATH) {
    DCHECK_GE(field_info.FieldOffset().Int32Value(), 0);
    RegStorage r_base;
    if (field_info.IsReferrersClass()) {
      // Fast path, static storage base is this method's class
      RegLocation rl_method = LoadCurrMethod();
      r_base = AllocTemp();
      LoadWordDisp(rl_method.reg, mirror::ArtMethod::DeclaringClassOffset().Int32Value(), r_base);
    } else {
      // Medium path, static storage base in a different class which requires checks that the other
      // class is initialized
      DCHECK_NE(field_info.StorageIndex(), DexFile::kDexNoIndex);
      // May do runtime call so everything to home locations.
      FlushAllRegs();
      // Using fixed register to sync with possible call to runtime support.
      RegStorage r_method = TargetReg(kArg1);
      LockTemp(r_method);
      LoadCurrMethodDirect(r_method);
      r_base = TargetReg(kArg0);
      LockTemp(r_base);
      LoadWordDisp(r_method, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(), r_base);
      LoadWordDisp(r_base, mirror::Array::DataOffset(sizeof(mirror::Object*)).Int32Value() +
                   sizeof(int32_t*) * field_info.StorageIndex(), r_base);
      // r_base now points at static storage (Class*) or NULL if the type is not yet resolved.
      if (!field_info.IsInitialized() &&
          (mir->optimization_flags & MIR_IGNORE_CLINIT_CHECK) == 0) {
        // Check if r_base is NULL or a not yet initialized class.

        // The slow path is invoked if the r_base is NULL or the class pointed
        // to by it is not initialized.
        LIR* unresolved_branch = OpCmpImmBranch(kCondEq, r_base, 0, NULL);
        RegStorage r_tmp = TargetReg(kArg2);
        LockTemp(r_tmp);
        LIR* uninit_branch = OpCmpMemImmBranch(kCondLt, r_tmp, r_base,
                                               mirror::Class::StatusOffset().Int32Value(),
                                               mirror::Class::kStatusInitialized, NULL);
        LIR* cont = NewLIR0(kPseudoTargetLabel);

        AddSlowPath(new (arena_) StaticFieldSlowPath(this, unresolved_branch, uninit_branch, cont,
                                                     field_info.StorageIndex(), r_base));

        FreeTemp(r_tmp);
      }
      FreeTemp(r_method);
    }
    // r_base now holds static storage base
    RegLocation rl_result = EvalLoc(rl_dest, kAnyReg, true);

    if (is_long_or_double) {
      LoadBaseDispWide(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg, INVALID_SREG);
    } else {
      LoadWordDisp(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg);
    }
    FreeTemp(r_base);

    if (field_info.IsVolatile()) {
      // Without context sensitive analysis, we must issue the most conservative barriers.
      // In this case, either a load or store may follow so we issue both barriers.
      GenMemBarrier(kLoadLoad);
      GenMemBarrier(kLoadStore);
    }

    if (is_long_or_double) {
      StoreValueWide(rl_dest, rl_result);
    } else {
      StoreValue(rl_dest, rl_result);
    }
  } else {
    FlushAllRegs();  // Everything to home locations
    ThreadOffset<4> getterOffset =
        is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(4, pGet64Static)
                          : (is_object ? QUICK_ENTRYPOINT_OFFSET(4, pGetObjStatic)
                                       : QUICK_ENTRYPOINT_OFFSET(4, pGet32Static));
    CallRuntimeHelperImm(getterOffset, field_info.FieldIndex(), true);
    if (is_long_or_double) {
      RegLocation rl_result = GetReturnWide(rl_dest.fp);
      StoreValueWide(rl_dest, rl_result);
    } else {
      RegLocation rl_result = GetReturn(rl_dest.fp);
      StoreValue(rl_dest, rl_result);
    }
  }
}

// Generate code for all slow paths.
void Mir2Lir::HandleSlowPaths() {
  int n = slow_paths_.Size();
  for (int i = 0; i < n; ++i) {
    LIRSlowPath* slowpath = slow_paths_.Get(i);
    slowpath->Compile();
  }
  slow_paths_.Reset();
}

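// Materialize the deferred suspend-test launchpads: each one calls pTestSuspend and then
// branches back to its resume label.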
void Mir2Lir::HandleSuspendLaunchPads() {
  int num_elems = suspend_launchpads_.Size();
  ThreadOffset<4> helper_offset = QUICK_ENTRYPOINT_OFFSET(4, pTestSuspend);
  for (int i = 0; i < num_elems; i++) {
    ResetRegPool();
    ResetDefTracking();
    LIR* lab = suspend_launchpads_.Get(i);
    LIR* resume_lab = reinterpret_cast<LIR*>(UnwrapPointer(lab->operands[0]));
    current_dalvik_offset_ = lab->operands[1];
    AppendLIR(lab);
    RegStorage r_tgt = CallHelperSetup(helper_offset);
    CallHelper(r_tgt, helper_offset, true /* MarkSafepointPC */);
    OpUnconditionalBranch(resume_lab);
  }
}

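// Materialize the deferred throw launchpads recorded by GenCheck/GenImmedCheck/GenRegRegCheck,
// moving the operands each exception entrypoint expects into place before the call.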
void Mir2Lir::HandleThrowLaunchPads() {
  int num_elems = throw_launchpads_.Size();
  for (int i = 0; i < num_elems; i++) {
    ResetRegPool();
    ResetDefTracking();
    LIR* lab = throw_launchpads_.Get(i);
    current_dalvik_offset_ = lab->operands[1];
    AppendLIR(lab);
    ThreadOffset<4> func_offset(-1);
    int v1 = lab->operands[2];
    int v2 = lab->operands[3];
    const bool target_x86 = cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64;
    switch (lab->operands[0]) {
      case kThrowNullPointer:
        func_offset = QUICK_ENTRYPOINT_OFFSET(4, pThrowNullPointer);
        break;
      case kThrowConstantArrayBounds:  // v1 is length reg (for Arm/Mips), v2 constant index
        // v1 holds the constant array index.  Mips/Arm uses v2 for length, x86 reloads.
        if (target_x86) {
          OpRegMem(kOpMov, TargetReg(kArg1), RegStorage::Solo32(v1),
                   mirror::Array::LengthOffset().Int32Value());
        } else {
          OpRegCopy(TargetReg(kArg1), RegStorage::Solo32(v1));
        }
        // Make sure the following LoadConstant doesn't mess with kArg1.
        LockTemp(TargetReg(kArg1));
        LoadConstant(TargetReg(kArg0), v2);
        func_offset = QUICK_ENTRYPOINT_OFFSET(4, pThrowArrayBounds);
        break;
      case kThrowArrayBounds:
        // Move v1 (array index) to kArg0 and v2 (array length) to kArg1
        if (v2 != TargetReg(kArg0).GetReg()) {
          OpRegCopy(TargetReg(kArg0), RegStorage::Solo32(v1));
          if (target_x86) {
            // x86 leaves the array pointer in v2, so load the array length that the handler expects
            OpRegMem(kOpMov, TargetReg(kArg1), RegStorage::Solo32(v2),
                     mirror::Array::LengthOffset().Int32Value());
          } else {
            OpRegCopy(TargetReg(kArg1), RegStorage::Solo32(v2));
          }
        } else {
          if (v1 == TargetReg(kArg1).GetReg()) {
            // Swap v1 and v2, using kArg2 as a temp
            OpRegCopy(TargetReg(kArg2), RegStorage::Solo32(v1));
            if (target_x86) {
              // x86 leaves the array pointer in v2; load the array length that the handler expects
              OpRegMem(kOpMov, TargetReg(kArg1), RegStorage::Solo32(v2),
                       mirror::Array::LengthOffset().Int32Value());
            } else {
              OpRegCopy(TargetReg(kArg1), RegStorage::Solo32(v2));
            }
            OpRegCopy(TargetReg(kArg0), TargetReg(kArg2));
          } else {
            if (target_x86) {
              // x86 leaves the array pointer in v2; load the array length that the handler expects
              OpRegMem(kOpMov, TargetReg(kArg1), RegStorage::Solo32(v2),
                       mirror::Array::LengthOffset().Int32Value());
            } else {
              OpRegCopy(TargetReg(kArg1), RegStorage::Solo32(v2));
            }
            OpRegCopy(TargetReg(kArg0), RegStorage::Solo32(v1));
          }
        }
        func_offset = QUICK_ENTRYPOINT_OFFSET(4, pThrowArrayBounds);
        break;
      case kThrowDivZero:
        func_offset = QUICK_ENTRYPOINT_OFFSET(4, pThrowDivZero);
        break;
      case kThrowNoSuchMethod:
        OpRegCopy(TargetReg(kArg0), RegStorage::Solo32(v1));
        func_offset =
            QUICK_ENTRYPOINT_OFFSET(4, pThrowNoSuchMethod);
        break;
      default:
        LOG(FATAL) << "Unexpected throw kind: " << lab->operands[0];
    }
    ClobberCallerSave();
    RegStorage r_tgt = CallHelperSetup(func_offset);
    CallHelper(r_tgt, func_offset, true /* MarkSafepointPC */, true /* UseLink */);
  }
}

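// Generate code for an instance field load (iget).  The fast path loads from the object at the
// resolved field offset after a null check; the slow path calls the pGet*Instance helpers.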
void Mir2Lir::GenIGet(MIR* mir, int opt_flags, OpSize size,
                      RegLocation rl_dest, RegLocation rl_obj, bool is_long_or_double,
                      bool is_object) {
  const MirIFieldLoweringInfo& field_info = mir_graph_->GetIFieldLoweringInfo(mir);
  cu_->compiler_driver->ProcessedInstanceField(field_info.FastGet());
  if (field_info.FastGet() && !SLOW_FIELD_PATH) {
    RegLocation rl_result;
    RegisterClass reg_class = oat_reg_class_by_size(size);
    DCHECK_GE(field_info.FieldOffset().Int32Value(), 0);
    rl_obj = LoadValue(rl_obj, kCoreReg);
    if (is_long_or_double) {
      DCHECK(rl_dest.wide);
      GenNullCheck(rl_obj.reg, opt_flags);
      if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
        rl_result = EvalLoc(rl_dest, reg_class, true);
        // FIXME?  duplicate null check?
        GenNullCheck(rl_obj.reg, opt_flags);
        LoadBaseDispWide(rl_obj.reg, field_info.FieldOffset().Int32Value(), rl_result.reg,
                         rl_obj.s_reg_low);
        MarkPossibleNullPointerException(opt_flags);
        if (field_info.IsVolatile()) {
          // Without context sensitive analysis, we must issue the most conservative barriers.
          // In this case, either a load or store may follow so we issue both barriers.
          GenMemBarrier(kLoadLoad);
          GenMemBarrier(kLoadStore);
        }
      } else {
        RegStorage reg_ptr = AllocTemp();
        OpRegRegImm(kOpAdd, reg_ptr, rl_obj.reg, field_info.FieldOffset().Int32Value());
        rl_result = EvalLoc(rl_dest, reg_class, true);
        LoadBaseDispWide(reg_ptr, 0, rl_result.reg, INVALID_SREG);
        MarkPossibleNullPointerException(opt_flags);
        if (field_info.IsVolatile()) {
          // Without context sensitive analysis, we must issue the most conservative barriers.
          // In this case, either a load or store may follow so we issue both barriers.
          GenMemBarrier(kLoadLoad);
          GenMemBarrier(kLoadStore);
        }
        FreeTemp(reg_ptr);
      }
      StoreValueWide(rl_dest, rl_result);
    } else {
      rl_result = EvalLoc(rl_dest, reg_class, true);
      GenNullCheck(rl_obj.reg, opt_flags);
      LoadBaseDisp(rl_obj.reg, field_info.FieldOffset().Int32Value(), rl_result.reg, kWord,
                   rl_obj.s_reg_low);
      MarkPossibleNullPointerException(opt_flags);
      if (field_info.IsVolatile()) {
        // Without context sensitive analysis, we must issue the most conservative barriers.
        // In this case, either a load or store may follow so we issue both barriers.
        GenMemBarrier(kLoadLoad);
        GenMemBarrier(kLoadStore);
      }
      StoreValue(rl_dest, rl_result);
    }
  } else {
    ThreadOffset<4> getterOffset =
        is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(4, pGet64Instance)
                          : (is_object ? QUICK_ENTRYPOINT_OFFSET(4, pGetObjInstance)
                                       : QUICK_ENTRYPOINT_OFFSET(4, pGet32Instance));
    CallRuntimeHelperImmRegLocation(getterOffset, field_info.FieldIndex(), rl_obj, true);
    if (is_long_or_double) {
      RegLocation rl_result = GetReturnWide(rl_dest.fp);
      StoreValueWide(rl_dest, rl_result);
    } else {
      RegLocation rl_result = GetReturn(rl_dest.fp);
      StoreValue(rl_dest, rl_result);
    }
  }
}

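// Generate code for an instance field store (iput).  Volatile stores are bracketed with
// StoreStore/StoreLoad barriers, and reference stores mark the GC card table.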
void Mir2Lir::GenIPut(MIR* mir, int opt_flags, OpSize size,
                      RegLocation rl_src, RegLocation rl_obj, bool is_long_or_double,
                      bool is_object) {
  const MirIFieldLoweringInfo& field_info = mir_graph_->GetIFieldLoweringInfo(mir);
  cu_->compiler_driver->ProcessedInstanceField(field_info.FastPut());
  if (field_info.FastPut() && !SLOW_FIELD_PATH) {
    RegisterClass reg_class = oat_reg_class_by_size(size);
    DCHECK_GE(field_info.FieldOffset().Int32Value(), 0);
    rl_obj = LoadValue(rl_obj, kCoreReg);
    if (is_long_or_double) {
      rl_src = LoadValueWide(rl_src, kAnyReg);
      GenNullCheck(rl_obj.reg, opt_flags);
      RegStorage reg_ptr = AllocTemp();
      OpRegRegImm(kOpAdd, reg_ptr, rl_obj.reg, field_info.FieldOffset().Int32Value());
      if (field_info.IsVolatile()) {
        // There might have been a store before this volatile one so insert StoreStore barrier.
        GenMemBarrier(kStoreStore);
      }
      StoreBaseDispWide(reg_ptr, 0, rl_src.reg);
      MarkPossibleNullPointerException(opt_flags);
      if (field_info.IsVolatile()) {
        // A load might follow the volatile store so insert a StoreLoad barrier.
        GenMemBarrier(kStoreLoad);
      }
      FreeTemp(reg_ptr);
    } else {
      rl_src = LoadValue(rl_src, reg_class);
      GenNullCheck(rl_obj.reg, opt_flags);
      if (field_info.IsVolatile()) {
        // There might have been a store before this volatile one so insert StoreStore barrier.
        GenMemBarrier(kStoreStore);
      }
      StoreBaseDisp(rl_obj.reg, field_info.FieldOffset().Int32Value(), rl_src.reg, kWord);
      MarkPossibleNullPointerException(opt_flags);
      if (field_info.IsVolatile()) {
        // A load might follow the volatile store so insert a StoreLoad barrier.
        GenMemBarrier(kStoreLoad);
      }
      if (is_object && !mir_graph_->IsConstantNullRef(rl_src)) {
        MarkGCCard(rl_src.reg, rl_obj.reg);
      }
    }
  } else {
    ThreadOffset<4> setter_offset =
        is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(4, pSet64Instance)
                          : (is_object ? QUICK_ENTRYPOINT_OFFSET(4, pSetObjInstance)
                                       : QUICK_ENTRYPOINT_OFFSET(4, pSet32Instance));
    CallRuntimeHelperImmRegLocationRegLocation(setter_offset, field_info.FieldIndex(),
                                               rl_obj, rl_src, true);
  }
}

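// aput-object always goes through a runtime helper; pick the entrypoint variant based on which
// null and bounds checks are still required.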
void Mir2Lir::GenArrayObjPut(int opt_flags, RegLocation rl_array, RegLocation rl_index,
                             RegLocation rl_src) {
  bool needs_range_check = !(opt_flags & MIR_IGNORE_RANGE_CHECK);
  bool needs_null_check = !((cu_->disable_opt & (1 << kNullCheckElimination)) &&
                            (opt_flags & MIR_IGNORE_NULL_CHECK));
  ThreadOffset<4> helper = needs_range_check
      ? (needs_null_check ? QUICK_ENTRYPOINT_OFFSET(4, pAputObjectWithNullAndBoundCheck)
                          : QUICK_ENTRYPOINT_OFFSET(4, pAputObjectWithBoundCheck))
      : QUICK_ENTRYPOINT_OFFSET(4, pAputObject);
  CallRuntimeHelperRegLocationRegLocationRegLocation(helper, rl_array, rl_index, rl_src, true);
}

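// Generate code for const-class: load the type from the dex cache when access is provably OK,
// adding a resolution slow path if the type may not be in the cache yet; otherwise call the
// access-checking type initialization helper.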
void Mir2Lir::GenConstClass(uint32_t type_idx, RegLocation rl_dest) {
  RegLocation rl_method = LoadCurrMethod();
  RegStorage res_reg = AllocTemp();
  RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
  if (!cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx,
                                                        *cu_->dex_file,
                                                        type_idx)) {
    // Call out to helper which resolves type and verifies access.
    // Resolved type returned in kRet0.
    CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(4, pInitializeTypeAndVerifyAccess),
                            type_idx, rl_method.reg, true);
    RegLocation rl_result = GetReturn(false);
    StoreValue(rl_dest, rl_result);
  } else {
    // We don't need access checks, load type from dex cache
    int32_t dex_cache_offset =
        mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value();
    LoadWordDisp(rl_method.reg, dex_cache_offset, res_reg);
    int32_t offset_of_type =
        mirror::Array::DataOffset(sizeof(mirror::Class*)).Int32Value() + (sizeof(mirror::Class*)
        * type_idx);
    LoadWordDisp(res_reg, offset_of_type, rl_result.reg);
    if (!cu_->compiler_driver->CanAssumeTypeIsPresentInDexCache(*cu_->dex_file,
        type_idx) || SLOW_TYPE_PATH) {
      // Slow path, at runtime test if type is null and if so initialize
      FlushAllRegs();
      LIR* branch = OpCmpImmBranch(kCondEq, rl_result.reg, 0, NULL);
      LIR* cont = NewLIR0(kPseudoTargetLabel);

      // Object to generate the slow path for class resolution.
      class SlowPath : public LIRSlowPath {
       public:
        SlowPath(Mir2Lir* m2l, LIR* fromfast, LIR* cont, const int type_idx,
                 const RegLocation& rl_method, const RegLocation& rl_result) :
            LIRSlowPath(m2l, m2l->GetCurrentDexPc(), fromfast, cont), type_idx_(type_idx),
            rl_method_(rl_method), rl_result_(rl_result) {
        }

        void Compile() {
          GenerateTargetLabel();

          m2l_->CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(4, pInitializeType), type_idx_,
                                        rl_method_.reg, true);
          m2l_->OpRegCopy(rl_result_.reg, m2l_->TargetReg(kRet0));

          m2l_->OpUnconditionalBranch(cont_);
        }

       private:
        const int type_idx_;
        const RegLocation rl_method_;
        const RegLocation rl_result_;
      };

      // Add to list for future.
      AddSlowPath(new (arena_) SlowPath(this, branch, cont, type_idx, rl_method, rl_result));

      StoreValue(rl_dest, rl_result);
    } else {
      // Fast path, we're done - just store result
      StoreValue(rl_dest, rl_result);
    }
  }
}

Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700910void Mir2Lir::GenConstString(uint32_t string_idx, RegLocation rl_dest) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700911 /* NOTE: Most strings should be available at compile time */
912 int32_t offset_of_string = mirror::Array::DataOffset(sizeof(mirror::String*)).Int32Value() +
913 (sizeof(mirror::String*) * string_idx);
914 if (!cu_->compiler_driver->CanAssumeStringIsPresentInDexCache(
915 *cu_->dex_file, string_idx) || SLOW_STRING_PATH) {
916 // slow path, resolve string if not in dex cache
917 FlushAllRegs();
Brian Carlstrom7934ac22013-07-26 10:54:15 -0700918 LockCallTemps(); // Using explicit registers
Mark Mendell766e9292014-01-27 07:55:47 -0800919
920 // If the Method* is already in a register, we can save a copy.
921 RegLocation rl_method = mir_graph_->GetMethodLoc();
buzbee2700f7e2014-03-07 09:46:20 -0800922 RegStorage r_method;
Mark Mendell766e9292014-01-27 07:55:47 -0800923 if (rl_method.location == kLocPhysReg) {
924 // A temp would conflict with register use below.
buzbee2700f7e2014-03-07 09:46:20 -0800925 DCHECK(!IsTemp(rl_method.reg));
926 r_method = rl_method.reg;
Mark Mendell766e9292014-01-27 07:55:47 -0800927 } else {
928 r_method = TargetReg(kArg2);
929 LoadCurrMethodDirect(r_method);
930 }
931 LoadWordDisp(r_method, mirror::ArtMethod::DexCacheStringsOffset().Int32Value(),
932 TargetReg(kArg0));
933
Brian Carlstrom7940e442013-07-12 13:46:57 -0700934 // Might call out to helper, which will return resolved string in kRet0
Brian Carlstrom7940e442013-07-12 13:46:57 -0700935 LoadWordDisp(TargetReg(kArg0), offset_of_string, TargetReg(kRet0));
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800936 if (cu_->instruction_set == kThumb2 ||
937 cu_->instruction_set == kMips) {
938 // OpRegImm(kOpCmp, TargetReg(kRet0), 0); // Is resolved?
Mark Mendell766e9292014-01-27 07:55:47 -0800939 LoadConstant(TargetReg(kArg1), string_idx);
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800940 LIR* fromfast = OpCmpImmBranch(kCondEq, TargetReg(kRet0), 0, NULL);
941 LIR* cont = NewLIR0(kPseudoTargetLabel);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700942 GenBarrier();
Mark Mendell766e9292014-01-27 07:55:47 -0800943
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800944 // Object to generate the slow path for string resolution.
945 class SlowPath : public LIRSlowPath {
946 public:
buzbee2700f7e2014-03-07 09:46:20 -0800947 SlowPath(Mir2Lir* m2l, LIR* fromfast, LIR* cont, RegStorage r_method) :
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800948 LIRSlowPath(m2l, m2l->GetCurrentDexPc(), fromfast, cont), r_method_(r_method) {
949 }
950
951 void Compile() {
952 GenerateTargetLabel();
953
Dave Allison754ddad2014-02-19 14:05:39 -0800954 const CompilerOptions& compiler_options =
955 m2l_->cu_->compiler_driver->GetCompilerOptions();
956 if (compiler_options.GenerateHelperTrampolines()) {
957 m2l_->OpRegCopy(m2l_->TargetReg(kArg0), r_method_);
958 m2l_->CallHelper(RegStorage::InvalidReg(), QUICK_ENTRYPOINT_OFFSET(4, pResolveString),
959 true);
960 } else {
961 RegStorage r_tgt = m2l_->CallHelperSetup(QUICK_ENTRYPOINT_OFFSET(4, pResolveString));
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800962
Dave Allison754ddad2014-02-19 14:05:39 -0800963 m2l_->OpRegCopy(m2l_->TargetReg(kArg0), r_method_);
964 LIR* call_inst = m2l_->OpReg(kOpBlx, r_tgt);
965 m2l_->MarkSafepointPC(call_inst);
966 m2l_->FreeTemp(r_tgt);
967 }
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800968
969 m2l_->OpUnconditionalBranch(cont_);
970 }
971
972 private:
buzbee2700f7e2014-03-07 09:46:20 -0800973 RegStorage r_method_;
Dave Allisonbcec6fb2014-01-17 12:52:22 -0800974 };
975
976 // Add to list for future.
977 AddSlowPath(new (arena_) SlowPath(this, fromfast, cont, r_method));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700978 } else {
Dmitry Petrochenko6a58cb12014-04-02 17:27:59 +0700979 DCHECK(cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64);
Mark Mendell766e9292014-01-27 07:55:47 -0800980 LIR* branch = OpCmpImmBranch(kCondNe, TargetReg(kRet0), 0, NULL);
981 LoadConstant(TargetReg(kArg1), string_idx);
Ian Rogersdd7624d2014-03-14 17:43:00 -0700982 CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(4, pResolveString), r_method, TargetReg(kArg1),
buzbee2700f7e2014-03-07 09:46:20 -0800983 true);
Mark Mendell766e9292014-01-27 07:55:47 -0800984 LIR* target = NewLIR0(kPseudoTargetLabel);
985 branch->target = target;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700986 }
987 GenBarrier();
988 StoreValue(rl_dest, GetReturn(false));
989 } else {
990 RegLocation rl_method = LoadCurrMethod();
buzbee2700f7e2014-03-07 09:46:20 -0800991 RegStorage res_reg = AllocTemp();
Brian Carlstrom7940e442013-07-12 13:46:57 -0700992 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee2700f7e2014-03-07 09:46:20 -0800993 LoadWordDisp(rl_method.reg, mirror::ArtMethod::DexCacheStringsOffset().Int32Value(), res_reg);
994 LoadWordDisp(res_reg, offset_of_string, rl_result.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700995 StoreValue(rl_dest, rl_result);
996 }
997}
998
999/*
1000 * Let helper function take care of everything. Will
1001 * call Class::NewInstanceFromCode(type_idx, method);
1002 */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001003void Mir2Lir::GenNewInstance(uint32_t type_idx, RegLocation rl_dest) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001004 FlushAllRegs(); /* Everything to home location */
1005 // alloc will always check for resolution, do we also need to verify
1006 // access because the verifier was unable to?
Ian Rogersdd7624d2014-03-14 17:43:00 -07001007 ThreadOffset<4> func_offset(-1);
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -08001008 const DexFile* dex_file = cu_->dex_file;
1009 CompilerDriver* driver = cu_->compiler_driver;
1010 if (driver->CanAccessInstantiableTypeWithoutChecks(
1011 cu_->method_idx, *dex_file, type_idx)) {
1012 bool is_type_initialized;
1013 bool use_direct_type_ptr;
1014 uintptr_t direct_type_ptr;
1015 if (kEmbedClassInCode &&
1016 driver->CanEmbedTypeInCode(*dex_file, type_idx,
1017 &is_type_initialized, &use_direct_type_ptr, &direct_type_ptr)) {
1018 // The fast path.
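 // ("Fast" means the class could be embedded at compile time: either its pointer is
 // baked directly into the code (use_direct_type_ptr) or it is loaded with
 // LoadClassType(), so we only have to choose between the Resolved and Initialized
 // allocation entrypoints rather than the generic pAllocObject helper.)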
1019 if (!use_direct_type_ptr) {
Mark Mendell55d0eac2014-02-06 11:02:52 -08001020 LoadClassType(type_idx, kArg0);
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -08001021 if (!is_type_initialized) {
Ian Rogersdd7624d2014-03-14 17:43:00 -07001022 func_offset = QUICK_ENTRYPOINT_OFFSET(4, pAllocObjectResolved);
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -08001023 CallRuntimeHelperRegMethod(func_offset, TargetReg(kArg0), true);
1024 } else {
Ian Rogersdd7624d2014-03-14 17:43:00 -07001025 func_offset = QUICK_ENTRYPOINT_OFFSET(4, pAllocObjectInitialized);
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -08001026 CallRuntimeHelperRegMethod(func_offset, TargetReg(kArg0), true);
1027 }
1028 } else {
1029 // Use the direct pointer.
1030 if (!is_type_initialized) {
Ian Rogersdd7624d2014-03-14 17:43:00 -07001031 func_offset = QUICK_ENTRYPOINT_OFFSET(4, pAllocObjectResolved);
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -08001032 CallRuntimeHelperImmMethod(func_offset, direct_type_ptr, true);
1033 } else {
Ian Rogersdd7624d2014-03-14 17:43:00 -07001034 func_offset = QUICK_ENTRYPOINT_OFFSET(4, pAllocObjectInitialized);
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -08001035 CallRuntimeHelperImmMethod(func_offset, direct_type_ptr, true);
1036 }
1037 }
1038 } else {
1039 // The slow path.
1040 DCHECK_EQ(func_offset.Int32Value(), -1);
Ian Rogersdd7624d2014-03-14 17:43:00 -07001041 func_offset = QUICK_ENTRYPOINT_OFFSET(4, pAllocObject);
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -08001042 CallRuntimeHelperImmMethod(func_offset, type_idx, true);
1043 }
1044 DCHECK_NE(func_offset.Int32Value(), -1);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001045 } else {
Ian Rogersdd7624d2014-03-14 17:43:00 -07001046 func_offset = QUICK_ENTRYPOINT_OFFSET(4, pAllocObjectWithAccessCheck);
Hiroshi Yamauchibe1ca552014-01-15 11:46:48 -08001047 CallRuntimeHelperImmMethod(func_offset, type_idx, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001048 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001049 RegLocation rl_result = GetReturn(false);
1050 StoreValue(rl_dest, rl_result);
1051}
1052
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001053void Mir2Lir::GenThrow(RegLocation rl_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001054 FlushAllRegs();
Ian Rogersdd7624d2014-03-14 17:43:00 -07001055 CallRuntimeHelperRegLocation(QUICK_ENTRYPOINT_OFFSET(4, pDeliverException), rl_src, true);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001056}
1057
1058// For final classes there are no sub-classes to check and so we can answer the instance-of
1059// question with simple comparisons.
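// The emitted sequence is roughly (a sketch, not the verbatim LIR):
//   result = 0
//   if (object == null) goto done                  // null is never an instance
//   check = declaring class, or dex_cache_resolved_types_[type_idx]
//   if (object->klass_ == check) result = 1        // exact match suffices for a final class
// done: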
1060void Mir2Lir::GenInstanceofFinal(bool use_declaring_class, uint32_t type_idx, RegLocation rl_dest,
1061 RegLocation rl_src) {
Mark Mendelldf8ee2e2014-01-27 16:37:47 -08001062 // X86 has its own implementation.
Dmitry Petrochenko6a58cb12014-04-02 17:27:59 +07001063 DCHECK(cu_->instruction_set != kX86 && cu_->instruction_set != kX86_64);
Mark Mendelldf8ee2e2014-01-27 16:37:47 -08001064
Brian Carlstrom7940e442013-07-12 13:46:57 -07001065 RegLocation object = LoadValue(rl_src, kCoreReg);
1066 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee2700f7e2014-03-07 09:46:20 -08001067 RegStorage result_reg = rl_result.reg;
1068 if (result_reg == object.reg) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001069 result_reg = AllocTypedTemp(false, kCoreReg);
1070 }
1071 LoadConstant(result_reg, 0); // assume false
buzbee2700f7e2014-03-07 09:46:20 -08001072 LIR* null_branchover = OpCmpImmBranch(kCondEq, object.reg, 0, NULL);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001073
buzbee2700f7e2014-03-07 09:46:20 -08001074 RegStorage check_class = AllocTypedTemp(false, kCoreReg);
1075 RegStorage object_class = AllocTypedTemp(false, kCoreReg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001076
1077 LoadCurrMethodDirect(check_class);
1078 if (use_declaring_class) {
buzbee2700f7e2014-03-07 09:46:20 -08001079 LoadWordDisp(check_class, mirror::ArtMethod::DeclaringClassOffset().Int32Value(), check_class);
1080 LoadWordDisp(object.reg, mirror::Object::ClassOffset().Int32Value(), object_class);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001081 } else {
Brian Carlstromea46f952013-07-30 01:26:50 -07001082 LoadWordDisp(check_class, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(),
Brian Carlstrom7940e442013-07-12 13:46:57 -07001083 check_class);
buzbee2700f7e2014-03-07 09:46:20 -08001084 LoadWordDisp(object.reg, mirror::Object::ClassOffset().Int32Value(), object_class);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001085 int32_t offset_of_type =
1086 mirror::Array::DataOffset(sizeof(mirror::Class*)).Int32Value() +
1087 (sizeof(mirror::Class*) * type_idx);
1088 LoadWordDisp(check_class, offset_of_type, check_class);
1089 }
1090
1091 LIR* ne_branchover = NULL;
1092 if (cu_->instruction_set == kThumb2) {
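    // Thumb2 can if-convert the compare: OpIT(kCondEq, "") opens an IT (If-Then)
    // block covering just the next instruction, so the "load true" below executes
    // only when the classes matched and no branch is needed.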
1093 OpRegReg(kOpCmp, check_class, object_class); // Same?
Dave Allison3da67a52014-04-02 17:03:45 -07001094 LIR* it = OpIT(kCondEq, ""); // if-convert the test
Brian Carlstrom7940e442013-07-12 13:46:57 -07001095 LoadConstant(result_reg, 1); // .eq case - load true
Dave Allison3da67a52014-04-02 17:03:45 -07001096 OpEndIT(it);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001097 } else {
1098 ne_branchover = OpCmpBranch(kCondNe, check_class, object_class, NULL);
1099 LoadConstant(result_reg, 1); // eq case - load true
1100 }
1101 LIR* target = NewLIR0(kPseudoTargetLabel);
1102 null_branchover->target = target;
1103 if (ne_branchover != NULL) {
1104 ne_branchover->target = target;
1105 }
1106 FreeTemp(object_class);
1107 FreeTemp(check_class);
1108 if (IsTemp(result_reg)) {
buzbee2700f7e2014-03-07 09:46:20 -08001109 OpRegCopy(rl_result.reg, result_reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001110 FreeTemp(result_reg);
1111 }
1112 StoreValue(rl_dest, rl_result);
1113}
1114
1115void Mir2Lir::GenInstanceofCallingHelper(bool needs_access_check, bool type_known_final,
1116 bool type_known_abstract, bool use_declaring_class,
1117 bool can_assume_type_is_in_dex_cache,
1118 uint32_t type_idx, RegLocation rl_dest,
1119 RegLocation rl_src) {
Mark Mendell6607d972014-02-10 06:54:18 -08001120 // X86 has its own implementation.
Dmitry Petrochenko6a58cb12014-04-02 17:27:59 +07001121 DCHECK(cu_->instruction_set != kX86 && cu_->instruction_set != kX86_64);
Mark Mendell6607d972014-02-10 06:54:18 -08001122
Brian Carlstrom7940e442013-07-12 13:46:57 -07001123 FlushAllRegs();
1124 // May generate a call - use explicit registers
1125 LockCallTemps();
1126 LoadCurrMethodDirect(TargetReg(kArg1)); // kArg1 <= current Method*
buzbee2700f7e2014-03-07 09:46:20 -08001127 RegStorage class_reg = TargetReg(kArg2); // kArg2 will hold the Class*
Brian Carlstrom7940e442013-07-12 13:46:57 -07001128 if (needs_access_check) {
1129 // Check we have access to type_idx and if not throw IllegalAccessError,
1130 // returns Class* in kArg0
Ian Rogersdd7624d2014-03-14 17:43:00 -07001131 CallRuntimeHelperImm(QUICK_ENTRYPOINT_OFFSET(4, pInitializeTypeAndVerifyAccess),
Brian Carlstrom7940e442013-07-12 13:46:57 -07001132 type_idx, true);
1133 OpRegCopy(class_reg, TargetReg(kRet0)); // Align usage with fast path
1134 LoadValueDirectFixed(rl_src, TargetReg(kArg0)); // kArg0 <= ref
1135 } else if (use_declaring_class) {
1136 LoadValueDirectFixed(rl_src, TargetReg(kArg0)); // kArg0 <= ref
buzbee2700f7e2014-03-07 09:46:20 -08001137 LoadWordDisp(TargetReg(kArg1), mirror::ArtMethod::DeclaringClassOffset().Int32Value(),
1138 class_reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001139 } else {
1140 // Load dex cache entry into class_reg (kArg2)
1141 LoadValueDirectFixed(rl_src, TargetReg(kArg0)); // kArg0 <= ref
buzbee2700f7e2014-03-07 09:46:20 -08001142 LoadWordDisp(TargetReg(kArg1), mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(),
1143 class_reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001144 int32_t offset_of_type =
1145 mirror::Array::DataOffset(sizeof(mirror::Class*)).Int32Value() + (sizeof(mirror::Class*)
1146 * type_idx);
1147 LoadWordDisp(class_reg, offset_of_type, class_reg);
1148 if (!can_assume_type_is_in_dex_cache) {
1149 // Need to test presence of type in dex cache at runtime
1150 LIR* hop_branch = OpCmpImmBranch(kCondNe, class_reg, 0, NULL);
1151 // Not resolved
1152 // Call out to helper, which will return resolved type in kRet0
Ian Rogersdd7624d2014-03-14 17:43:00 -07001153 CallRuntimeHelperImm(QUICK_ENTRYPOINT_OFFSET(4, pInitializeType), type_idx, true);
Brian Carlstrom7934ac22013-07-26 10:54:15 -07001154 OpRegCopy(TargetReg(kArg2), TargetReg(kRet0)); // Align usage with fast path
Brian Carlstrom7940e442013-07-12 13:46:57 -07001155 LoadValueDirectFixed(rl_src, TargetReg(kArg0)); /* reload Ref */
1156 // Rejoin code paths
1157 LIR* hop_target = NewLIR0(kPseudoTargetLabel);
1158 hop_branch->target = hop_target;
1159 }
1160 }
1161 /* kArg0 is ref, kArg2 is class. If ref==null, use directly as bool result */
1162 RegLocation rl_result = GetReturn(false);
1163 if (cu_->instruction_set == kMips) {
1164 // On MIPS rArg0 != rl_result, place false in result if branch is taken.
buzbee2700f7e2014-03-07 09:46:20 -08001165 LoadConstant(rl_result.reg, 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001166 }
1167 LIR* branch1 = OpCmpImmBranch(kCondEq, TargetReg(kArg0), 0, NULL);
1168
1169 /* load object->klass_ */
1170 DCHECK_EQ(mirror::Object::ClassOffset().Int32Value(), 0);
1171 LoadWordDisp(TargetReg(kArg0), mirror::Object::ClassOffset().Int32Value(), TargetReg(kArg1));
1172 /* kArg0 is ref, kArg1 is ref->klass_, kArg2 is class */
1173 LIR* branchover = NULL;
1174 if (type_known_final) {
1175 // rl_result == ref == null == 0.
1176 if (cu_->instruction_set == kThumb2) {
1177 OpRegReg(kOpCmp, TargetReg(kArg1), TargetReg(kArg2)); // Same?
Dave Allison3da67a52014-04-02 17:03:45 -07001178 LIR* it = OpIT(kCondEq, "E"); // if-convert the test
buzbee2700f7e2014-03-07 09:46:20 -08001179 LoadConstant(rl_result.reg, 1); // .eq case - load true
1180 LoadConstant(rl_result.reg, 0); // .ne case - load false
Dave Allison3da67a52014-04-02 17:03:45 -07001181 OpEndIT(it);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001182 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001183 LoadConstant(rl_result.reg, 0); // ne case - load false
Brian Carlstrom7940e442013-07-12 13:46:57 -07001184 branchover = OpCmpBranch(kCondNe, TargetReg(kArg1), TargetReg(kArg2), NULL);
buzbee2700f7e2014-03-07 09:46:20 -08001185 LoadConstant(rl_result.reg, 1); // eq case - load true
Brian Carlstrom7940e442013-07-12 13:46:57 -07001186 }
1187 } else {
1188 if (cu_->instruction_set == kThumb2) {
Ian Rogersdd7624d2014-03-14 17:43:00 -07001189 RegStorage r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(4, pInstanceofNonTrivial));
Dave Allison3da67a52014-04-02 17:03:45 -07001190 LIR* it = nullptr;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001191 if (!type_known_abstract) {
1192 /* Uses conditional nullification */
1193 OpRegReg(kOpCmp, TargetReg(kArg1), TargetReg(kArg2)); // Same?
Dave Allison3da67a52014-04-02 17:03:45 -07001194 it = OpIT(kCondEq, "EE"); // if-convert the test
Brian Carlstrom7940e442013-07-12 13:46:57 -07001195 LoadConstant(TargetReg(kArg0), 1); // .eq case - load true
1196 }
1197 OpRegCopy(TargetReg(kArg0), TargetReg(kArg2)); // .ne case - arg0 <= class
1198 OpReg(kOpBlx, r_tgt); // .ne case: helper(class, ref->class)
Dave Allison3da67a52014-04-02 17:03:45 -07001199 if (it != nullptr) {
1200 OpEndIT(it);
1201 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001202 FreeTemp(r_tgt);
1203 } else {
1204 if (!type_known_abstract) {
1205 /* Uses branchovers */
buzbee2700f7e2014-03-07 09:46:20 -08001206 LoadConstant(rl_result.reg, 1); // assume true
Brian Carlstrom7940e442013-07-12 13:46:57 -07001207 branchover = OpCmpBranch(kCondEq, TargetReg(kArg1), TargetReg(kArg2), NULL);
1208 }
Ian Rogersdd7624d2014-03-14 17:43:00 -07001209 RegStorage r_tgt = LoadHelper(QUICK_ENTRYPOINT_OFFSET(4, pInstanceofNonTrivial));
Mark Mendell6607d972014-02-10 06:54:18 -08001210 OpRegCopy(TargetReg(kArg0), TargetReg(kArg2)); // .ne case - arg0 <= class
1211 OpReg(kOpBlx, r_tgt); // .ne case: helper(class, ref->class)
1212 FreeTemp(r_tgt);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001213 }
1214 }
1215 // TODO: only clobber when type isn't final?
Vladimir Marko31c2aac2013-12-09 16:31:19 +00001216 ClobberCallerSave();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001217 /* branch targets here */
1218 LIR* target = NewLIR0(kPseudoTargetLabel);
1219 StoreValue(rl_dest, rl_result);
1220 branch1->target = target;
1221 if (branchover != NULL) {
1222 branchover->target = target;
1223 }
1224}
1225
1226void Mir2Lir::GenInstanceof(uint32_t type_idx, RegLocation rl_dest, RegLocation rl_src) {
1227 bool type_known_final, type_known_abstract, use_declaring_class;
1228 bool needs_access_check = !cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx,
1229 *cu_->dex_file,
1230 type_idx,
1231 &type_known_final,
1232 &type_known_abstract,
1233 &use_declaring_class);
1234 bool can_assume_type_is_in_dex_cache = !needs_access_check &&
1235 cu_->compiler_driver->CanAssumeTypeIsPresentInDexCache(*cu_->dex_file, type_idx);
1236
1237 if ((use_declaring_class || can_assume_type_is_in_dex_cache) && type_known_final) {
1238 GenInstanceofFinal(use_declaring_class, type_idx, rl_dest, rl_src);
1239 } else {
1240 GenInstanceofCallingHelper(needs_access_check, type_known_final, type_known_abstract,
1241 use_declaring_class, can_assume_type_is_in_dex_cache,
1242 type_idx, rl_dest, rl_src);
1243 }
1244}
1245
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001246void Mir2Lir::GenCheckCast(uint32_t insn_idx, uint32_t type_idx, RegLocation rl_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001247 bool type_known_final, type_known_abstract, use_declaring_class;
1248 bool needs_access_check = !cu_->compiler_driver->CanAccessTypeWithoutChecks(cu_->method_idx,
1249 *cu_->dex_file,
1250 type_idx,
1251 &type_known_final,
1252 &type_known_abstract,
1253 &use_declaring_class);
1254 // Note: currently type_known_final is unused, as optimizing will only improve the performance
1255 // of the exception throw path.
1256 DexCompilationUnit* cu = mir_graph_->GetCurrentDexCompilationUnit();
Vladimir Marko2730db02014-01-27 11:15:17 +00001257 if (!needs_access_check && cu_->compiler_driver->IsSafeCast(cu, insn_idx)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001258 // Verifier type analysis proved this check cast would never cause an exception.
1259 return;
1260 }
1261 FlushAllRegs();
1262 // May generate a call - use explicit registers
1263 LockCallTemps();
1264 LoadCurrMethodDirect(TargetReg(kArg1)); // kArg1 <= current Method*
buzbee2700f7e2014-03-07 09:46:20 -08001265 RegStorage class_reg = TargetReg(kArg2); // kArg2 will hold the Class*
Brian Carlstrom7940e442013-07-12 13:46:57 -07001266 if (needs_access_check) {
1267 // Check we have access to type_idx and if not throw IllegalAccessError,
1268 // returns Class* in kRet0
1269 // InitializeTypeAndVerifyAccess(idx, method)
Ian Rogersdd7624d2014-03-14 17:43:00 -07001270 CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(4, pInitializeTypeAndVerifyAccess),
Brian Carlstrom7940e442013-07-12 13:46:57 -07001271 type_idx, TargetReg(kArg1), true);
1272 OpRegCopy(class_reg, TargetReg(kRet0)); // Align usage with fast path
1273 } else if (use_declaring_class) {
buzbee2700f7e2014-03-07 09:46:20 -08001274 LoadWordDisp(TargetReg(kArg1), mirror::ArtMethod::DeclaringClassOffset().Int32Value(),
1275 class_reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001276 } else {
1277 // Load dex cache entry into class_reg (kArg2)
buzbee2700f7e2014-03-07 09:46:20 -08001278 LoadWordDisp(TargetReg(kArg1), mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(),
1279 class_reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001280 int32_t offset_of_type =
1281 mirror::Array::DataOffset(sizeof(mirror::Class*)).Int32Value() +
1282 (sizeof(mirror::Class*) * type_idx);
1283 LoadWordDisp(class_reg, offset_of_type, class_reg);
1284 if (!cu_->compiler_driver->CanAssumeTypeIsPresentInDexCache(*cu_->dex_file, type_idx)) {
1285 // Need to test presence of type in dex cache at runtime
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001286 LIR* hop_branch = OpCmpImmBranch(kCondEq, class_reg, 0, NULL);
1287 LIR* cont = NewLIR0(kPseudoTargetLabel);
1288
1289 // Slow path to initialize the type. Executed if the type is NULL.
1290 class SlowPath : public LIRSlowPath {
1291 public:
1292 SlowPath(Mir2Lir* m2l, LIR* fromfast, LIR* cont, const int type_idx,
buzbee2700f7e2014-03-07 09:46:20 -08001293 const RegStorage class_reg) :
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001294 LIRSlowPath(m2l, m2l->GetCurrentDexPc(), fromfast, cont), type_idx_(type_idx),
1295 class_reg_(class_reg) {
1296 }
1297
1298 void Compile() {
1299 GenerateTargetLabel();
1300
1301 // Call out to helper, which will return resolved type in kArg0
1302 // InitializeTypeFromCode(idx, method)
Ian Rogersdd7624d2014-03-14 17:43:00 -07001303 m2l_->CallRuntimeHelperImmReg(QUICK_ENTRYPOINT_OFFSET(4, pInitializeType), type_idx_,
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001304 m2l_->TargetReg(kArg1), true);
1305 m2l_->OpRegCopy(class_reg_, m2l_->TargetReg(kRet0)); // Align usage with fast path
1306 m2l_->OpUnconditionalBranch(cont_);
1307 }
 1308 private:
1309 const int type_idx_;
buzbee2700f7e2014-03-07 09:46:20 -08001310 const RegStorage class_reg_;
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001311 };
1312
buzbee2700f7e2014-03-07 09:46:20 -08001313 AddSlowPath(new (arena_) SlowPath(this, hop_branch, cont, type_idx, class_reg));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001314 }
1315 }
1316 // At this point, class_reg (kArg2) has class
1317 LoadValueDirectFixed(rl_src, TargetReg(kArg0)); // kArg0 <= ref
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001318
1319 // Slow path for the case where the classes are not equal. In this case we need
1320 // to call a helper function to do the check.
1321 class SlowPath : public LIRSlowPath {
1322 public:
1323 SlowPath(Mir2Lir* m2l, LIR* fromfast, LIR* cont, bool load):
1324 LIRSlowPath(m2l, m2l->GetCurrentDexPc(), fromfast, cont), load_(load) {
1325 }
1326
1327 void Compile() {
1328 GenerateTargetLabel();
1329
1330 if (load_) {
1331 m2l_->LoadWordDisp(m2l_->TargetReg(kArg0), mirror::Object::ClassOffset().Int32Value(),
1332 m2l_->TargetReg(kArg1));
1333 }
Ian Rogersdd7624d2014-03-14 17:43:00 -07001334 m2l_->CallRuntimeHelperRegReg(QUICK_ENTRYPOINT_OFFSET(4, pCheckCast), m2l_->TargetReg(kArg2),
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001335 m2l_->TargetReg(kArg1), true);
1336
1337 m2l_->OpUnconditionalBranch(cont_);
1338 }
1339
1340 private:
1341 bool load_;
1342 };
1343
1344 if (type_known_abstract) {
1345 // Easier case, run slow path if target is non-null (slow path will load from target)
1346 LIR* branch = OpCmpImmBranch(kCondNe, TargetReg(kArg0), 0, NULL);
1347 LIR* cont = NewLIR0(kPseudoTargetLabel);
1348 AddSlowPath(new (arena_) SlowPath(this, branch, cont, true));
1349 } else {
1350 // Harder, more common case. We need to generate a forward branch over the load
1351 // if the target is null. If it's non-null we perform the load and branch to the
1352 // slow path if the classes are not equal.
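 // The resulting layout is roughly (a sketch, not the verbatim LIR):
 //   if (kArg0 == null) goto cont          // null always passes the cast
 //   kArg1 = kArg0->klass_
 //   if (kArg1 != class_reg) goto slow     // slow path does the full subtype check / throws
 //  cont: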
1353
1354 /* Null is OK - continue */
1355 LIR* branch1 = OpCmpImmBranch(kCondEq, TargetReg(kArg0), 0, NULL);
1356 /* load object->klass_ */
1357 DCHECK_EQ(mirror::Object::ClassOffset().Int32Value(), 0);
buzbee2700f7e2014-03-07 09:46:20 -08001358 LoadWordDisp(TargetReg(kArg0), mirror::Object::ClassOffset().Int32Value(), TargetReg(kArg1));
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001359
1360 LIR* branch2 = OpCmpBranch(kCondNe, TargetReg(kArg1), class_reg, NULL);
1361 LIR* cont = NewLIR0(kPseudoTargetLabel);
1362
 1363 // Add the slow path; it will not perform the load since that has already been done.
1364 AddSlowPath(new (arena_) SlowPath(this, branch2, cont, false));
1365
1366 // Set the null check to branch to the continuation.
1367 branch1->target = cont;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001368 }
1369}
1370
1371void Mir2Lir::GenLong3Addr(OpKind first_op, OpKind second_op, RegLocation rl_dest,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001372 RegLocation rl_src1, RegLocation rl_src2) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001373 RegLocation rl_result;
1374 if (cu_->instruction_set == kThumb2) {
1375 /*
1376 * NOTE: This is the one place in the code in which we might have
1377 * as many as six live temporary registers. There are 5 in the normal
1378 * set for Arm. Until we have spill capabilities, temporarily add
1379 * lr to the temp set. It is safe to do this locally, but note that
1380 * lr is used explicitly elsewhere in the code generator and cannot
1381 * normally be used as a general temp register.
1382 */
1383 MarkTemp(TargetReg(kLr)); // Add lr to the temp pool
1384 FreeTemp(TargetReg(kLr)); // and make it available
1385 }
1386 rl_src1 = LoadValueWide(rl_src1, kCoreReg);
1387 rl_src2 = LoadValueWide(rl_src2, kCoreReg);
1388 rl_result = EvalLoc(rl_dest, kCoreReg, true);
1389 // The longs may overlap - use intermediate temp if so
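 // (e.g. an add-long where the result's low register is the same physical register
 // as one source's high register: writing the low half first would corrupt the high
 // operand before it is read, hence the temp below.)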
buzbee2700f7e2014-03-07 09:46:20 -08001390 if ((rl_result.reg.GetLowReg() == rl_src1.reg.GetHighReg()) || (rl_result.reg.GetLowReg() == rl_src2.reg.GetHighReg())) {
1391 RegStorage t_reg = AllocTemp();
1392 OpRegRegReg(first_op, t_reg, rl_src1.reg.GetLow(), rl_src2.reg.GetLow());
1393 OpRegRegReg(second_op, rl_result.reg.GetHigh(), rl_src1.reg.GetHigh(), rl_src2.reg.GetHigh());
1394 OpRegCopy(rl_result.reg.GetLow(), t_reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001395 FreeTemp(t_reg);
1396 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001397 OpRegRegReg(first_op, rl_result.reg.GetLow(), rl_src1.reg.GetLow(), rl_src2.reg.GetLow());
1398 OpRegRegReg(second_op, rl_result.reg.GetHigh(), rl_src1.reg.GetHigh(), rl_src2.reg.GetHigh());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001399 }
1400 /*
1401 * NOTE: If rl_dest refers to a frame variable in a large frame, the
1402 * following StoreValueWide might need to allocate a temp register.
1403 * To further work around the lack of a spill capability, explicitly
1404 * free any temps from rl_src1 & rl_src2 that aren't still live in rl_result.
1405 * Remove when spill is functional.
1406 */
1407 FreeRegLocTemps(rl_result, rl_src1);
1408 FreeRegLocTemps(rl_result, rl_src2);
1409 StoreValueWide(rl_dest, rl_result);
1410 if (cu_->instruction_set == kThumb2) {
1411 Clobber(TargetReg(kLr));
1412 UnmarkTemp(TargetReg(kLr)); // Remove lr from the temp pool
1413 }
1414}
1415
1416
1417void Mir2Lir::GenShiftOpLong(Instruction::Code opcode, RegLocation rl_dest,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001418 RegLocation rl_src1, RegLocation rl_shift) {
Ian Rogersdd7624d2014-03-14 17:43:00 -07001419 ThreadOffset<4> func_offset(-1);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001420
1421 switch (opcode) {
1422 case Instruction::SHL_LONG:
1423 case Instruction::SHL_LONG_2ADDR:
Ian Rogersdd7624d2014-03-14 17:43:00 -07001424 func_offset = QUICK_ENTRYPOINT_OFFSET(4, pShlLong);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001425 break;
1426 case Instruction::SHR_LONG:
1427 case Instruction::SHR_LONG_2ADDR:
Ian Rogersdd7624d2014-03-14 17:43:00 -07001428 func_offset = QUICK_ENTRYPOINT_OFFSET(4, pShrLong);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001429 break;
1430 case Instruction::USHR_LONG:
1431 case Instruction::USHR_LONG_2ADDR:
Ian Rogersdd7624d2014-03-14 17:43:00 -07001432 func_offset = QUICK_ENTRYPOINT_OFFSET(4, pUshrLong);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001433 break;
1434 default:
1435 LOG(FATAL) << "Unexpected case";
1436 }
1437 FlushAllRegs(); /* Send everything to home location */
1438 CallRuntimeHelperRegLocationRegLocation(func_offset, rl_src1, rl_shift, false);
1439 RegLocation rl_result = GetReturnWide(false);
1440 StoreValueWide(rl_dest, rl_result);
1441}
1442
1443
1444void Mir2Lir::GenArithOpInt(Instruction::Code opcode, RegLocation rl_dest,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001445 RegLocation rl_src1, RegLocation rl_src2) {
Dmitry Petrochenko6a58cb12014-04-02 17:27:59 +07001446 DCHECK(cu_->instruction_set != kX86 && cu_->instruction_set != kX86_64);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001447 OpKind op = kOpBkpt;
1448 bool is_div_rem = false;
1449 bool check_zero = false;
1450 bool unary = false;
1451 RegLocation rl_result;
1452 bool shift_op = false;
1453 switch (opcode) {
1454 case Instruction::NEG_INT:
1455 op = kOpNeg;
1456 unary = true;
1457 break;
1458 case Instruction::NOT_INT:
1459 op = kOpMvn;
1460 unary = true;
1461 break;
1462 case Instruction::ADD_INT:
1463 case Instruction::ADD_INT_2ADDR:
1464 op = kOpAdd;
1465 break;
1466 case Instruction::SUB_INT:
1467 case Instruction::SUB_INT_2ADDR:
1468 op = kOpSub;
1469 break;
1470 case Instruction::MUL_INT:
1471 case Instruction::MUL_INT_2ADDR:
1472 op = kOpMul;
1473 break;
1474 case Instruction::DIV_INT:
1475 case Instruction::DIV_INT_2ADDR:
1476 check_zero = true;
1477 op = kOpDiv;
1478 is_div_rem = true;
1479 break;
1480 /* NOTE: returns in kArg1 */
1481 case Instruction::REM_INT:
1482 case Instruction::REM_INT_2ADDR:
1483 check_zero = true;
1484 op = kOpRem;
1485 is_div_rem = true;
1486 break;
1487 case Instruction::AND_INT:
1488 case Instruction::AND_INT_2ADDR:
1489 op = kOpAnd;
1490 break;
1491 case Instruction::OR_INT:
1492 case Instruction::OR_INT_2ADDR:
1493 op = kOpOr;
1494 break;
1495 case Instruction::XOR_INT:
1496 case Instruction::XOR_INT_2ADDR:
1497 op = kOpXor;
1498 break;
1499 case Instruction::SHL_INT:
1500 case Instruction::SHL_INT_2ADDR:
1501 shift_op = true;
1502 op = kOpLsl;
1503 break;
1504 case Instruction::SHR_INT:
1505 case Instruction::SHR_INT_2ADDR:
1506 shift_op = true;
1507 op = kOpAsr;
1508 break;
1509 case Instruction::USHR_INT:
1510 case Instruction::USHR_INT_2ADDR:
1511 shift_op = true;
1512 op = kOpLsr;
1513 break;
1514 default:
1515 LOG(FATAL) << "Invalid word arith op: " << opcode;
1516 }
1517 if (!is_div_rem) {
1518 if (unary) {
1519 rl_src1 = LoadValue(rl_src1, kCoreReg);
1520 rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee2700f7e2014-03-07 09:46:20 -08001521 OpRegReg(op, rl_result.reg, rl_src1.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001522 } else {
1523 if (shift_op) {
Mark Mendellfeb2b4e2014-01-28 12:59:49 -08001524 rl_src2 = LoadValue(rl_src2, kCoreReg);
buzbee2700f7e2014-03-07 09:46:20 -08001525 RegStorage t_reg = AllocTemp();
1526 OpRegRegImm(kOpAnd, t_reg, rl_src2.reg, 31);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001527 rl_src1 = LoadValue(rl_src1, kCoreReg);
1528 rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee2700f7e2014-03-07 09:46:20 -08001529 OpRegRegReg(op, rl_result.reg, rl_src1.reg, t_reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001530 FreeTemp(t_reg);
1531 } else {
1532 rl_src1 = LoadValue(rl_src1, kCoreReg);
1533 rl_src2 = LoadValue(rl_src2, kCoreReg);
1534 rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee2700f7e2014-03-07 09:46:20 -08001535 OpRegRegReg(op, rl_result.reg, rl_src1.reg, rl_src2.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001536 }
1537 }
1538 StoreValue(rl_dest, rl_result);
1539 } else {
Dave Allison70202782013-10-22 17:52:19 -07001540 bool done = false; // Set to true if we happen to find a way to use a real instruction.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001541 if (cu_->instruction_set == kMips) {
1542 rl_src1 = LoadValue(rl_src1, kCoreReg);
1543 rl_src2 = LoadValue(rl_src2, kCoreReg);
1544 if (check_zero) {
buzbee2700f7e2014-03-07 09:46:20 -08001545 GenImmedCheck(kCondEq, rl_src2.reg, 0, kThrowDivZero);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001546 }
buzbee2700f7e2014-03-07 09:46:20 -08001547 rl_result = GenDivRem(rl_dest, rl_src1.reg, rl_src2.reg, op == kOpDiv);
Dave Allison70202782013-10-22 17:52:19 -07001548 done = true;
1549 } else if (cu_->instruction_set == kThumb2) {
1550 if (cu_->GetInstructionSetFeatures().HasDivideInstruction()) {
1551 // Use ARM SDIV instruction for division. For remainder we also need to
1552 // calculate using a MUL and subtract.
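 // i.e. rem = dividend - (dividend / divisor) * divisor, typically emitted on
 // Thumb2 as an SDIV followed by a multiply-and-subtract (MLS).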
1553 rl_src1 = LoadValue(rl_src1, kCoreReg);
1554 rl_src2 = LoadValue(rl_src2, kCoreReg);
1555 if (check_zero) {
buzbee2700f7e2014-03-07 09:46:20 -08001556 GenImmedCheck(kCondEq, rl_src2.reg, 0, kThrowDivZero);
Dave Allison70202782013-10-22 17:52:19 -07001557 }
buzbee2700f7e2014-03-07 09:46:20 -08001558 rl_result = GenDivRem(rl_dest, rl_src1.reg, rl_src2.reg, op == kOpDiv);
Dave Allison70202782013-10-22 17:52:19 -07001559 done = true;
1560 }
1561 }
1562
1563 // If we haven't already generated the code use the callout function.
1564 if (!done) {
Ian Rogersdd7624d2014-03-14 17:43:00 -07001565 ThreadOffset<4> func_offset = QUICK_ENTRYPOINT_OFFSET(4, pIdivmod);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001566 FlushAllRegs(); /* Send everything to home location */
1567 LoadValueDirectFixed(rl_src2, TargetReg(kArg1));
buzbee2700f7e2014-03-07 09:46:20 -08001568 RegStorage r_tgt = CallHelperSetup(func_offset);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001569 LoadValueDirectFixed(rl_src1, TargetReg(kArg0));
1570 if (check_zero) {
1571 GenImmedCheck(kCondEq, TargetReg(kArg1), 0, kThrowDivZero);
1572 }
Dave Allison70202782013-10-22 17:52:19 -07001573 // NOTE: callout here is not a safepoint.
Brian Carlstromdf629502013-07-17 22:39:56 -07001574 CallHelper(r_tgt, func_offset, false /* not a safepoint */);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001575 if (op == kOpDiv)
1576 rl_result = GetReturn(false);
1577 else
1578 rl_result = GetReturnAlt();
1579 }
1580 StoreValue(rl_dest, rl_result);
1581 }
1582}
1583
1584/*
1585 * The following are the first-level codegen routines that analyze the format
 1586 * of each bytecode and then either dispatch to special-purpose codegen routines
 1587 * or produce the corresponding target instructions directly.
1588 */
1589
Brian Carlstrom7940e442013-07-12 13:46:57 -07001590// Returns true if no more than two bits are set in 'x'.
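// The check clears the lowest set bit twice: x &= (x - 1) removes one set bit, and
// the original value had at most two bits set exactly when what remains is zero or
// a single bit.  For example 0b0110 -> 0b0100 -> 0 (true), while 0b0111 -> 0b0110 -> 0b0100 (false).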
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001591static bool IsPopCountLE2(unsigned int x) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001592 x &= x - 1;
1593 return (x & (x - 1)) == 0;
1594}
1595
Brian Carlstrom7940e442013-07-12 13:46:57 -07001596// Returns true if it added instructions to 'cu' to divide 'rl_src' by 'lit'
1597// and store the result in 'rl_dest'.
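// For a signed power-of-two division the generated code adds a rounding bias of
// (lit - 1) to negative dividends before the arithmetic shift so the quotient
// truncates toward zero.  A rough sketch for lit == 8 (k == 3):
//   t = src >> 31             // arithmetic: all ones if src < 0, else zero
//   t = (unsigned)t >> 29     // bias: 7 for a negative src, 0 otherwise
//   t = t + src
//   dst = t >> 3              // arithmetic shift
// The remainder variant reuses the bias: rem = ((src + bias) & (lit - 1)) - bias.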
buzbee11b63d12013-08-27 07:34:17 -07001598bool Mir2Lir::HandleEasyDivRem(Instruction::Code dalvik_opcode, bool is_div,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001599 RegLocation rl_src, RegLocation rl_dest, int lit) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001600 if ((lit < 2) || ((cu_->instruction_set != kThumb2) && !IsPowerOfTwo(lit))) {
1601 return false;
1602 }
1603 // No divide instruction for Arm, so check for more special cases
1604 if ((cu_->instruction_set == kThumb2) && !IsPowerOfTwo(lit)) {
buzbee11b63d12013-08-27 07:34:17 -07001605 return SmallLiteralDivRem(dalvik_opcode, is_div, rl_src, rl_dest, lit);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001606 }
1607 int k = LowestSetBit(lit);
1608 if (k >= 30) {
1609 // Avoid special cases.
1610 return false;
1611 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001612 rl_src = LoadValue(rl_src, kCoreReg);
1613 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
buzbee11b63d12013-08-27 07:34:17 -07001614 if (is_div) {
buzbee2700f7e2014-03-07 09:46:20 -08001615 RegStorage t_reg = AllocTemp();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001616 if (lit == 2) {
1617 // Division by 2 is by far the most common division by constant.
buzbee2700f7e2014-03-07 09:46:20 -08001618 OpRegRegImm(kOpLsr, t_reg, rl_src.reg, 32 - k);
1619 OpRegRegReg(kOpAdd, t_reg, t_reg, rl_src.reg);
1620 OpRegRegImm(kOpAsr, rl_result.reg, t_reg, k);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001621 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001622 OpRegRegImm(kOpAsr, t_reg, rl_src.reg, 31);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001623 OpRegRegImm(kOpLsr, t_reg, t_reg, 32 - k);
buzbee2700f7e2014-03-07 09:46:20 -08001624 OpRegRegReg(kOpAdd, t_reg, t_reg, rl_src.reg);
1625 OpRegRegImm(kOpAsr, rl_result.reg, t_reg, k);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001626 }
1627 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001628 RegStorage t_reg1 = AllocTemp();
1629 RegStorage t_reg2 = AllocTemp();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001630 if (lit == 2) {
buzbee2700f7e2014-03-07 09:46:20 -08001631 OpRegRegImm(kOpLsr, t_reg1, rl_src.reg, 32 - k);
1632 OpRegRegReg(kOpAdd, t_reg2, t_reg1, rl_src.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001633 OpRegRegImm(kOpAnd, t_reg2, t_reg2, lit - 1);
buzbee2700f7e2014-03-07 09:46:20 -08001634 OpRegRegReg(kOpSub, rl_result.reg, t_reg2, t_reg1);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001635 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001636 OpRegRegImm(kOpAsr, t_reg1, rl_src.reg, 31);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001637 OpRegRegImm(kOpLsr, t_reg1, t_reg1, 32 - k);
buzbee2700f7e2014-03-07 09:46:20 -08001638 OpRegRegReg(kOpAdd, t_reg2, t_reg1, rl_src.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001639 OpRegRegImm(kOpAnd, t_reg2, t_reg2, lit - 1);
buzbee2700f7e2014-03-07 09:46:20 -08001640 OpRegRegReg(kOpSub, rl_result.reg, t_reg2, t_reg1);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001641 }
1642 }
1643 StoreValue(rl_dest, rl_result);
1644 return true;
1645}
1646
1647// Returns true if it added instructions to 'cu' to multiply 'rl_src' by 'lit'
1648// and store the result in 'rl_dest'.
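// Examples of the strength reductions handled below (sketches, not verbatim LIR):
//   x * 8  -> x << 3                      (power of two)
//   x * 10 -> (x << 1) + (x << 3)         (no more than two bits set in the literal)
//   x * 7  -> (x << 3) - x                (one less than a power of two)
// On Thumb2 the decision is delegated to EasyMultiply(), which can also use the
// shifted-operand forms of the arithmetic instructions.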
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001649bool Mir2Lir::HandleEasyMultiply(RegLocation rl_src, RegLocation rl_dest, int lit) {
Ian Rogerse2143c02014-03-28 08:47:16 -07001650 if (lit < 0) {
1651 return false;
1652 }
1653 if (lit == 0) {
1654 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
1655 LoadConstant(rl_result.reg, 0);
1656 StoreValue(rl_dest, rl_result);
1657 return true;
1658 }
1659 if (lit == 1) {
1660 rl_src = LoadValue(rl_src, kCoreReg);
1661 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
1662 OpRegCopy(rl_result.reg, rl_src.reg);
1663 StoreValue(rl_dest, rl_result);
1664 return true;
1665 }
Zheng Xuf9719f92014-04-02 13:31:31 +01001666 // There is RegRegRegShift on Arm, so check for more special cases
1667 if (cu_->instruction_set == kThumb2) {
Ian Rogerse2143c02014-03-28 08:47:16 -07001668 return EasyMultiply(rl_src, rl_dest, lit);
1669 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001670 // Can we simplify this multiplication?
1671 bool power_of_two = false;
1672 bool pop_count_le2 = false;
1673 bool power_of_two_minus_one = false;
Ian Rogerse2143c02014-03-28 08:47:16 -07001674 if (IsPowerOfTwo(lit)) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001675 power_of_two = true;
1676 } else if (IsPopCountLE2(lit)) {
1677 pop_count_le2 = true;
1678 } else if (IsPowerOfTwo(lit + 1)) {
1679 power_of_two_minus_one = true;
1680 } else {
1681 return false;
1682 }
1683 rl_src = LoadValue(rl_src, kCoreReg);
1684 RegLocation rl_result = EvalLoc(rl_dest, kCoreReg, true);
1685 if (power_of_two) {
1686 // Shift.
buzbee2700f7e2014-03-07 09:46:20 -08001687 OpRegRegImm(kOpLsl, rl_result.reg, rl_src.reg, LowestSetBit(lit));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001688 } else if (pop_count_le2) {
1689 // Shift and add and shift.
1690 int first_bit = LowestSetBit(lit);
1691 int second_bit = LowestSetBit(lit ^ (1 << first_bit));
1692 GenMultiplyByTwoBitMultiplier(rl_src, rl_result, lit, first_bit, second_bit);
1693 } else {
 1694 // Reverse subtract: (src << k) - src, where lit == (1 << k) - 1.
1695 DCHECK(power_of_two_minus_one);
1696 // TUNING: rsb dst, src, src lsl#LowestSetBit(lit + 1)
buzbee2700f7e2014-03-07 09:46:20 -08001697 RegStorage t_reg = AllocTemp();
1698 OpRegRegImm(kOpLsl, t_reg, rl_src.reg, LowestSetBit(lit + 1));
1699 OpRegRegReg(kOpSub, rl_result.reg, t_reg, rl_src.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001700 }
1701 StoreValue(rl_dest, rl_result);
1702 return true;
1703}
1704
1705void Mir2Lir::GenArithOpIntLit(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001706 int lit) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001707 RegLocation rl_result;
1708 OpKind op = static_cast<OpKind>(0); /* Make gcc happy */
 1709 bool shift_op = false;
1710 bool is_div = false;
1711
1712 switch (opcode) {
1713 case Instruction::RSUB_INT_LIT8:
1714 case Instruction::RSUB_INT: {
1715 rl_src = LoadValue(rl_src, kCoreReg);
1716 rl_result = EvalLoc(rl_dest, kCoreReg, true);
1717 if (cu_->instruction_set == kThumb2) {
buzbee2700f7e2014-03-07 09:46:20 -08001718 OpRegRegImm(kOpRsub, rl_result.reg, rl_src.reg, lit);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001719 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001720 OpRegReg(kOpNeg, rl_result.reg, rl_src.reg);
1721 OpRegImm(kOpAdd, rl_result.reg, lit);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001722 }
1723 StoreValue(rl_dest, rl_result);
1724 return;
1725 }
1726
1727 case Instruction::SUB_INT:
1728 case Instruction::SUB_INT_2ADDR:
1729 lit = -lit;
1730 // Intended fallthrough
1731 case Instruction::ADD_INT:
1732 case Instruction::ADD_INT_2ADDR:
1733 case Instruction::ADD_INT_LIT8:
1734 case Instruction::ADD_INT_LIT16:
1735 op = kOpAdd;
1736 break;
1737 case Instruction::MUL_INT:
1738 case Instruction::MUL_INT_2ADDR:
1739 case Instruction::MUL_INT_LIT8:
1740 case Instruction::MUL_INT_LIT16: {
1741 if (HandleEasyMultiply(rl_src, rl_dest, lit)) {
1742 return;
1743 }
1744 op = kOpMul;
1745 break;
1746 }
1747 case Instruction::AND_INT:
1748 case Instruction::AND_INT_2ADDR:
1749 case Instruction::AND_INT_LIT8:
1750 case Instruction::AND_INT_LIT16:
1751 op = kOpAnd;
1752 break;
1753 case Instruction::OR_INT:
1754 case Instruction::OR_INT_2ADDR:
1755 case Instruction::OR_INT_LIT8:
1756 case Instruction::OR_INT_LIT16:
1757 op = kOpOr;
1758 break;
1759 case Instruction::XOR_INT:
1760 case Instruction::XOR_INT_2ADDR:
1761 case Instruction::XOR_INT_LIT8:
1762 case Instruction::XOR_INT_LIT16:
1763 op = kOpXor;
1764 break;
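    // For the shift opcodes the literal distance is masked to its low five bits,
    // matching the Dalvik/Java semantics for 32-bit shifts (a literal of 33
    // behaves the same as 1).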
1765 case Instruction::SHL_INT_LIT8:
1766 case Instruction::SHL_INT:
1767 case Instruction::SHL_INT_2ADDR:
1768 lit &= 31;
1769 shift_op = true;
1770 op = kOpLsl;
1771 break;
1772 case Instruction::SHR_INT_LIT8:
1773 case Instruction::SHR_INT:
1774 case Instruction::SHR_INT_2ADDR:
1775 lit &= 31;
1776 shift_op = true;
1777 op = kOpAsr;
1778 break;
1779 case Instruction::USHR_INT_LIT8:
1780 case Instruction::USHR_INT:
1781 case Instruction::USHR_INT_2ADDR:
1782 lit &= 31;
1783 shift_op = true;
1784 op = kOpLsr;
1785 break;
1786
1787 case Instruction::DIV_INT:
1788 case Instruction::DIV_INT_2ADDR:
1789 case Instruction::DIV_INT_LIT8:
1790 case Instruction::DIV_INT_LIT16:
1791 case Instruction::REM_INT:
1792 case Instruction::REM_INT_2ADDR:
1793 case Instruction::REM_INT_LIT8:
1794 case Instruction::REM_INT_LIT16: {
1795 if (lit == 0) {
buzbee2700f7e2014-03-07 09:46:20 -08001796 GenImmedCheck(kCondAl, RegStorage::InvalidReg(), 0, kThrowDivZero);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001797 return;
1798 }
buzbee11b63d12013-08-27 07:34:17 -07001799 if ((opcode == Instruction::DIV_INT) ||
Brian Carlstrom7940e442013-07-12 13:46:57 -07001800 (opcode == Instruction::DIV_INT_2ADDR) ||
buzbee11b63d12013-08-27 07:34:17 -07001801 (opcode == Instruction::DIV_INT_LIT8) ||
Brian Carlstrom7940e442013-07-12 13:46:57 -07001802 (opcode == Instruction::DIV_INT_LIT16)) {
1803 is_div = true;
1804 } else {
1805 is_div = false;
1806 }
buzbee11b63d12013-08-27 07:34:17 -07001807 if (HandleEasyDivRem(opcode, is_div, rl_src, rl_dest, lit)) {
1808 return;
1809 }
Dave Allison70202782013-10-22 17:52:19 -07001810
1811 bool done = false;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001812 if (cu_->instruction_set == kMips) {
1813 rl_src = LoadValue(rl_src, kCoreReg);
buzbee2700f7e2014-03-07 09:46:20 -08001814 rl_result = GenDivRemLit(rl_dest, rl_src.reg, lit, is_div);
Dave Allison70202782013-10-22 17:52:19 -07001815 done = true;
Dmitry Petrochenko6a58cb12014-04-02 17:27:59 +07001816 } else if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
Mark Mendell2bf31e62014-01-23 12:13:40 -08001817 rl_result = GenDivRemLit(rl_dest, rl_src, lit, is_div);
1818 done = true;
Dave Allison70202782013-10-22 17:52:19 -07001819 } else if (cu_->instruction_set == kThumb2) {
1820 if (cu_->GetInstructionSetFeatures().HasDivideInstruction()) {
1821 // Use ARM SDIV instruction for division. For remainder we also need to
1822 // calculate using a MUL and subtract.
1823 rl_src = LoadValue(rl_src, kCoreReg);
buzbee2700f7e2014-03-07 09:46:20 -08001824 rl_result = GenDivRemLit(rl_dest, rl_src.reg, lit, is_div);
Dave Allison70202782013-10-22 17:52:19 -07001825 done = true;
1826 }
1827 }
1828
1829 if (!done) {
1830 FlushAllRegs(); /* Everything to home location. */
Brian Carlstrom7940e442013-07-12 13:46:57 -07001831 LoadValueDirectFixed(rl_src, TargetReg(kArg0));
1832 Clobber(TargetReg(kArg0));
Ian Rogersdd7624d2014-03-14 17:43:00 -07001833 ThreadOffset<4> func_offset = QUICK_ENTRYPOINT_OFFSET(4, pIdivmod);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001834 CallRuntimeHelperRegImm(func_offset, TargetReg(kArg0), lit, false);
1835 if (is_div)
1836 rl_result = GetReturn(false);
1837 else
1838 rl_result = GetReturnAlt();
1839 }
1840 StoreValue(rl_dest, rl_result);
1841 return;
1842 }
1843 default:
1844 LOG(FATAL) << "Unexpected opcode " << opcode;
1845 }
1846 rl_src = LoadValue(rl_src, kCoreReg);
1847 rl_result = EvalLoc(rl_dest, kCoreReg, true);
Dave Allison70202782013-10-22 17:52:19 -07001848 // Avoid shifts by literal 0 - no support in Thumb. Change to copy.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001849 if (shift_op && (lit == 0)) {
buzbee2700f7e2014-03-07 09:46:20 -08001850 OpRegCopy(rl_result.reg, rl_src.reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001851 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001852 OpRegRegImm(op, rl_result.reg, rl_src.reg, lit);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001853 }
1854 StoreValue(rl_dest, rl_result);
1855}
1856
1857void Mir2Lir::GenArithOpLong(Instruction::Code opcode, RegLocation rl_dest,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001858 RegLocation rl_src1, RegLocation rl_src2) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001859 RegLocation rl_result;
1860 OpKind first_op = kOpBkpt;
1861 OpKind second_op = kOpBkpt;
1862 bool call_out = false;
1863 bool check_zero = false;
Ian Rogersdd7624d2014-03-14 17:43:00 -07001864 ThreadOffset<4> func_offset(-1);
buzbee2700f7e2014-03-07 09:46:20 -08001865 int ret_reg = TargetReg(kRet0).GetReg();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001866
1867 switch (opcode) {
1868 case Instruction::NOT_LONG:
1869 rl_src2 = LoadValueWide(rl_src2, kCoreReg);
1870 rl_result = EvalLoc(rl_dest, kCoreReg, true);
1871 // Check for destructive overlap
buzbee2700f7e2014-03-07 09:46:20 -08001872 if (rl_result.reg.GetLowReg() == rl_src2.reg.GetHighReg()) {
1873 RegStorage t_reg = AllocTemp();
1874 OpRegCopy(t_reg, rl_src2.reg.GetHigh());
1875 OpRegReg(kOpMvn, rl_result.reg.GetLow(), rl_src2.reg.GetLow());
1876 OpRegReg(kOpMvn, rl_result.reg.GetHigh(), t_reg);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001877 FreeTemp(t_reg);
1878 } else {
buzbee2700f7e2014-03-07 09:46:20 -08001879 OpRegReg(kOpMvn, rl_result.reg.GetLow(), rl_src2.reg.GetLow());
1880 OpRegReg(kOpMvn, rl_result.reg.GetHigh(), rl_src2.reg.GetHigh());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001881 }
1882 StoreValueWide(rl_dest, rl_result);
1883 return;
1884 case Instruction::ADD_LONG:
1885 case Instruction::ADD_LONG_2ADDR:
1886 if (cu_->instruction_set != kThumb2) {
Mark Mendelle02d48f2014-01-15 11:19:23 -08001887 GenAddLong(opcode, rl_dest, rl_src1, rl_src2);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001888 return;
1889 }
1890 first_op = kOpAdd;
1891 second_op = kOpAdc;
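      // A 64-bit add on a 32-bit core: add the low words first (setting the carry
      // flag), then add-with-carry the high words so the carry propagates; the
      // SUB/SBC pair below is the same pattern for subtraction.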
1892 break;
1893 case Instruction::SUB_LONG:
1894 case Instruction::SUB_LONG_2ADDR:
1895 if (cu_->instruction_set != kThumb2) {
Mark Mendelle02d48f2014-01-15 11:19:23 -08001896 GenSubLong(opcode, rl_dest, rl_src1, rl_src2);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001897 return;
1898 }
1899 first_op = kOpSub;
1900 second_op = kOpSbc;
1901 break;
1902 case Instruction::MUL_LONG:
1903 case Instruction::MUL_LONG_2ADDR:
Mark Mendell4708dcd2014-01-22 09:05:18 -08001904 if (cu_->instruction_set != kMips) {
Mark Mendelle02d48f2014-01-15 11:19:23 -08001905 GenMulLong(opcode, rl_dest, rl_src1, rl_src2);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001906 return;
1907 } else {
1908 call_out = true;
buzbee2700f7e2014-03-07 09:46:20 -08001909 ret_reg = TargetReg(kRet0).GetReg();
Ian Rogersdd7624d2014-03-14 17:43:00 -07001910 func_offset = QUICK_ENTRYPOINT_OFFSET(4, pLmul);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001911 }
1912 break;
1913 case Instruction::DIV_LONG:
1914 case Instruction::DIV_LONG_2ADDR:
1915 call_out = true;
1916 check_zero = true;
buzbee2700f7e2014-03-07 09:46:20 -08001917 ret_reg = TargetReg(kRet0).GetReg();
Ian Rogersdd7624d2014-03-14 17:43:00 -07001918 func_offset = QUICK_ENTRYPOINT_OFFSET(4, pLdiv);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001919 break;
1920 case Instruction::REM_LONG:
1921 case Instruction::REM_LONG_2ADDR:
1922 call_out = true;
1923 check_zero = true;
Ian Rogersdd7624d2014-03-14 17:43:00 -07001924 func_offset = QUICK_ENTRYPOINT_OFFSET(4, pLmod);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001925 /* NOTE - for Arm, result is in kArg2/kArg3 instead of kRet0/kRet1 */
buzbee2700f7e2014-03-07 09:46:20 -08001926 ret_reg = (cu_->instruction_set == kThumb2) ? TargetReg(kArg2).GetReg() : TargetReg(kRet0).GetReg();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001927 break;
1928 case Instruction::AND_LONG_2ADDR:
1929 case Instruction::AND_LONG:
Dmitry Petrochenko6a58cb12014-04-02 17:27:59 +07001930 if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
Mark Mendelle02d48f2014-01-15 11:19:23 -08001931 return GenAndLong(opcode, rl_dest, rl_src1, rl_src2);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001932 }
1933 first_op = kOpAnd;
1934 second_op = kOpAnd;
1935 break;
1936 case Instruction::OR_LONG:
1937 case Instruction::OR_LONG_2ADDR:
Dmitry Petrochenko6a58cb12014-04-02 17:27:59 +07001938 if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
Mark Mendelle02d48f2014-01-15 11:19:23 -08001939 GenOrLong(opcode, rl_dest, rl_src1, rl_src2);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001940 return;
1941 }
1942 first_op = kOpOr;
1943 second_op = kOpOr;
1944 break;
1945 case Instruction::XOR_LONG:
1946 case Instruction::XOR_LONG_2ADDR:
Dmitry Petrochenko6a58cb12014-04-02 17:27:59 +07001947 if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
Mark Mendelle02d48f2014-01-15 11:19:23 -08001948 GenXorLong(opcode, rl_dest, rl_src1, rl_src2);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001949 return;
1950 }
1951 first_op = kOpXor;
1952 second_op = kOpXor;
1953 break;
1954 case Instruction::NEG_LONG: {
1955 GenNegLong(rl_dest, rl_src2);
1956 return;
1957 }
1958 default:
1959 LOG(FATAL) << "Invalid long arith op";
1960 }
1961 if (!call_out) {
1962 GenLong3Addr(first_op, second_op, rl_dest, rl_src1, rl_src2);
1963 } else {
1964 FlushAllRegs(); /* Send everything to home location */
1965 if (check_zero) {
buzbee2700f7e2014-03-07 09:46:20 -08001966 RegStorage r_tmp1 = RegStorage::MakeRegPair(TargetReg(kArg0), TargetReg(kArg1));
1967 RegStorage r_tmp2 = RegStorage::MakeRegPair(TargetReg(kArg2), TargetReg(kArg3));
1968 LoadValueDirectWideFixed(rl_src2, r_tmp2);
1969 RegStorage r_tgt = CallHelperSetup(func_offset);
1970 GenDivZeroCheck(RegStorage::MakeRegPair(TargetReg(kArg2), TargetReg(kArg3)));
1971 LoadValueDirectWideFixed(rl_src1, r_tmp1);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001972 // NOTE: callout here is not a safepoint
1973 CallHelper(r_tgt, func_offset, false /* not safepoint */);
1974 } else {
1975 CallRuntimeHelperRegLocationRegLocation(func_offset, rl_src1, rl_src2, false);
1976 }
 1977 // Adjust return regs to handle the case of rem returning kArg2/kArg3.
buzbee2700f7e2014-03-07 09:46:20 -08001978 if (ret_reg == TargetReg(kRet0).GetReg())
Brian Carlstrom7940e442013-07-12 13:46:57 -07001979 rl_result = GetReturnWide(false);
1980 else
1981 rl_result = GetReturnWideAlt();
1982 StoreValueWide(rl_dest, rl_result);
1983 }
1984}
1985
Ian Rogersdd7624d2014-03-14 17:43:00 -07001986void Mir2Lir::GenConversionCall(ThreadOffset<4> func_offset,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001987 RegLocation rl_dest, RegLocation rl_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001988 /*
1989 * Don't optimize the register usage since it calls out to support
1990 * functions
1991 */
1992 FlushAllRegs(); /* Send everything to home location */
Brian Carlstrom7940e442013-07-12 13:46:57 -07001993 CallRuntimeHelperRegLocation(func_offset, rl_src, false);
1994 if (rl_dest.wide) {
1995 RegLocation rl_result;
1996 rl_result = GetReturnWide(rl_dest.fp);
1997 StoreValueWide(rl_dest, rl_result);
1998 } else {
1999 RegLocation rl_result;
2000 rl_result = GetReturn(rl_dest.fp);
2001 StoreValue(rl_dest, rl_result);
2002 }
2003}
2004
2005/* Check if we need to check for pending suspend request */
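// Two strategies are used: with explicit suspend checks we test the thread's suspend
// flag and branch to a launch pad that calls into the runtime; with implicit checks
// (the else arm) CheckSuspendUsingLoad() emits a load through a thread-local suspend
// trigger, which the runtime makes fault when a suspend is requested, so the fast
// path costs only a single load.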
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07002006void Mir2Lir::GenSuspendTest(int opt_flags) {
Dave Allisonb373e092014-02-20 16:06:36 -08002007 if (Runtime::Current()->ExplicitSuspendChecks()) {
2008 if (NO_SUSPEND || (opt_flags & MIR_IGNORE_SUSPEND_CHECK)) {
2009 return;
2010 }
2011 FlushAllRegs();
2012 LIR* branch = OpTestSuspend(NULL);
2013 LIR* ret_lab = NewLIR0(kPseudoTargetLabel);
2014 LIR* target = RawLIR(current_dalvik_offset_, kPseudoSuspendTarget, WrapPointer(ret_lab),
2015 current_dalvik_offset_);
2016 branch->target = target;
2017 suspend_launchpads_.Insert(target);
2018 } else {
2019 if (NO_SUSPEND || (opt_flags & MIR_IGNORE_SUSPEND_CHECK)) {
2020 return;
2021 }
2022 FlushAllRegs(); // TODO: needed?
2023 LIR* inst = CheckSuspendUsingLoad();
2024 MarkSafepointPC(inst);
Brian Carlstrom7940e442013-07-12 13:46:57 -07002025 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07002026}
2027
2028/* Check if we need to check for pending suspend request */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07002029void Mir2Lir::GenSuspendTestAndBranch(int opt_flags, LIR* target) {
Dave Allisonb373e092014-02-20 16:06:36 -08002030 if (Runtime::Current()->ExplicitSuspendChecks()) {
2031 if (NO_SUSPEND || (opt_flags & MIR_IGNORE_SUSPEND_CHECK)) {
2032 OpUnconditionalBranch(target);
2033 return;
2034 }
2035 OpTestSuspend(target);
2036 LIR* launch_pad =
2037 RawLIR(current_dalvik_offset_, kPseudoSuspendTarget, WrapPointer(target),
2038 current_dalvik_offset_);
2039 FlushAllRegs();
2040 OpUnconditionalBranch(launch_pad);
2041 suspend_launchpads_.Insert(launch_pad);
2042 } else {
2043 // For the implicit suspend check, just perform the trigger
2044 // load and branch to the target.
2045 if (NO_SUSPEND || (opt_flags & MIR_IGNORE_SUSPEND_CHECK)) {
2046 OpUnconditionalBranch(target);
2047 return;
2048 }
2049 FlushAllRegs();
2050 LIR* inst = CheckSuspendUsingLoad();
2051 MarkSafepointPC(inst);
Brian Carlstrom7940e442013-07-12 13:46:57 -07002052 OpUnconditionalBranch(target);
Brian Carlstrom7940e442013-07-12 13:46:57 -07002053 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07002054}
2055
Ian Rogersd9c4fc92013-10-01 19:45:43 -07002056/* Call out to helper assembly routine that will null check obj and then lock it. */
2057void Mir2Lir::GenMonitorEnter(int opt_flags, RegLocation rl_src) {
2058 FlushAllRegs();
Ian Rogersdd7624d2014-03-14 17:43:00 -07002059 CallRuntimeHelperRegLocation(QUICK_ENTRYPOINT_OFFSET(4, pLockObject), rl_src, true);
Ian Rogersd9c4fc92013-10-01 19:45:43 -07002060}
2061
2062/* Call out to helper assembly routine that will null check obj and then unlock it. */
2063void Mir2Lir::GenMonitorExit(int opt_flags, RegLocation rl_src) {
2064 FlushAllRegs();
Ian Rogersdd7624d2014-03-14 17:43:00 -07002065 CallRuntimeHelperRegLocation(QUICK_ENTRYPOINT_OFFSET(4, pUnlockObject), rl_src, true);
Ian Rogersd9c4fc92013-10-01 19:45:43 -07002066}
2067
Bill Buzbeed61ba4b2014-01-13 21:44:01 +00002068/* Generic code for generating a wide constant into a VR. */
2069void Mir2Lir::GenConstWide(RegLocation rl_dest, int64_t value) {
2070 RegLocation rl_result = EvalLoc(rl_dest, kAnyReg, true);
buzbee2700f7e2014-03-07 09:46:20 -08002071 LoadConstantWide(rl_result.reg, value);
Bill Buzbeed61ba4b2014-01-13 21:44:01 +00002072 StoreValueWide(rl_dest, rl_result);
2073}
2074
Brian Carlstrom7940e442013-07-12 13:46:57 -07002075} // namespace art