/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm.h"

#include "arch/arm/asm_support_arm.h"
#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "base/bit_utils_iterator.h"
#include "code_generator_utils.h"
#include "common_arm.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm.h"
#include "linker/arm/relative_patcher_thumb2.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm {

static bool ExpectedPairLayout(Location location) {
  // We expect this for both core and fpu register pairs.
  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
}

static constexpr Register kMethodRegisterArgument = R0;

static constexpr Register kCoreAlwaysSpillRegister = R5;
static constexpr Register kCoreCalleeSaves[] =
    { R5, R6, R7, R8, R10, R11, LR };
static constexpr SRegister kFpuCalleeSaves[] =
    { S16, S17, S18, S19, S20, S21, S22, S23, S24, S25, S26, S27, S28, S29, S30, S31 };

// D31 cannot be split into two S registers, and the register allocator only works on
// S registers. Therefore there is no need to block it.
static constexpr DRegister DTMP = D31;

static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

// Reference loads (except object array loads) use LDR Rt, [Rn, #offset], which can handle
// offsets < 4KiB. For offsets >= 4KiB, the load must be emitted as two or more instructions.
// For the Baker read barrier implementation using link-time generated thunks we need to split
// the offset explicitly.
constexpr uint32_t kReferenceLoadMinFarOffset = 4 * KB;

// Flags controlling the use of link-time generated thunks for Baker read barriers.
constexpr bool kBakerReadBarrierLinkTimeThunksEnableForFields = true;
constexpr bool kBakerReadBarrierLinkTimeThunksEnableForArrays = true;
constexpr bool kBakerReadBarrierLinkTimeThunksEnableForGcRoots = true;

// The reserved entrypoint register for link-time generated thunks.
const Register kBakerCcEntrypointRegister = R4;

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<ArmAssembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, x).Int32Value()

static inline void CheckLastTempIsBakerCcEntrypointRegister(HInstruction* instruction) {
  DCHECK_EQ(static_cast<uint32_t>(kBakerCcEntrypointRegister),
            linker::Thumb2RelativePatcher::kBakerCcEntrypointRegister);
  DCHECK_NE(instruction->GetLocations()->GetTempCount(), 0u);
  DCHECK_EQ(kBakerCcEntrypointRegister,
            instruction->GetLocations()->GetTemp(
                instruction->GetLocations()->GetTempCount() - 1u).AsRegister<Register>());
}

static inline void EmitPlaceholderBne(CodeGeneratorARM* codegen, Label* bne_label) {
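  // Editor's note: the 32-bit encoding is (presumably) forced below so that the link-time
  // patcher can later redirect this conditional branch to a thunk that may be out of the
  // 16-bit branch range; a 16-bit encoding would not be patchable in place.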
  ScopedForce32Bit force_32bit(down_cast<Thumb2Assembler*>(codegen->GetAssembler()));
  __ BindTrackedLabel(bne_label);
  Label placeholder_label;
  __ b(&placeholder_label, NE);  // Placeholder, patched at link-time.
  __ Bind(&placeholder_label);
}

static inline bool CanEmitNarrowLdr(Register rt, Register rn, uint32_t offset) {
  return ArmAssembler::IsLowRegister(rt) && ArmAssembler::IsLowRegister(rn) && offset < 32u;
}

static constexpr int kRegListThreshold = 4;
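// (When more than kRegListThreshold core registers need to be saved or restored,
// SaveLiveRegisters/RestoreLiveRegisters below switch to a single register-list
// store/load (StoreList/LoadList) instead of individual stores/loads.)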

// SaveLiveRegisters and RestoreLiveRegisters from SlowPathCodeARM operate on sets of S registers;
// for each live D register they treat the two corresponding S registers as live.
//
// The two following functions (SaveContiguousSRegisterList, RestoreContiguousSRegisterList) build,
// from a list of contiguous S registers, a list of contiguous D registers (handling the first/last
// S register corner cases) and save/restore this new list treating them as D registers, thereby:
// - decreasing code size;
// - avoiding hazards on Cortex-A57, when a pair of S registers for an actual live D register is
//   restored and then used in regular non-SlowPath code as a D register.
//
// For the following example (v means the S register is live):
//   D names: |    D0   |    D1   |    D2   |    D3   | ...
//   S names: | S0 | S1 | S2 | S3 | S4 | S5 | S6 | S7 | ...
//   Live?    |    |  v |  v |  v |  v |  v |  v |    | ...
//
// S1 and S6 will be saved/restored independently; the D register list (D1, D2) will be processed
// as D registers.
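// For instance, for the live set in the example above, SaveLiveRegisters ends up calling
// SaveContiguousSRegisterList(/* first */ 1, /* last */ 6, ...): S1 is stored alone,
// S2-S5 are stored as the D register pair D1/D2, and S6 is stored alone.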
static size_t SaveContiguousSRegisterList(size_t first,
                                          size_t last,
                                          CodeGenerator* codegen,
                                          size_t stack_offset) {
  DCHECK_LE(first, last);
  if ((first == last) && (first == 0)) {
    stack_offset += codegen->SaveFloatingPointRegister(stack_offset, first);
    return stack_offset;
  }
  if (first % 2 == 1) {
    stack_offset += codegen->SaveFloatingPointRegister(stack_offset, first++);
  }

  bool save_last = false;
  if (last % 2 == 0) {
    save_last = true;
    --last;
  }

  if (first < last) {
    DRegister d_reg = static_cast<DRegister>(first / 2);
    DCHECK_EQ((last - first + 1) % 2, 0u);
    size_t number_of_d_regs = (last - first + 1) / 2;

    if (number_of_d_regs == 1) {
      __ StoreDToOffset(d_reg, SP, stack_offset);
    } else if (number_of_d_regs > 1) {
      __ add(IP, SP, ShifterOperand(stack_offset));
      __ vstmiad(IP, d_reg, number_of_d_regs);
    }
    stack_offset += number_of_d_regs * kArmWordSize * 2;
  }

  if (save_last) {
    stack_offset += codegen->SaveFloatingPointRegister(stack_offset, last + 1);
  }

  return stack_offset;
}

static size_t RestoreContiguousSRegisterList(size_t first,
                                             size_t last,
                                             CodeGenerator* codegen,
                                             size_t stack_offset) {
  DCHECK_LE(first, last);
  if ((first == last) && (first == 0)) {
    stack_offset += codegen->RestoreFloatingPointRegister(stack_offset, first);
    return stack_offset;
  }
  if (first % 2 == 1) {
    stack_offset += codegen->RestoreFloatingPointRegister(stack_offset, first++);
  }

  bool restore_last = false;
  if (last % 2 == 0) {
    restore_last = true;
    --last;
  }

  if (first < last) {
    DRegister d_reg = static_cast<DRegister>(first / 2);
    DCHECK_EQ((last - first + 1) % 2, 0u);
    size_t number_of_d_regs = (last - first + 1) / 2;
    if (number_of_d_regs == 1) {
      __ LoadDFromOffset(d_reg, SP, stack_offset);
    } else if (number_of_d_regs > 1) {
      __ add(IP, SP, ShifterOperand(stack_offset));
      __ vldmiad(IP, d_reg, number_of_d_regs);
    }
    stack_offset += number_of_d_regs * kArmWordSize * 2;
  }

  if (restore_last) {
    stack_offset += codegen->RestoreFloatingPointRegister(stack_offset, last + 1);
  }

  return stack_offset;
}

void SlowPathCodeARM::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  size_t orig_offset = stack_offset;

  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kArmWordSize;
  }

  int reg_num = POPCOUNT(core_spills);
  if (reg_num != 0) {
    if (reg_num > kRegListThreshold) {
      __ StoreList(RegList(core_spills), orig_offset);
    } else {
      stack_offset = orig_offset;
      for (uint32_t i : LowToHighBits(core_spills)) {
        stack_offset += codegen->SaveCoreRegister(stack_offset, i);
      }
    }
  }

  uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  orig_offset = stack_offset;
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kArmWordSize;
  }

  stack_offset = orig_offset;
  while (fp_spills != 0u) {
    uint32_t begin = CTZ(fp_spills);
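    // (Adding (1u << begin) makes the carry ripple through the contiguous run of 1s that
    // starts at `begin`, so ANDing with the sum clears exactly that run; CTZ of the sum
    // then gives the first bit position past the run.)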
    uint32_t tmp = fp_spills + (1u << begin);
    fp_spills &= tmp;  // Clear the contiguous range of 1s.
    uint32_t end = (tmp == 0u) ? 32u : CTZ(tmp);  // CTZ(0) is undefined.
    stack_offset = SaveContiguousSRegisterList(begin, end - 1, codegen, stack_offset);
  }
  DCHECK_LE(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
}

void SlowPathCodeARM::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  size_t orig_offset = stack_offset;

  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    stack_offset += kArmWordSize;
  }

  int reg_num = POPCOUNT(core_spills);
  if (reg_num != 0) {
    if (reg_num > kRegListThreshold) {
      __ LoadList(RegList(core_spills), orig_offset);
    } else {
      stack_offset = orig_offset;
      for (uint32_t i : LowToHighBits(core_spills)) {
        stack_offset += codegen->RestoreCoreRegister(stack_offset, i);
      }
    }
  }

  uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  while (fp_spills != 0u) {
    uint32_t begin = CTZ(fp_spills);
    uint32_t tmp = fp_spills + (1u << begin);
    fp_spills &= tmp;  // Clear the contiguous range of 1s.
    uint32_t end = (tmp == 0u) ? 32u : CTZ(tmp);  // CTZ(0) is undefined.
    stack_offset = RestoreContiguousSRegisterList(begin, end - 1, codegen, stack_offset);
  }
  DCHECK_LE(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
}

class NullCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit NullCheckSlowPathARM(HNullCheck* instruction) : SlowPathCodeARM(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm_codegen->InvokeRuntime(kQuickThrowNullPointer,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
};

class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : SlowPathCodeARM(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
};

class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ b(GetReturnLabel());
    } else {
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};

class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit BoundsCheckSlowPathARM(HBoundsCheck* instruction)
      : SlowPathCodeARM(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};

class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls, HInstruction* at, uint32_t dex_pc, bool do_clinit)
      : SlowPathCodeARM(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    constexpr bool call_saves_everything_except_r0 = (!kUseReadBarrier || kUseBakerReadBarrier);

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    bool is_load_class_bss_entry =
        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
    Register entry_address = kNoRegister;
    if (is_load_class_bss_entry && call_saves_everything_except_r0) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      // In the unlucky case that the `temp` is R0, we preserve the address in `out` across
      // the kSaveEverything call.
      bool temp_is_r0 = (temp == calling_convention.GetRegisterAt(0));
      entry_address = temp_is_r0 ? out.AsRegister<Register>() : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_r0) {
        __ mov(entry_address, ShifterOperand(temp));
      }
    }
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadImmediate(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
    if (is_load_class_bss_entry) {
      if (call_saves_everything_except_r0) {
        // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
        __ str(R0, Address(entry_address));
      } else {
        // For non-Baker read barrier, we need to re-calculate the address of the type entry.
        Register temp = IP;
        CodeGeneratorARM::PcRelativePatchInfo* labels =
            arm_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
        __ BindTrackedLabel(&labels->movw_label);
        __ movw(temp, /* placeholder */ 0u);
        __ BindTrackedLabel(&labels->movt_label);
        __ movt(temp, /* placeholder */ 0u);
        __ BindTrackedLabel(&labels->add_pc_label);
        __ add(temp, temp, ShifterOperand(PC));
        __ str(R0, Address(temp));
      }
    }
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    RestoreLiveRegisters(codegen, locations);
    __ b(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};

class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : SlowPathCodeARM(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    DCHECK(instruction_->IsLoadString());
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    HLoadString* load = instruction_->AsLoadString();
    const dex::StringIndex string_index = load->GetStringIndex();
    Register out = locations->Out().AsRegister<Register>();
    constexpr bool call_saves_everything_except_r0 = (!kUseReadBarrier || kUseBakerReadBarrier);

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    // In the unlucky case that the `temp` is R0, we preserve the address in `out` across
    // the kSaveEverything call.
    Register entry_address = kNoRegister;
    if (call_saves_everything_except_r0) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      bool temp_is_r0 = (temp == calling_convention.GetRegisterAt(0));
      entry_address = temp_is_r0 ? out : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_r0) {
        __ mov(entry_address, ShifterOperand(temp));
      }
    }

    __ LoadImmediate(calling_convention.GetRegisterAt(0), string_index.index_);
    arm_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    // Store the resolved String to the .bss entry.
    if (call_saves_everything_except_r0) {
      // The string entry address was preserved in `entry_address` thanks to kSaveEverything.
      __ str(R0, Address(entry_address));
    } else {
      // For non-Baker read barrier, we need to re-calculate the address of the string entry.
      Register temp = IP;
      CodeGeneratorARM::PcRelativePatchInfo* labels =
          arm_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
      __ BindTrackedLabel(&labels->movw_label);
      __ movw(temp, /* placeholder */ 0u);
      __ BindTrackedLabel(&labels->movt_label);
      __ movt(temp, /* placeholder */ 0u);
      __ BindTrackedLabel(&labels->add_pc_label);
      __ add(temp, temp, ShifterOperand(PC));
      __ str(R0, Address(temp));
    }

    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    RestoreLiveRegisters(codegen, locations);

    __ b(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};
559
Artem Serovf4d6aee2016-07-11 10:41:45 +0100560class TypeCheckSlowPathARM : public SlowPathCodeARM {
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000561 public:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000562 TypeCheckSlowPathARM(HInstruction* instruction, bool is_fatal)
Artem Serovf4d6aee2016-07-11 10:41:45 +0100563 : SlowPathCodeARM(instruction), is_fatal_(is_fatal) {}
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000564
Alexandre Rames67555f72014-11-18 10:55:16 +0000565 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000566 LocationSummary* locations = instruction_->GetLocations();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000567 DCHECK(instruction_->IsCheckCast()
568 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000569
570 CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
571 __ Bind(GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000572
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000573 if (!is_fatal_) {
574 SaveLiveRegisters(codegen, locations);
575 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000576
577 // We're moving two locations to locations that could overlap, so we need a parallel
578 // move resolver.
579 InvokeRuntimeCallingConvention calling_convention;
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800580 codegen->EmitParallelMoves(locations->InAt(0),
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800581 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
582 Primitive::kPrimNot,
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800583 locations->InAt(1),
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800584 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
585 Primitive::kPrimNot);
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000586 if (instruction_->IsInstanceOf()) {
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +0100587 arm_codegen->InvokeRuntime(kQuickInstanceofNonTrivial,
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100588 instruction_,
589 instruction_->GetDexPc(),
590 this);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800591 CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000592 arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
593 } else {
594 DCHECK(instruction_->IsCheckCast());
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800595 arm_codegen->InvokeRuntime(kQuickCheckInstanceOf,
596 instruction_,
597 instruction_->GetDexPc(),
598 this);
599 CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000600 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000601
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000602 if (!is_fatal_) {
603 RestoreLiveRegisters(codegen, locations);
604 __ b(GetExitLabel());
605 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000606 }
607
Alexandre Rames9931f312015-06-19 14:47:01 +0100608 const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM"; }
609
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000610 bool IsFatal() const OVERRIDE { return is_fatal_; }
611
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000612 private:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000613 const bool is_fatal_;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000614
615 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
616};

class DeoptimizationSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DeoptimizationSlowPathARM(HDeoptimize* instruction)
      : SlowPathCodeARM(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0),
                     static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    arm_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM);
};

class ArraySetSlowPathARM : public SlowPathCodeARM {
 public:
  explicit ArraySetSlowPathARM(HInstruction* instruction) : SlowPathCodeARM(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    arm_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ b(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM);
};

// Abstract base class for read barrier slow paths marking a reference
// `ref`.
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked.
class ReadBarrierMarkSlowPathBaseARM : public SlowPathCodeARM {
 protected:
  ReadBarrierMarkSlowPathBaseARM(HInstruction* instruction, Location ref, Location entrypoint)
      : SlowPathCodeARM(instruction), ref_(ref), entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathBaseARM"; }

  // Generate assembly code calling the read barrier marking runtime
  // entry point (ReadBarrierMarkRegX).
  void GenerateReadBarrierMarkRuntimeCall(CodeGenerator* codegen) {
    Register ref_reg = ref_.AsRegister<Register>();

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    DCHECK_NE(ref_reg, SP);
    DCHECK_NE(ref_reg, LR);
    DCHECK_NE(ref_reg, PC);
    // IP is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, it cannot be the entry point's input/output.
    DCHECK_NE(ref_reg, IP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCoreRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   R0 <- ref
    //   R0 <- ReadBarrierMark(R0)
    //   ref <- R0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      arm_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      __ blx(entrypoint_.AsRegister<Register>());
    } else {
      // Entrypoint is not already loaded, load from the thread.
      int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArmPointerSize>(ref_reg);
      // This runtime call does not require a stack map.
      arm_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    }
  }

  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if it is already loaded.
  const Location entrypoint_;

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathBaseARM);
};

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking.
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// If `entrypoint` is a valid location it is assumed to already be
// holding the entrypoint. The case where the entrypoint is passed in
// is when the decision to mark is based on whether the GC is marking.
class ReadBarrierMarkSlowPathARM : public ReadBarrierMarkSlowPathBaseARM {
 public:
  ReadBarrierMarkSlowPathARM(HInstruction* instruction,
                             Location ref,
                             Location entrypoint = Location::NoLocation())
      : ReadBarrierMarkSlowPathBaseARM(instruction, ref, entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    if (kIsDebugBuild) {
      Register ref_reg = ref_.AsRegister<Register>();
      DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    }
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    GenerateReadBarrierMarkRuntimeCall(codegen);
    __ b(GetExitLabel());
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM);
};

// Slow path loading `obj`'s lock word, loading a reference from
// object `*(obj + offset + (index << scale_factor))` into `ref`, and
// marking `ref` if `obj` is gray according to the lock word (Baker
// read barrier). The field `obj.field` in the object `obj` holding
// this reference does not get updated by this slow path after marking
// (see LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM
// below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked.
class LoadReferenceWithBakerReadBarrierSlowPathARM : public ReadBarrierMarkSlowPathBaseARM {
 public:
  LoadReferenceWithBakerReadBarrierSlowPathARM(HInstruction* instruction,
                                               Location ref,
                                               Register obj,
                                               uint32_t offset,
                                               Location index,
                                               ScaleFactor scale_factor,
                                               bool needs_null_check,
                                               Register temp,
                                               Location entrypoint)
      : ReadBarrierMarkSlowPathBaseARM(instruction, ref, entrypoint),
        obj_(obj),
        offset_(offset),
        index_(index),
        scale_factor_(scale_factor),
        needs_null_check_(needs_null_check),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "LoadReferenceWithBakerReadBarrierSlowPathARM";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    DCHECK_NE(ref_reg, temp_);
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());

    // When using MaybeGenerateReadBarrierSlow, the read barrier call is
    // inserted after the original load. However, in fast path based
    // Baker's read barriers, we need to perform the load of
    // mirror::Object::monitor_ *before* the original reference load.
    // This load-load ordering is required by the read barrier.
    // The slow path (for Baker's algorithm) should look like:
    //
    //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
    //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
    //   HeapReference<mirror::Object> ref = *src;  // Original reference load.
    //   bool is_gray = (rb_state == ReadBarrier::GrayState());
    //   if (is_gray) {
    //     ref = entrypoint(ref);  // ref = ReadBarrier::Mark(ref);  // Runtime entry point call.
    //   }
    //
    // Note: the original implementation in ReadBarrier::Barrier is
    // slightly more complex as it performs additional checks that we do
    // not do here for performance reasons.

    // /* int32_t */ monitor = obj->monitor_
    uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
    __ LoadFromOffset(kLoadWord, temp_, obj_, monitor_offset);
    if (needs_null_check_) {
      codegen->MaybeRecordImplicitNullCheck(instruction_);
    }
    // /* LockWord */ lock_word = LockWord(monitor)
    static_assert(sizeof(LockWord) == sizeof(int32_t),
                  "art::LockWord and int32_t have different sizes.");

    // Introduce a dependency on the lock_word including the rb_state,
    // which shall prevent load-load reordering without using
    // a memory barrier (which would be more expensive).
    // `obj` is unchanged by this operation, but its value now depends
    // on `temp`.
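    // (LSR #32 always yields 0 on ARM, so the add below contributes nothing to `obj_`
    // beyond the data dependency on `temp_`.)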
    __ add(obj_, obj_, ShifterOperand(temp_, LSR, 32));

    // The actual reference load.
    // A possible implicit null check has already been handled above.
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    arm_codegen->GenerateRawReferenceLoad(
        instruction_, ref_, obj_, offset_, index_, scale_factor_, /* needs_null_check */ false);

    // Mark the object `ref` when `obj` is gray.
    //
    //   if (rb_state == ReadBarrier::GrayState())
    //     ref = ReadBarrier::Mark(ref);
    //
    // Given the numeric representation, it's enough to check the low bit of the
    // rb_state. We do that by shifting the bit out of the lock word with LSRS
    // which can be a 16-bit instruction unlike the TST immediate.
    static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
    static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
    __ Lsrs(temp_, temp_, LockWord::kReadBarrierStateShift + 1);
    __ b(GetExitLabel(), CC);  // Carry flag is the last bit shifted out by LSRS.
    GenerateReadBarrierMarkRuntimeCall(codegen);

    __ b(GetExitLabel());
  }

 private:
  // The register containing the object holding the marked object reference field.
  Register obj_;
  // The offset, index and scale factor to access the reference in `obj_`.
  uint32_t offset_;
  Location index_;
  ScaleFactor scale_factor_;
  // Is a null check required?
  bool needs_null_check_;
  // A temporary register used to hold the lock word of `obj_`.
  Register temp_;

  DISALLOW_COPY_AND_ASSIGN(LoadReferenceWithBakerReadBarrierSlowPathARM);
};

// Slow path loading `obj`'s lock word, loading a reference from
// object `*(obj + offset + (index << scale_factor))` into `ref`, and
// marking `ref` if `obj` is gray according to the lock word (Baker
// read barrier). If needed, this slow path also atomically updates
// the field `obj.field` in the object `obj` holding this reference
// after marking (contrary to
// LoadReferenceWithBakerReadBarrierSlowPathARM above, which never
// tries to update `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked.
class LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM
    : public ReadBarrierMarkSlowPathBaseARM {
 public:
  LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM(HInstruction* instruction,
                                                             Location ref,
                                                             Register obj,
                                                             uint32_t offset,
                                                             Location index,
                                                             ScaleFactor scale_factor,
                                                             bool needs_null_check,
                                                             Register temp1,
                                                             Register temp2,
                                                             Location entrypoint)
      : ReadBarrierMarkSlowPathBaseARM(instruction, ref, entrypoint),
        obj_(obj),
        offset_(offset),
        index_(index),
        scale_factor_(scale_factor),
        needs_null_check_(needs_null_check),
        temp1_(temp1),
        temp2_(temp2) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    DCHECK_NE(ref_reg, temp1_);

    // This slow path is only used by the UnsafeCASObject intrinsic at the moment.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK_EQ(offset_, 0u);
    DCHECK_EQ(scale_factor_, ScaleFactor::TIMES_1);
    // The location of the offset of the marked reference field within `obj_`.
    Location field_offset = index_;
    DCHECK(field_offset.IsRegisterPair()) << field_offset;

    __ Bind(GetEntryLabel());

    // The implementation is similar to LoadReferenceWithBakerReadBarrierSlowPathARM's:
    //
    //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
    //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
    //   HeapReference<mirror::Object> ref = *src;  // Original reference load.
    //   bool is_gray = (rb_state == ReadBarrier::GrayState());
    //   if (is_gray) {
    //     old_ref = ref;
    //     ref = entrypoint(ref);  // ref = ReadBarrier::Mark(ref);  // Runtime entry point call.
    //     compareAndSwapObject(obj, field_offset, old_ref, ref);
    //   }

    // /* int32_t */ monitor = obj->monitor_
    uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
    __ LoadFromOffset(kLoadWord, temp1_, obj_, monitor_offset);
    if (needs_null_check_) {
      codegen->MaybeRecordImplicitNullCheck(instruction_);
    }
    // /* LockWord */ lock_word = LockWord(monitor)
    static_assert(sizeof(LockWord) == sizeof(int32_t),
                  "art::LockWord and int32_t have different sizes.");

    // Introduce a dependency on the lock_word including the rb_state,
    // which shall prevent load-load reordering without using
    // a memory barrier (which would be more expensive).
    // `obj` is unchanged by this operation, but its value now depends
    // on `temp1`.
    __ add(obj_, obj_, ShifterOperand(temp1_, LSR, 32));

    // The actual reference load.
    // A possible implicit null check has already been handled above.
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    arm_codegen->GenerateRawReferenceLoad(
        instruction_, ref_, obj_, offset_, index_, scale_factor_, /* needs_null_check */ false);

    // Mark the object `ref` when `obj` is gray.
    //
    //   if (rb_state == ReadBarrier::GrayState())
    //     ref = ReadBarrier::Mark(ref);
    //
    // Given the numeric representation, it's enough to check the low bit of the
    // rb_state. We do that by shifting the bit out of the lock word with LSRS
    // which can be a 16-bit instruction unlike the TST immediate.
    static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
    static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
    __ Lsrs(temp1_, temp1_, LockWord::kReadBarrierStateShift + 1);
    __ b(GetExitLabel(), CC);  // Carry flag is the last bit shifted out by LSRS.

    // Save the old value of the reference before marking it.
    // Note that we cannot use IP to save the old reference, as IP is
    // used internally by the ReadBarrierMarkRegX entry point, and we
    // need the old reference after the call to that entry point.
    DCHECK_NE(temp1_, IP);
    __ Mov(temp1_, ref_reg);

    GenerateReadBarrierMarkRuntimeCall(codegen);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LDREX/SUBS/ITNE sequence of instructions in the compare-and-set
    // (CAS) operation below would abort the CAS, leaving the field
    // as-is.
    __ cmp(temp1_, ShifterOperand(ref_reg));
    __ b(GetExitLabel(), EQ);

1068     // Update the holder's field atomically.  This may fail if
1069     // the mutator updates it before us, but it's OK.  This is achieved
1070 // using a strong compare-and-set (CAS) operation with relaxed
1071 // memory synchronization ordering, where the expected value is
1072 // the old reference and the desired value is the new reference.
1073
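    // Roughly equivalent C++ for the LDREX/STREX loop emitted below
    // (illustration only, not generated code; `field` stands for a
    // hypothetical std::atomic<int32_t> view of `*(obj_ + field_offset)`):
    //
    //   int32_t expected = old_ref;
    //   field.compare_exchange_strong(expected, new_ref,
    //                                 std::memory_order_relaxed,
    //                                 std::memory_order_relaxed);
    //
    // The success/failure result is deliberately ignored: if another thread
    // won the race, the field already holds a valid reference.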
1074 // Convenience aliases.
1075 Register base = obj_;
1076 // The UnsafeCASObject intrinsic uses a register pair as field
1077 // offset ("long offset"), of which only the low part contains
1078 // data.
Roland Levillain54f869e2017-03-06 13:54:11 +00001079 Register offset = field_offset.AsRegisterPairLow<Register>();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001080 Register expected = temp1_;
1081 Register value = ref_reg;
1082 Register tmp_ptr = IP; // Pointer to actual memory.
1083 Register tmp = temp2_; // Value in memory.
1084
1085 __ add(tmp_ptr, base, ShifterOperand(offset));
1086
1087 if (kPoisonHeapReferences) {
1088 __ PoisonHeapReference(expected);
1089 if (value == expected) {
1090 // Do not poison `value`, as it is the same register as
1091 // `expected`, which has just been poisoned.
1092 } else {
1093 __ PoisonHeapReference(value);
1094 }
1095 }
1096
1097 // do {
1098 // tmp = [r_ptr] - expected;
1099 // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
1100
Roland Levillain24a4d112016-10-26 13:10:46 +01001101 Label loop_head, exit_loop;
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001102 __ Bind(&loop_head);
1103
1104 __ ldrex(tmp, tmp_ptr);
1105
1106 __ subs(tmp, tmp, ShifterOperand(expected));
1107
Roland Levillain24a4d112016-10-26 13:10:46 +01001108 __ it(NE);
1109 __ clrex(NE);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001110
Roland Levillain24a4d112016-10-26 13:10:46 +01001111 __ b(&exit_loop, NE);
1112
1113 __ strex(tmp, value, tmp_ptr);
1114 __ cmp(tmp, ShifterOperand(1));
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001115 __ b(&loop_head, EQ);
1116
Roland Levillain24a4d112016-10-26 13:10:46 +01001117 __ Bind(&exit_loop);
1118
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001119 if (kPoisonHeapReferences) {
1120 __ UnpoisonHeapReference(expected);
1121 if (value == expected) {
1122 // Do not unpoison `value`, as it is the same register as
1123 // `expected`, which has just been unpoisoned.
1124 } else {
1125 __ UnpoisonHeapReference(value);
1126 }
1127 }
1128
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001129 __ b(GetExitLabel());
1130 }
1131
1132 private:
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001133 // The register containing the object holding the marked object reference field.
1134 const Register obj_;
Roland Levillain54f869e2017-03-06 13:54:11 +00001135 // The offset, index and scale factor to access the reference in `obj_`.
1136 uint32_t offset_;
1137 Location index_;
1138 ScaleFactor scale_factor_;
1139 // Is a null check required?
1140 bool needs_null_check_;
1141 // A temporary register used to hold the lock word of `obj_`; and
1142 // also to hold the original reference value, when the reference is
1143 // marked.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001144 const Register temp1_;
Roland Levillain54f869e2017-03-06 13:54:11 +00001145 // A temporary register used in the implementation of the CAS, to
1146 // update the object's reference field.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001147 const Register temp2_;
1148
Roland Levillain54f869e2017-03-06 13:54:11 +00001149 DISALLOW_COPY_AND_ASSIGN(LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001150};
1151
Roland Levillain3b359c72015-11-17 19:35:12 +00001152// Slow path generating a read barrier for a heap reference.
Artem Serovf4d6aee2016-07-11 10:41:45 +01001153class ReadBarrierForHeapReferenceSlowPathARM : public SlowPathCodeARM {
Roland Levillain3b359c72015-11-17 19:35:12 +00001154 public:
1155 ReadBarrierForHeapReferenceSlowPathARM(HInstruction* instruction,
1156 Location out,
1157 Location ref,
1158 Location obj,
1159 uint32_t offset,
1160 Location index)
Artem Serovf4d6aee2016-07-11 10:41:45 +01001161 : SlowPathCodeARM(instruction),
Roland Levillain3b359c72015-11-17 19:35:12 +00001162 out_(out),
1163 ref_(ref),
1164 obj_(obj),
1165 offset_(offset),
1166 index_(index) {
1167 DCHECK(kEmitCompilerReadBarrier);
1168 // If `obj` is equal to `out` or `ref`, it means the initial object
1169 // has been overwritten by (or after) the heap object reference load
1170 // to be instrumented, e.g.:
1171 //
1172 // __ LoadFromOffset(kLoadWord, out, out, offset);
Roland Levillainc9285912015-12-18 10:38:42 +00001173 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain3b359c72015-11-17 19:35:12 +00001174 //
1175 // In that case, we have lost the information about the original
1176 // object, and the emitted read barrier cannot work properly.
1177 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
1178 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
1179 }
1180
1181 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
1182 CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
1183 LocationSummary* locations = instruction_->GetLocations();
1184 Register reg_out = out_.AsRegister<Register>();
1185 DCHECK(locations->CanCall());
1186 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
Roland Levillain3d312422016-06-23 13:53:42 +01001187 DCHECK(instruction_->IsInstanceFieldGet() ||
1188 instruction_->IsStaticFieldGet() ||
1189 instruction_->IsArrayGet() ||
1190 instruction_->IsInstanceOf() ||
1191 instruction_->IsCheckCast() ||
Andreas Gamped9911ee2017-03-27 13:27:24 -07001192 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
Roland Levillainc9285912015-12-18 10:38:42 +00001193 << "Unexpected instruction in read barrier for heap reference slow path: "
1194 << instruction_->DebugName();
Roland Levillain19c54192016-11-04 13:44:09 +00001195 // The read barrier instrumentation of object ArrayGet
1196 // instructions does not support the HIntermediateAddress
1197 // instruction.
1198 DCHECK(!(instruction_->IsArrayGet() &&
1199 instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));
Roland Levillain3b359c72015-11-17 19:35:12 +00001200
1201 __ Bind(GetEntryLabel());
1202 SaveLiveRegisters(codegen, locations);
1203
1204 // We may have to change the index's value, but as `index_` is a
1205 // constant member (like other "inputs" of this slow path),
1206 // introduce a copy of it, `index`.
1207 Location index = index_;
1208 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +01001209 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain3b359c72015-11-17 19:35:12 +00001210 if (instruction_->IsArrayGet()) {
1211 // Compute the actual memory offset and store it in `index`.
1212 Register index_reg = index_.AsRegister<Register>();
1213 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
1214 if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
1215 // We are about to change the value of `index_reg` (see the
1216 // calls to art::arm::Thumb2Assembler::Lsl and
1217 // art::arm::Thumb2Assembler::AddConstant below), but it has
1218 // not been saved by the previous call to
1219 // art::SlowPathCode::SaveLiveRegisters, as it is a
1220 // callee-save register --
1221 // art::SlowPathCode::SaveLiveRegisters does not consider
1222 // callee-save registers, as it has been designed with the
1223 // assumption that callee-save registers are supposed to be
1224 // handled by the called function. So, as a callee-save
1225 // register, `index_reg` _would_ eventually be saved onto
1226 // the stack, but it would be too late: we would have
1227 // changed its value earlier. Therefore, we manually save
1228 // it here into another freely available register,
1229 // `free_reg`, chosen of course among the caller-save
1230 // registers (as a callee-save `free_reg` register would
1231 // exhibit the same problem).
1232 //
1233 // Note we could have requested a temporary register from
1234 // the register allocator instead; but we prefer not to, as
1235 // this is a slow path, and we know we can find a
1236 // caller-save register that is available.
1237 Register free_reg = FindAvailableCallerSaveRegister(codegen);
1238 __ Mov(free_reg, index_reg);
1239 index_reg = free_reg;
1240 index = Location::RegisterLocation(index_reg);
1241 } else {
1242 // The initial register stored in `index_` has already been
1243 // saved in the call to art::SlowPathCode::SaveLiveRegisters
1244 // (as it is not a callee-save register), so we can freely
1245 // use it.
1246 }
1247 // Shifting the index value contained in `index_reg` by the scale
1248 // factor (2) cannot overflow in practice, as the runtime is
1249 // unable to allocate object arrays with a size larger than
1250 // 2^26 - 1 (that is, 2^28 - 4 bytes).
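        // Worked example (illustration only): with at most 2^26 - 1 elements
        // of 4 bytes each, the shifted index is at most
        //   (2^26 - 1) * 4 = 2^28 - 4,
        // and adding the small `offset_` below still fits easily in the
        // 32-bit `index_reg`.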
1251 __ Lsl(index_reg, index_reg, TIMES_4);
1252 static_assert(
1253 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
1254 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
1255 __ AddConstant(index_reg, index_reg, offset_);
1256 } else {
Roland Levillain3d312422016-06-23 13:53:42 +01001257 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
1258 // intrinsics, `index_` is not shifted by a scale factor of 2
1259 // (as in the case of ArrayGet), as it is actually an offset
1260 // to an object field within an object.
1261 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain3b359c72015-11-17 19:35:12 +00001262 DCHECK(instruction_->GetLocations()->Intrinsified());
1263 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
1264 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
1265 << instruction_->AsInvoke()->GetIntrinsic();
1266 DCHECK_EQ(offset_, 0U);
1267 DCHECK(index_.IsRegisterPair());
1268 // UnsafeGet's offset location is a register pair, the low
1269        // UnsafeGet's offset location is a register pair; the low
1270 index = index_.ToLow();
1271 }
1272 }
1273
1274 // We're moving two or three locations to locations that could
1275 // overlap, so we need a parallel move resolver.
1276 InvokeRuntimeCallingConvention calling_convention;
1277 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
1278 parallel_move.AddMove(ref_,
1279 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
1280 Primitive::kPrimNot,
1281 nullptr);
1282 parallel_move.AddMove(obj_,
1283 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
1284 Primitive::kPrimNot,
1285 nullptr);
1286 if (index.IsValid()) {
1287 parallel_move.AddMove(index,
1288 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
1289 Primitive::kPrimInt,
1290 nullptr);
1291 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
1292 } else {
1293 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
1294 __ LoadImmediate(calling_convention.GetRegisterAt(2), offset_);
1295 }
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01001296 arm_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, instruction_->GetDexPc(), this);
Roland Levillain3b359c72015-11-17 19:35:12 +00001297 CheckEntrypointTypes<
1298 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
1299 arm_codegen->Move32(out_, Location::RegisterLocation(R0));
1300
1301 RestoreLiveRegisters(codegen, locations);
1302 __ b(GetExitLabel());
1303 }
1304
1305 const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM"; }
1306
1307 private:
1308 Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
1309 size_t ref = static_cast<int>(ref_.AsRegister<Register>());
1310 size_t obj = static_cast<int>(obj_.AsRegister<Register>());
1311 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
1312 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
1313 return static_cast<Register>(i);
1314 }
1315 }
1316 // We shall never fail to find a free caller-save register, as
1317 // there are more than two core caller-save registers on ARM
1318 // (meaning it is possible to find one which is different from
1319 // `ref` and `obj`).
1320 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
1321 LOG(FATAL) << "Could not find a free caller-save register";
1322 UNREACHABLE();
1323 }
1324
Roland Levillain3b359c72015-11-17 19:35:12 +00001325 const Location out_;
1326 const Location ref_;
1327 const Location obj_;
1328 const uint32_t offset_;
1329 // An additional location containing an index to an array.
1330 // Only used for HArrayGet and the UnsafeGetObject &
1331 // UnsafeGetObjectVolatile intrinsics.
1332 const Location index_;
1333
1334 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM);
1335};
1336
1337// Slow path generating a read barrier for a GC root.
Artem Serovf4d6aee2016-07-11 10:41:45 +01001338class ReadBarrierForRootSlowPathARM : public SlowPathCodeARM {
Roland Levillain3b359c72015-11-17 19:35:12 +00001339 public:
1340 ReadBarrierForRootSlowPathARM(HInstruction* instruction, Location out, Location root)
Artem Serovf4d6aee2016-07-11 10:41:45 +01001341 : SlowPathCodeARM(instruction), out_(out), root_(root) {
Roland Levillainc9285912015-12-18 10:38:42 +00001342 DCHECK(kEmitCompilerReadBarrier);
1343 }
Roland Levillain3b359c72015-11-17 19:35:12 +00001344
1345 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
1346 LocationSummary* locations = instruction_->GetLocations();
1347 Register reg_out = out_.AsRegister<Register>();
1348 DCHECK(locations->CanCall());
1349 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
Roland Levillainc9285912015-12-18 10:38:42 +00001350 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
1351 << "Unexpected instruction in read barrier for GC root slow path: "
1352 << instruction_->DebugName();
Roland Levillain3b359c72015-11-17 19:35:12 +00001353
1354 __ Bind(GetEntryLabel());
1355 SaveLiveRegisters(codegen, locations);
1356
1357 InvokeRuntimeCallingConvention calling_convention;
1358 CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
1359 arm_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01001360 arm_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
Roland Levillain3b359c72015-11-17 19:35:12 +00001361 instruction_,
1362 instruction_->GetDexPc(),
1363 this);
1364 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
1365 arm_codegen->Move32(out_, Location::RegisterLocation(R0));
1366
1367 RestoreLiveRegisters(codegen, locations);
1368 __ b(GetExitLabel());
1369 }
1370
1371 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM"; }
1372
1373 private:
Roland Levillain3b359c72015-11-17 19:35:12 +00001374 const Location out_;
1375 const Location root_;
1376
1377 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM);
1378};
1379
Aart Bike9f37602015-10-09 11:15:55 -07001380inline Condition ARMCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -07001381 switch (cond) {
1382 case kCondEQ: return EQ;
1383 case kCondNE: return NE;
1384 case kCondLT: return LT;
1385 case kCondLE: return LE;
1386 case kCondGT: return GT;
1387 case kCondGE: return GE;
Aart Bike9f37602015-10-09 11:15:55 -07001388 case kCondB: return LO;
1389 case kCondBE: return LS;
1390 case kCondA: return HI;
1391 case kCondAE: return HS;
Dave Allison20dfc792014-06-16 20:44:29 -07001392 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001393 LOG(FATAL) << "Unreachable";
1394 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -07001395}
1396
Aart Bike9f37602015-10-09 11:15:55 -07001397// Maps signed condition to unsigned condition.
Roland Levillain4fa13f62015-07-06 18:11:54 +01001398inline Condition ARMUnsignedCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -07001399 switch (cond) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001400 case kCondEQ: return EQ;
1401 case kCondNE: return NE;
Aart Bike9f37602015-10-09 11:15:55 -07001402 // Signed to unsigned.
Roland Levillain4fa13f62015-07-06 18:11:54 +01001403 case kCondLT: return LO;
1404 case kCondLE: return LS;
1405 case kCondGT: return HI;
1406 case kCondGE: return HS;
Aart Bike9f37602015-10-09 11:55 -07001407    // Unsigned conditions remain unchanged.
1408 case kCondB: return LO;
1409 case kCondBE: return LS;
1410 case kCondA: return HI;
1411 case kCondAE: return HS;
Dave Allison20dfc792014-06-16 20:44:29 -07001412 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001413 LOG(FATAL) << "Unreachable";
1414 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -07001415}
1416
Vladimir Markod6e069b2016-01-18 11:11:01 +00001417inline Condition ARMFPCondition(IfCondition cond, bool gt_bias) {
1418 // The ARM condition codes can express all the necessary branches, see the
1419 // "Meaning (floating-point)" column in the table A8-1 of the ARMv7 reference manual.
1420 // There is no dex instruction or HIR that would need the missing conditions
1421 // "equal or unordered" or "not equal".
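  // Worked example (illustration only): for kCondLT with gt_bias (a NaN
  // operand must make the comparison behave as "greater"), CC is used, which
  // is false on an unordered VCMP result; without gt_bias, LT is used, which
  // is true on an unordered result.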
1422 switch (cond) {
1423 case kCondEQ: return EQ;
1424 case kCondNE: return NE /* unordered */;
1425 case kCondLT: return gt_bias ? CC : LT /* unordered */;
1426 case kCondLE: return gt_bias ? LS : LE /* unordered */;
1427 case kCondGT: return gt_bias ? HI /* unordered */ : GT;
1428 case kCondGE: return gt_bias ? CS /* unordered */ : GE;
1429 default:
1430 LOG(FATAL) << "UNREACHABLE";
1431 UNREACHABLE();
1432 }
1433}
1434
Anton Kirilov74234da2017-01-13 14:42:47 +00001435inline Shift ShiftFromOpKind(HDataProcWithShifterOp::OpKind op_kind) {
1436 switch (op_kind) {
1437 case HDataProcWithShifterOp::kASR: return ASR;
1438 case HDataProcWithShifterOp::kLSL: return LSL;
1439 case HDataProcWithShifterOp::kLSR: return LSR;
1440 default:
1441 LOG(FATAL) << "Unexpected op kind " << op_kind;
1442 UNREACHABLE();
1443 }
1444}
1445
1446static void GenerateDataProcInstruction(HInstruction::InstructionKind kind,
1447 Register out,
1448 Register first,
1449 const ShifterOperand& second,
1450 CodeGeneratorARM* codegen) {
1451 if (second.IsImmediate() && second.GetImmediate() == 0) {
1452 const ShifterOperand in = kind == HInstruction::kAnd
1453 ? ShifterOperand(0)
1454 : ShifterOperand(first);
1455
1456 __ mov(out, in);
1457 } else {
1458 switch (kind) {
1459 case HInstruction::kAdd:
1460 __ add(out, first, second);
1461 break;
1462 case HInstruction::kAnd:
1463 __ and_(out, first, second);
1464 break;
1465 case HInstruction::kOr:
1466 __ orr(out, first, second);
1467 break;
1468 case HInstruction::kSub:
1469 __ sub(out, first, second);
1470 break;
1471 case HInstruction::kXor:
1472 __ eor(out, first, second);
1473 break;
1474 default:
1475 LOG(FATAL) << "Unexpected instruction kind: " << kind;
1476 UNREACHABLE();
1477 }
1478 }
1479}
1480
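// A sketch of the 64-bit case handled below (illustration only): for ADD and
// SUB the carry/borrow must flow from the low half into the high half, e.g.
//
//   first = 0x00000000FFFFFFFF, second = 1:
//     adds out_lo, first_lo, #1   // out_lo = 0, carry set
//     adc  out_hi, first_hi, #0   // out_hi = 1, result 0x0000000100000000
//
// Other operations apply to the two halves independently.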
1481static void GenerateDataProc(HInstruction::InstructionKind kind,
1482 const Location& out,
1483 const Location& first,
1484 const ShifterOperand& second_lo,
1485 const ShifterOperand& second_hi,
1486 CodeGeneratorARM* codegen) {
1487 const Register first_hi = first.AsRegisterPairHigh<Register>();
1488 const Register first_lo = first.AsRegisterPairLow<Register>();
1489 const Register out_hi = out.AsRegisterPairHigh<Register>();
1490 const Register out_lo = out.AsRegisterPairLow<Register>();
1491
1492 if (kind == HInstruction::kAdd) {
1493 __ adds(out_lo, first_lo, second_lo);
1494 __ adc(out_hi, first_hi, second_hi);
1495 } else if (kind == HInstruction::kSub) {
1496 __ subs(out_lo, first_lo, second_lo);
1497 __ sbc(out_hi, first_hi, second_hi);
1498 } else {
1499 GenerateDataProcInstruction(kind, out_lo, first_lo, second_lo, codegen);
1500 GenerateDataProcInstruction(kind, out_hi, first_hi, second_hi, codegen);
1501 }
1502}
1503
1504static ShifterOperand GetShifterOperand(Register rm, Shift shift, uint32_t shift_imm) {
1505 return shift_imm == 0 ? ShifterOperand(rm) : ShifterOperand(rm, shift, shift_imm);
1506}
1507
1508static void GenerateLongDataProc(HDataProcWithShifterOp* instruction, CodeGeneratorARM* codegen) {
1509 DCHECK_EQ(instruction->GetType(), Primitive::kPrimLong);
1510 DCHECK(HDataProcWithShifterOp::IsShiftOp(instruction->GetOpKind()));
1511
1512 const LocationSummary* const locations = instruction->GetLocations();
1513 const uint32_t shift_value = instruction->GetShiftAmount();
1514 const HInstruction::InstructionKind kind = instruction->GetInstrKind();
1515 const Location first = locations->InAt(0);
1516 const Location second = locations->InAt(1);
1517 const Location out = locations->Out();
1518 const Register first_hi = first.AsRegisterPairHigh<Register>();
1519 const Register first_lo = first.AsRegisterPairLow<Register>();
1520 const Register out_hi = out.AsRegisterPairHigh<Register>();
1521 const Register out_lo = out.AsRegisterPairLow<Register>();
1522 const Register second_hi = second.AsRegisterPairHigh<Register>();
1523 const Register second_lo = second.AsRegisterPairLow<Register>();
1524 const Shift shift = ShiftFromOpKind(instruction->GetOpKind());
1525
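  // Illustration only (not generated code): the 64-bit shift of the second
  // operand is decomposed into 32-bit shifter operands, e.g. for LSL:
  //
  //   shift_value == 40: second_hi' = second_lo << 8
  //                      second_lo' = 0
  //   shift_value == 8:  second_hi' = (second_hi << 8) | (second_lo >> 24)
  //                      second_lo' = second_lo << 8
  //
  // The data-processing operation is then applied to (first, second') as in
  // GenerateDataProc() above.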
1526 if (shift_value >= 32) {
1527 if (shift == LSL) {
1528 GenerateDataProcInstruction(kind,
1529 out_hi,
1530 first_hi,
1531 ShifterOperand(second_lo, LSL, shift_value - 32),
1532 codegen);
1533 GenerateDataProcInstruction(kind,
1534 out_lo,
1535 first_lo,
1536 ShifterOperand(0),
1537 codegen);
1538 } else if (shift == ASR) {
1539 GenerateDataProc(kind,
1540 out,
1541 first,
1542 GetShifterOperand(second_hi, ASR, shift_value - 32),
1543 ShifterOperand(second_hi, ASR, 31),
1544 codegen);
1545 } else {
1546 DCHECK_EQ(shift, LSR);
1547 GenerateDataProc(kind,
1548 out,
1549 first,
1550 GetShifterOperand(second_hi, LSR, shift_value - 32),
1551 ShifterOperand(0),
1552 codegen);
1553 }
1554 } else {
1555 DCHECK_GT(shift_value, 1U);
1556 DCHECK_LT(shift_value, 32U);
1557
1558 if (shift == LSL) {
1559 // We are not doing this for HInstruction::kAdd because the output will require
1560 // Location::kOutputOverlap; not applicable to other cases.
1561 if (kind == HInstruction::kOr || kind == HInstruction::kXor) {
1562 GenerateDataProcInstruction(kind,
1563 out_hi,
1564 first_hi,
1565 ShifterOperand(second_hi, LSL, shift_value),
1566 codegen);
1567 GenerateDataProcInstruction(kind,
1568 out_hi,
1569 out_hi,
1570 ShifterOperand(second_lo, LSR, 32 - shift_value),
1571 codegen);
1572 GenerateDataProcInstruction(kind,
1573 out_lo,
1574 first_lo,
1575 ShifterOperand(second_lo, LSL, shift_value),
1576 codegen);
1577 } else {
1578 __ Lsl(IP, second_hi, shift_value);
1579 __ orr(IP, IP, ShifterOperand(second_lo, LSR, 32 - shift_value));
1580 GenerateDataProc(kind,
1581 out,
1582 first,
1583 ShifterOperand(second_lo, LSL, shift_value),
1584 ShifterOperand(IP),
1585 codegen);
1586 }
1587 } else {
1588 DCHECK(shift == ASR || shift == LSR);
1589
1590 // We are not doing this for HInstruction::kAdd because the output will require
1591 // Location::kOutputOverlap; not applicable to other cases.
1592 if (kind == HInstruction::kOr || kind == HInstruction::kXor) {
1593 GenerateDataProcInstruction(kind,
1594 out_lo,
1595 first_lo,
1596 ShifterOperand(second_lo, LSR, shift_value),
1597 codegen);
1598 GenerateDataProcInstruction(kind,
1599 out_lo,
1600 out_lo,
1601 ShifterOperand(second_hi, LSL, 32 - shift_value),
1602 codegen);
1603 GenerateDataProcInstruction(kind,
1604 out_hi,
1605 first_hi,
1606 ShifterOperand(second_hi, shift, shift_value),
1607 codegen);
1608 } else {
1609 __ Lsr(IP, second_lo, shift_value);
1610 __ orr(IP, IP, ShifterOperand(second_hi, LSL, 32 - shift_value));
1611 GenerateDataProc(kind,
1612 out,
1613 first,
1614 ShifterOperand(IP),
1615 ShifterOperand(second_hi, shift, shift_value),
1616 codegen);
1617 }
1618 }
1619 }
1620}
1621
Donghui Bai426b49c2016-11-08 14:55:38 +08001622static void GenerateVcmp(HInstruction* instruction, CodeGeneratorARM* codegen) {
1623 Primitive::Type type = instruction->InputAt(0)->GetType();
1624 Location lhs_loc = instruction->GetLocations()->InAt(0);
1625 Location rhs_loc = instruction->GetLocations()->InAt(1);
1626 if (rhs_loc.IsConstant()) {
1627 // 0.0 is the only immediate that can be encoded directly in
1628 // a VCMP instruction.
1629 //
1630 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
1631 // specify that in a floating-point comparison, positive zero
1632 // and negative zero are considered equal, so we can use the
1633 // literal 0.0 for both cases here.
1634 //
1635 // Note however that some methods (Float.equal, Float.compare,
1636 // Float.compareTo, Double.equal, Double.compare,
1637 // Double.compareTo, Math.max, Math.min, StrictMath.max,
1638 // StrictMath.min) consider 0.0 to be (strictly) greater than
1639 // -0.0. So if we ever translate calls to these methods into a
1640 // HCompare instruction, we must handle the -0.0 case with
1641 // care here.
1642 DCHECK(rhs_loc.GetConstant()->IsArithmeticZero());
1643 if (type == Primitive::kPrimFloat) {
1644 __ vcmpsz(lhs_loc.AsFpuRegister<SRegister>());
1645 } else {
1646 DCHECK_EQ(type, Primitive::kPrimDouble);
1647 __ vcmpdz(FromLowSToD(lhs_loc.AsFpuRegisterPairLow<SRegister>()));
1648 }
1649 } else {
1650 if (type == Primitive::kPrimFloat) {
1651 __ vcmps(lhs_loc.AsFpuRegister<SRegister>(), rhs_loc.AsFpuRegister<SRegister>());
1652 } else {
1653 DCHECK_EQ(type, Primitive::kPrimDouble);
1654 __ vcmpd(FromLowSToD(lhs_loc.AsFpuRegisterPairLow<SRegister>()),
1655 FromLowSToD(rhs_loc.AsFpuRegisterPairLow<SRegister>()));
1656 }
1657 }
1658}
1659
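// When the constant is 1 or -1 and the condition permits it, rewrites the
// comparison as a comparison against 0, which later code can handle more
// cheaply; otherwise the value and conditions are left unchanged.
// For example (illustration only):
//
//   x <u 1  (kCondB)   becomes  x == 0  (kCondEQ)
//   x >=u 1 (kCondAE)  becomes  x != 0  (kCondNE)
//   x > -1  (kCondGT)  becomes  x >= 0  (kCondGE)
//   x <= -1 (kCondLE)  becomes  x < 0   (kCondLT)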
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001660static int64_t AdjustConstantForCondition(int64_t value,
1661 IfCondition* condition,
1662 IfCondition* opposite) {
1663 if (value == 1) {
1664 if (*condition == kCondB) {
1665 value = 0;
1666 *condition = kCondEQ;
1667 *opposite = kCondNE;
1668 } else if (*condition == kCondAE) {
1669 value = 0;
1670 *condition = kCondNE;
1671 *opposite = kCondEQ;
1672 }
1673 } else if (value == -1) {
1674 if (*condition == kCondGT) {
1675 value = 0;
1676 *condition = kCondGE;
1677 *opposite = kCondLT;
1678 } else if (*condition == kCondLE) {
1679 value = 0;
1680 *condition = kCondLT;
1681 *opposite = kCondGE;
1682 }
1683 }
1684
1685 return value;
1686}
1687
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001688static std::pair<Condition, Condition> GenerateLongTestConstant(HCondition* condition,
1689 bool invert,
1690 CodeGeneratorARM* codegen) {
Donghui Bai426b49c2016-11-08 14:55:38 +08001691 DCHECK_EQ(condition->GetLeft()->GetType(), Primitive::kPrimLong);
1692
1693 const LocationSummary* const locations = condition->GetLocations();
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001694 IfCondition cond = condition->GetCondition();
1695 IfCondition opposite = condition->GetOppositeCondition();
1696
1697 if (invert) {
1698 std::swap(cond, opposite);
1699 }
1700
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001701 std::pair<Condition, Condition> ret(EQ, NE);
Donghui Bai426b49c2016-11-08 14:55:38 +08001702 const Location left = locations->InAt(0);
1703 const Location right = locations->InAt(1);
1704
1705 DCHECK(right.IsConstant());
1706
1707 const Register left_high = left.AsRegisterPairHigh<Register>();
1708 const Register left_low = left.AsRegisterPairLow<Register>();
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001709 int64_t value = AdjustConstantForCondition(right.GetConstant()->AsLongConstant()->GetValue(),
1710 &cond,
1711 &opposite);
1712
1713 // Comparisons against 0 are common enough to deserve special attention.
1714 if (value == 0) {
1715 switch (cond) {
1716 case kCondNE:
1717 // x > 0 iff x != 0 when the comparison is unsigned.
1718 case kCondA:
1719 ret = std::make_pair(NE, EQ);
1720 FALLTHROUGH_INTENDED;
1721 case kCondEQ:
1722 // x <= 0 iff x == 0 when the comparison is unsigned.
1723 case kCondBE:
1724 __ orrs(IP, left_low, ShifterOperand(left_high));
1725 return ret;
1726 case kCondLT:
1727 case kCondGE:
1728 __ cmp(left_high, ShifterOperand(0));
1729 return std::make_pair(ARMCondition(cond), ARMCondition(opposite));
1730 // Trivially true or false.
1731 case kCondB:
1732 ret = std::make_pair(NE, EQ);
1733 FALLTHROUGH_INTENDED;
1734 case kCondAE:
1735 __ cmp(left_low, ShifterOperand(left_low));
1736 return ret;
1737 default:
1738 break;
1739 }
1740 }
Donghui Bai426b49c2016-11-08 14:55:38 +08001741
1742 switch (cond) {
1743 case kCondEQ:
1744 case kCondNE:
1745 case kCondB:
1746 case kCondBE:
1747 case kCondA:
1748 case kCondAE:
1749 __ CmpConstant(left_high, High32Bits(value));
1750 __ it(EQ);
1751 __ cmp(left_low, ShifterOperand(Low32Bits(value)), EQ);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001752 ret = std::make_pair(ARMUnsignedCondition(cond), ARMUnsignedCondition(opposite));
Donghui Bai426b49c2016-11-08 14:55:38 +08001753 break;
1754 case kCondLE:
1755 case kCondGT:
1756 // Trivially true or false.
1757 if (value == std::numeric_limits<int64_t>::max()) {
1758 __ cmp(left_low, ShifterOperand(left_low));
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001759 ret = cond == kCondLE ? std::make_pair(EQ, NE) : std::make_pair(NE, EQ);
Donghui Bai426b49c2016-11-08 14:55:38 +08001760 break;
1761 }
1762
1763 if (cond == kCondLE) {
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001764 DCHECK_EQ(opposite, kCondGT);
Donghui Bai426b49c2016-11-08 14:55:38 +08001765 cond = kCondLT;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001766 opposite = kCondGE;
Donghui Bai426b49c2016-11-08 14:55:38 +08001767 } else {
1768 DCHECK_EQ(cond, kCondGT);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001769 DCHECK_EQ(opposite, kCondLE);
Donghui Bai426b49c2016-11-08 14:55:38 +08001770 cond = kCondGE;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001771 opposite = kCondLT;
Donghui Bai426b49c2016-11-08 14:55:38 +08001772 }
1773
1774 value++;
1775 FALLTHROUGH_INTENDED;
1776 case kCondGE:
1777 case kCondLT:
1778 __ CmpConstant(left_low, Low32Bits(value));
1779 __ sbcs(IP, left_high, ShifterOperand(High32Bits(value)));
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001780 ret = std::make_pair(ARMCondition(cond), ARMCondition(opposite));
Donghui Bai426b49c2016-11-08 14:55:38 +08001781 break;
1782 default:
1783 LOG(FATAL) << "Unreachable";
1784 UNREACHABLE();
1785 }
1786
1787 return ret;
1788}
1789
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001790static std::pair<Condition, Condition> GenerateLongTest(HCondition* condition,
1791 bool invert,
1792 CodeGeneratorARM* codegen) {
Donghui Bai426b49c2016-11-08 14:55:38 +08001793 DCHECK_EQ(condition->GetLeft()->GetType(), Primitive::kPrimLong);
1794
1795 const LocationSummary* const locations = condition->GetLocations();
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001796 IfCondition cond = condition->GetCondition();
1797 IfCondition opposite = condition->GetOppositeCondition();
1798
1799 if (invert) {
1800 std::swap(cond, opposite);
1801 }
1802
1803 std::pair<Condition, Condition> ret;
Donghui Bai426b49c2016-11-08 14:55:38 +08001804 Location left = locations->InAt(0);
1805 Location right = locations->InAt(1);
1806
1807 DCHECK(right.IsRegisterPair());
1808
1809 switch (cond) {
1810 case kCondEQ:
1811 case kCondNE:
1812 case kCondB:
1813 case kCondBE:
1814 case kCondA:
1815 case kCondAE:
1816 __ cmp(left.AsRegisterPairHigh<Register>(),
1817 ShifterOperand(right.AsRegisterPairHigh<Register>()));
1818 __ it(EQ);
1819 __ cmp(left.AsRegisterPairLow<Register>(),
1820 ShifterOperand(right.AsRegisterPairLow<Register>()),
1821 EQ);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001822 ret = std::make_pair(ARMUnsignedCondition(cond), ARMUnsignedCondition(opposite));
Donghui Bai426b49c2016-11-08 14:55:38 +08001823 break;
1824 case kCondLE:
1825 case kCondGT:
1826 if (cond == kCondLE) {
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001827 DCHECK_EQ(opposite, kCondGT);
Donghui Bai426b49c2016-11-08 14:55:38 +08001828 cond = kCondGE;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001829 opposite = kCondLT;
Donghui Bai426b49c2016-11-08 14:55:38 +08001830 } else {
1831 DCHECK_EQ(cond, kCondGT);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001832 DCHECK_EQ(opposite, kCondLE);
Donghui Bai426b49c2016-11-08 14:55:38 +08001833 cond = kCondLT;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001834 opposite = kCondGE;
Donghui Bai426b49c2016-11-08 14:55:38 +08001835 }
1836
1837 std::swap(left, right);
1838 FALLTHROUGH_INTENDED;
1839 case kCondGE:
1840 case kCondLT:
1841 __ cmp(left.AsRegisterPairLow<Register>(),
1842 ShifterOperand(right.AsRegisterPairLow<Register>()));
1843 __ sbcs(IP,
1844 left.AsRegisterPairHigh<Register>(),
1845 ShifterOperand(right.AsRegisterPairHigh<Register>()));
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001846 ret = std::make_pair(ARMCondition(cond), ARMCondition(opposite));
Donghui Bai426b49c2016-11-08 14:55:38 +08001847 break;
1848 default:
1849 LOG(FATAL) << "Unreachable";
1850 UNREACHABLE();
1851 }
1852
1853 return ret;
1854}
1855
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001856static std::pair<Condition, Condition> GenerateTest(HCondition* condition,
1857 bool invert,
1858 CodeGeneratorARM* codegen) {
1859 const LocationSummary* const locations = condition->GetLocations();
1860 const Primitive::Type type = condition->GetLeft()->GetType();
1861 IfCondition cond = condition->GetCondition();
1862 IfCondition opposite = condition->GetOppositeCondition();
1863 std::pair<Condition, Condition> ret;
1864 const Location right = locations->InAt(1);
Donghui Bai426b49c2016-11-08 14:55:38 +08001865
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001866 if (invert) {
1867 std::swap(cond, opposite);
1868 }
Donghui Bai426b49c2016-11-08 14:55:38 +08001869
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001870 if (type == Primitive::kPrimLong) {
1871 ret = locations->InAt(1).IsConstant()
1872 ? GenerateLongTestConstant(condition, invert, codegen)
1873 : GenerateLongTest(condition, invert, codegen);
1874 } else if (Primitive::IsFloatingPointType(type)) {
1875 GenerateVcmp(condition, codegen);
1876 __ vmstat();
1877 ret = std::make_pair(ARMFPCondition(cond, condition->IsGtBias()),
1878 ARMFPCondition(opposite, condition->IsGtBias()));
Donghui Bai426b49c2016-11-08 14:55:38 +08001879 } else {
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001880 DCHECK(Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) << type;
Donghui Bai426b49c2016-11-08 14:55:38 +08001881
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001882 const Register left = locations->InAt(0).AsRegister<Register>();
1883
1884 if (right.IsRegister()) {
1885 __ cmp(left, ShifterOperand(right.AsRegister<Register>()));
Donghui Bai426b49c2016-11-08 14:55:38 +08001886 } else {
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001887 DCHECK(right.IsConstant());
1888 __ CmpConstant(left, CodeGenerator::GetInt32ValueOf(right.GetConstant()));
Donghui Bai426b49c2016-11-08 14:55:38 +08001889 }
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001890
1891 ret = std::make_pair(ARMCondition(cond), ARMCondition(opposite));
Donghui Bai426b49c2016-11-08 14:55:38 +08001892 }
1893
1894 return ret;
1895}
1896
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001897static bool CanGenerateTest(HCondition* condition, ArmAssembler* assembler) {
1898 if (condition->GetLeft()->GetType() == Primitive::kPrimLong) {
1899 const LocationSummary* const locations = condition->GetLocations();
Donghui Bai426b49c2016-11-08 14:55:38 +08001900
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001901 if (locations->InAt(1).IsConstant()) {
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001902 IfCondition c = condition->GetCondition();
1903 IfCondition opposite = condition->GetOppositeCondition();
1904 const int64_t value = AdjustConstantForCondition(
1905 Int64FromConstant(locations->InAt(1).GetConstant()),
1906 &c,
1907 &opposite);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001908 ShifterOperand so;
Donghui Bai426b49c2016-11-08 14:55:38 +08001909
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001910 if (c < kCondLT || c > kCondGE) {
1911 // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
1912 // we check that the least significant half of the first input to be compared
1913 // is in a low register (the other half is read outside an IT block), and
1914 // the constant fits in an 8-bit unsigned integer, so that a 16-bit CMP
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001915 // encoding can be used; 0 is always handled, no matter what registers are
1916 // used by the first input.
1917 if (value != 0 &&
1918 (!ArmAssembler::IsLowRegister(locations->InAt(0).AsRegisterPairLow<Register>()) ||
1919 !IsUint<8>(Low32Bits(value)))) {
Donghui Bai426b49c2016-11-08 14:55:38 +08001920 return false;
1921 }
Anton Kirilov217b2ce2017-03-16 11:47:12 +00001922 } else if (c == kCondLE || c == kCondGT) {
1923 if (value < std::numeric_limits<int64_t>::max() &&
1924 !assembler->ShifterOperandCanHold(kNoRegister,
1925 kNoRegister,
1926 SBC,
1927 High32Bits(value + 1),
1928 kCcSet,
1929 &so)) {
1930 return false;
1931 }
1932 } else if (!assembler->ShifterOperandCanHold(kNoRegister,
1933 kNoRegister,
1934 SBC,
1935 High32Bits(value),
1936 kCcSet,
1937 &so)) {
1938 return false;
Donghui Bai426b49c2016-11-08 14:55:38 +08001939 }
1940 }
1941 }
1942
1943 return true;
1944}
1945
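// Materializes a condition into `out` as 0 or 1. When `out` is a low
// register, the short IT-based sequence sketched below is emitted
// (illustration only); otherwise a conditional branch over a `mov out, #1`
// is used instead.
//
//   mov       out, #0
//   it        <cond>
//   mov<cond> out, #1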
Anton Kirilov5601d4e2017-05-11 19:33:50 +01001946static void GenerateConditionGeneric(HCondition* cond, CodeGeneratorARM* codegen) {
1947 DCHECK(CanGenerateTest(cond, codegen->GetAssembler()));
1948
1949 const Register out = cond->GetLocations()->Out().AsRegister<Register>();
1950 const auto condition = GenerateTest(cond, false, codegen);
1951
1952 __ mov(out, ShifterOperand(0), AL, kCcKeep);
1953
1954 if (ArmAssembler::IsLowRegister(out)) {
1955 __ it(condition.first);
1956 __ mov(out, ShifterOperand(1), condition.first);
1957 } else {
1958 Label done_label;
1959 Label* const final_label = codegen->GetFinalLabel(cond, &done_label);
1960
1961 __ b(final_label, condition.second);
1962 __ LoadImmediate(out, 1);
1963
1964 if (done_label.IsLinked()) {
1965 __ Bind(&done_label);
1966 }
1967 }
1968}
1969
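// Materializes a 64-bit (in)equality test without branches. The idea, for
// `x == y` (illustration only):
//
//   out = x_lo - y_lo
//   IP  = x_hi - y_hi
//   out = out | IP      // Zero iff both halves are equal.
//   out = (out == 0)    // Via IT/MOV or GenerateConditionWithZero().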
1970static void GenerateEqualLong(HCondition* cond, CodeGeneratorARM* codegen) {
1971 DCHECK_EQ(cond->GetLeft()->GetType(), Primitive::kPrimLong);
1972
1973 const LocationSummary* const locations = cond->GetLocations();
1974 IfCondition condition = cond->GetCondition();
1975 const Register out = locations->Out().AsRegister<Register>();
1976 const Location left = locations->InAt(0);
1977 const Location right = locations->InAt(1);
1978 Register left_high = left.AsRegisterPairHigh<Register>();
1979 Register left_low = left.AsRegisterPairLow<Register>();
1980
1981 if (right.IsConstant()) {
1982 IfCondition opposite = cond->GetOppositeCondition();
1983 const int64_t value = AdjustConstantForCondition(Int64FromConstant(right.GetConstant()),
1984 &condition,
1985 &opposite);
1986 int32_t value_high = -High32Bits(value);
1987 int32_t value_low = -Low32Bits(value);
1988
1989 // The output uses Location::kNoOutputOverlap.
1990 if (out == left_high) {
1991 std::swap(left_low, left_high);
1992 std::swap(value_low, value_high);
1993 }
1994
1995 __ AddConstant(out, left_low, value_low);
1996 __ AddConstant(IP, left_high, value_high);
1997 } else {
1998 DCHECK(right.IsRegisterPair());
1999 __ sub(IP, left_high, ShifterOperand(right.AsRegisterPairHigh<Register>()));
2000 __ sub(out, left_low, ShifterOperand(right.AsRegisterPairLow<Register>()));
2001 }
2002
2003 // Need to check after calling AdjustConstantForCondition().
2004 DCHECK(condition == kCondEQ || condition == kCondNE) << condition;
2005
2006 if (condition == kCondNE && ArmAssembler::IsLowRegister(out)) {
2007 __ orrs(out, out, ShifterOperand(IP));
2008 __ it(NE);
2009 __ mov(out, ShifterOperand(1), NE);
2010 } else {
2011 __ orr(out, out, ShifterOperand(IP));
2012 codegen->GenerateConditionWithZero(condition, out, out, IP);
2013 }
2014}
2015
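// Emits a branch-based 64-bit comparison: the high words decide the result
// unless they are equal, in which case the low words are compared unsigned.
// A sketch for a signed `a < b` (illustration only):
//
//   cmp a_hi, b_hi
//   blt true_label
//   bgt false_label
//   cmp a_lo, b_lo
//   blo true_label
//   (fall through towards false_label)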
2016static void GenerateLongComparesAndJumps(HCondition* cond,
2017 Label* true_label,
2018 Label* false_label,
2019 CodeGeneratorARM* codegen) {
2020 LocationSummary* locations = cond->GetLocations();
2021 Location left = locations->InAt(0);
2022 Location right = locations->InAt(1);
2023 IfCondition if_cond = cond->GetCondition();
2024
2025 Register left_high = left.AsRegisterPairHigh<Register>();
2026 Register left_low = left.AsRegisterPairLow<Register>();
2027 IfCondition true_high_cond = if_cond;
2028 IfCondition false_high_cond = cond->GetOppositeCondition();
2029 Condition final_condition = ARMUnsignedCondition(if_cond); // unsigned on lower part
2030
2031 // Set the conditions for the test, remembering that == needs to be
2032 // decided using the low words.
2033 switch (if_cond) {
2034 case kCondEQ:
2035 case kCondNE:
2036 // Nothing to do.
2037 break;
2038 case kCondLT:
2039 false_high_cond = kCondGT;
2040 break;
2041 case kCondLE:
2042 true_high_cond = kCondLT;
2043 break;
2044 case kCondGT:
2045 false_high_cond = kCondLT;
2046 break;
2047 case kCondGE:
2048 true_high_cond = kCondGT;
2049 break;
2050 case kCondB:
2051 false_high_cond = kCondA;
2052 break;
2053 case kCondBE:
2054 true_high_cond = kCondB;
2055 break;
2056 case kCondA:
2057 false_high_cond = kCondB;
2058 break;
2059 case kCondAE:
2060 true_high_cond = kCondA;
2061 break;
2062 }
2063 if (right.IsConstant()) {
2064 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
2065 int32_t val_low = Low32Bits(value);
2066 int32_t val_high = High32Bits(value);
2067
2068 __ CmpConstant(left_high, val_high);
2069 if (if_cond == kCondNE) {
2070 __ b(true_label, ARMCondition(true_high_cond));
2071 } else if (if_cond == kCondEQ) {
2072 __ b(false_label, ARMCondition(false_high_cond));
2073 } else {
2074 __ b(true_label, ARMCondition(true_high_cond));
2075 __ b(false_label, ARMCondition(false_high_cond));
2076 }
2077 // Must be equal high, so compare the lows.
2078 __ CmpConstant(left_low, val_low);
2079 } else {
2080 Register right_high = right.AsRegisterPairHigh<Register>();
2081 Register right_low = right.AsRegisterPairLow<Register>();
2082
2083 __ cmp(left_high, ShifterOperand(right_high));
2084 if (if_cond == kCondNE) {
2085 __ b(true_label, ARMCondition(true_high_cond));
2086 } else if (if_cond == kCondEQ) {
2087 __ b(false_label, ARMCondition(false_high_cond));
2088 } else {
2089 __ b(true_label, ARMCondition(true_high_cond));
2090 __ b(false_label, ARMCondition(false_high_cond));
2091 }
2092 // Must be equal high, so compare the lows.
2093 __ cmp(left_low, ShifterOperand(right_low));
2094 }
2095 // The last comparison might be unsigned.
2096 // TODO: optimize cases where this is always true/false
2097 __ b(true_label, final_condition);
2098}
2099
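// Materializes a 64-bit condition into `out`, using cheaper zero-comparison
// and equality paths when possible. For example, `x != 0` with a low output
// register becomes (illustration only):
//
//   orrs  out, x_lo, x_hi   // Zero iff both halves are zero.
//   it    ne
//   movne out, #1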
2100static void GenerateConditionLong(HCondition* cond, CodeGeneratorARM* codegen) {
2101 DCHECK_EQ(cond->GetLeft()->GetType(), Primitive::kPrimLong);
2102
2103 const LocationSummary* const locations = cond->GetLocations();
2104 IfCondition condition = cond->GetCondition();
2105 const Register out = locations->Out().AsRegister<Register>();
2106 const Location left = locations->InAt(0);
2107 const Location right = locations->InAt(1);
2108
2109 if (right.IsConstant()) {
2110 IfCondition opposite = cond->GetOppositeCondition();
2111
2112 // Comparisons against 0 are common enough to deserve special attention.
2113 if (AdjustConstantForCondition(Int64FromConstant(right.GetConstant()),
2114 &condition,
2115 &opposite) == 0) {
2116 switch (condition) {
2117 case kCondNE:
2118 case kCondA:
2119 if (ArmAssembler::IsLowRegister(out)) {
2120 // We only care if both input registers are 0 or not.
2121 __ orrs(out,
2122 left.AsRegisterPairLow<Register>(),
2123 ShifterOperand(left.AsRegisterPairHigh<Register>()));
2124 __ it(NE);
2125 __ mov(out, ShifterOperand(1), NE);
2126 return;
2127 }
2128
2129 FALLTHROUGH_INTENDED;
2130 case kCondEQ:
2131 case kCondBE:
2132 // We only care if both input registers are 0 or not.
2133 __ orr(out,
2134 left.AsRegisterPairLow<Register>(),
2135 ShifterOperand(left.AsRegisterPairHigh<Register>()));
2136 codegen->GenerateConditionWithZero(condition, out, out);
2137 return;
2138 case kCondLT:
2139 case kCondGE:
2140 // We only care about the sign bit.
2141 FALLTHROUGH_INTENDED;
2142 case kCondAE:
2143 case kCondB:
2144 codegen->GenerateConditionWithZero(condition, out, left.AsRegisterPairHigh<Register>());
2145 return;
2146 case kCondLE:
2147 case kCondGT:
2148 default:
2149 break;
2150 }
2151 }
2152 }
2153
2154 if ((condition == kCondEQ || condition == kCondNE) &&
2155 // If `out` is a low register, then the GenerateConditionGeneric()
2156 // function generates a shorter code sequence that is still branchless.
2157 (!ArmAssembler::IsLowRegister(out) || !CanGenerateTest(cond, codegen->GetAssembler()))) {
2158 GenerateEqualLong(cond, codegen);
2159 return;
2160 }
2161
2162 if (CanGenerateTest(cond, codegen->GetAssembler())) {
2163 GenerateConditionGeneric(cond, codegen);
2164 return;
2165 }
2166
2167 // Convert the jumps into the result.
2168 Label done_label;
2169 Label* const final_label = codegen->GetFinalLabel(cond, &done_label);
2170 Label true_label, false_label;
2171
2172 GenerateLongComparesAndJumps(cond, &true_label, &false_label, codegen);
2173
2174 // False case: result = 0.
2175 __ Bind(&false_label);
2176 __ mov(out, ShifterOperand(0));
2177 __ b(final_label);
2178
2179 // True case: result = 1.
2180 __ Bind(&true_label);
2181 __ mov(out, ShifterOperand(1));
2182
2183 if (done_label.IsLinked()) {
2184 __ Bind(&done_label);
2185 }
2186}
2187
2188static void GenerateConditionIntegralOrNonPrimitive(HCondition* cond, CodeGeneratorARM* codegen) {
2189 const Primitive::Type type = cond->GetLeft()->GetType();
2190
2191 DCHECK(Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) << type;
2192
2193 if (type == Primitive::kPrimLong) {
2194 GenerateConditionLong(cond, codegen);
2195 return;
2196 }
2197
2198 const LocationSummary* const locations = cond->GetLocations();
2199 IfCondition condition = cond->GetCondition();
2200 Register in = locations->InAt(0).AsRegister<Register>();
2201 const Register out = locations->Out().AsRegister<Register>();
2202 const Location right = cond->GetLocations()->InAt(1);
2203 int64_t value;
2204
2205 if (right.IsConstant()) {
2206 IfCondition opposite = cond->GetOppositeCondition();
2207
2208 value = AdjustConstantForCondition(Int64FromConstant(right.GetConstant()),
2209 &condition,
2210 &opposite);
2211
2212 // Comparisons against 0 are common enough to deserve special attention.
2213 if (value == 0) {
2214 switch (condition) {
2215 case kCondNE:
2216 case kCondA:
2217 if (ArmAssembler::IsLowRegister(out) && out == in) {
2218 __ cmp(out, ShifterOperand(0));
2219 __ it(NE);
2220 __ mov(out, ShifterOperand(1), NE);
2221 return;
2222 }
2223
2224 FALLTHROUGH_INTENDED;
2225 case kCondEQ:
2226 case kCondBE:
2227 case kCondLT:
2228 case kCondGE:
2229 case kCondAE:
2230 case kCondB:
2231 codegen->GenerateConditionWithZero(condition, out, in);
2232 return;
2233 case kCondLE:
2234 case kCondGT:
2235 default:
2236 break;
2237 }
2238 }
2239 }
2240
2241 if (condition == kCondEQ || condition == kCondNE) {
2242 ShifterOperand operand;
2243
2244 if (right.IsConstant()) {
2245 operand = ShifterOperand(value);
2246 } else if (out == right.AsRegister<Register>()) {
2247 // Avoid 32-bit instructions if possible.
2248 operand = ShifterOperand(in);
2249 in = right.AsRegister<Register>();
2250 } else {
2251 operand = ShifterOperand(right.AsRegister<Register>());
2252 }
2253
2254 if (condition == kCondNE && ArmAssembler::IsLowRegister(out)) {
2255 __ subs(out, in, operand);
2256 __ it(NE);
2257 __ mov(out, ShifterOperand(1), NE);
2258 } else {
2259 __ sub(out, in, operand);
2260 codegen->GenerateConditionWithZero(condition, out, out);
2261 }
2262
2263 return;
2264 }
2265
2266 GenerateConditionGeneric(cond, codegen);
2267}
2268
Donghui Bai426b49c2016-11-08 14:55:38 +08002269static bool CanEncodeConstantAs8BitImmediate(HConstant* constant) {
2270 const Primitive::Type type = constant->GetType();
2271 bool ret = false;
2272
2273 DCHECK(Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) << type;
2274
2275 if (type == Primitive::kPrimLong) {
2276 const uint64_t value = constant->AsLongConstant()->GetValueAsUint64();
2277
2278 ret = IsUint<8>(Low32Bits(value)) && IsUint<8>(High32Bits(value));
2279 } else {
2280 ret = IsUint<8>(CodeGenerator::GetInt32ValueOf(constant));
2281 }
2282
2283 return ret;
2284}
2285
2286static Location Arm8BitEncodableConstantOrRegister(HInstruction* constant) {
2287 DCHECK(!Primitive::IsFloatingPointType(constant->GetType()));
2288
2289 if (constant->IsConstant() && CanEncodeConstantAs8BitImmediate(constant->AsConstant())) {
2290 return Location::ConstantLocation(constant->AsConstant());
2291 }
2292
2293 return Location::RequiresRegister();
2294}
2295
2296static bool CanGenerateConditionalMove(const Location& out, const Location& src) {
2297 // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
2298 // we check that we are not dealing with floating-point output (there is no
2299 // 16-bit VMOV encoding).
2300 if (!out.IsRegister() && !out.IsRegisterPair()) {
2301 return false;
2302 }
2303
2304 // For constants, we also check that the output is in one or two low registers,
2305 // and that the constants fit in an 8-bit unsigned integer, so that a 16-bit
2306 // MOV encoding can be used.
2307 if (src.IsConstant()) {
2308 if (!CanEncodeConstantAs8BitImmediate(src.GetConstant())) {
2309 return false;
2310 }
2311
2312 if (out.IsRegister()) {
2313 if (!ArmAssembler::IsLowRegister(out.AsRegister<Register>())) {
2314 return false;
2315 }
2316 } else {
2317 DCHECK(out.IsRegisterPair());
2318
2319 if (!ArmAssembler::IsLowRegister(out.AsRegisterPairHigh<Register>())) {
2320 return false;
2321 }
2322 }
2323 }
2324
2325 return true;
2326}
2327
Anton Kirilov74234da2017-01-13 14:42:47 +00002328#undef __
2329// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
2330#define __ down_cast<ArmAssembler*>(GetAssembler())-> // NOLINT
2331
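// Returns the label to branch to at the end of `instruction`'s code: when the
// next instruction is a HGoto that is safe to redirect to (not a loop back
// edge needing a suspend check), the successor's label is returned directly,
// so a branch to an unconditional branch is avoided (illustration only):
//
//   b successor        // instead of:  b local_label
//                      //              local_label: b successor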
Donghui Bai426b49c2016-11-08 14:55:38 +08002332Label* CodeGeneratorARM::GetFinalLabel(HInstruction* instruction, Label* final_label) {
2333 DCHECK(!instruction->IsControlFlow() && !instruction->IsSuspendCheck());
Anton Kirilov6f644202017-02-27 18:29:45 +00002334 DCHECK(!instruction->IsInvoke() || !instruction->GetLocations()->CanCall());
Donghui Bai426b49c2016-11-08 14:55:38 +08002335
2336 const HBasicBlock* const block = instruction->GetBlock();
2337 const HLoopInformation* const info = block->GetLoopInformation();
2338 HInstruction* const next = instruction->GetNext();
2339
2340 // Avoid a branch to a branch.
2341 if (next->IsGoto() && (info == nullptr ||
2342 !info->IsBackEdge(*block) ||
2343 !info->HasSuspendCheck())) {
2344 final_label = GetLabelOf(next->AsGoto()->GetSuccessor());
2345 }
2346
2347 return final_label;
2348}
2349
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01002350void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01002351 stream << Register(reg);
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01002352}
2353
2354void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01002355 stream << SRegister(reg);
Nicolas Geoffraya7062e02014-05-22 12:50:17 +01002356}
2357
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002358size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
2359 __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
2360 return kArmWordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01002361}
2362
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002363size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
2364 __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
2365 return kArmWordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01002366}
2367
Nicolas Geoffray840e5462015-01-07 16:01:24 +00002368size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
2369 __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
2370 return kArmWordSize;
2371}
2372
2373size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
2374 __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
2375 return kArmWordSize;
2376}
2377
Calin Juravle34166012014-12-19 17:22:29 +00002378CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
Calin Juravlecd6dffe2015-01-08 17:35:35 +00002379 const ArmInstructionSetFeatures& isa_features,
Serban Constantinescuecc43662015-08-13 13:33:12 +01002380 const CompilerOptions& compiler_options,
2381 OptimizingCompilerStats* stats)
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00002382 : CodeGenerator(graph,
2383 kNumberOfCoreRegisters,
2384 kNumberOfSRegisters,
2385 kNumberOfRegisterPairs,
2386 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
2387 arraysize(kCoreCalleeSaves)),
Nicolas Geoffray75d5b9b2015-10-05 07:40:35 +00002388 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
2389 arraysize(kFpuCalleeSaves)),
Serban Constantinescuecc43662015-08-13 13:33:12 +01002390 compiler_options,
2391 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +01002392 block_labels_(nullptr),
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002393 location_builder_(graph, this),
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01002394 instruction_visitor_(graph, this),
Nicolas Geoffray8d486732014-07-16 16:23:40 +01002395 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +01002396 assembler_(graph->GetArena()),
Vladimir Marko58155012015-08-19 12:49:41 +00002397 isa_features_(isa_features),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00002398 uint32_literals_(std::less<uint32_t>(),
2399 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko65979462017-05-19 17:25:12 +01002400 pc_relative_method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko0eb882b2017-05-15 13:39:18 +01002401 method_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01002402 pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko1998cd02017-01-13 13:02:58 +00002403 type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko65979462017-05-19 17:25:12 +01002404 pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01002405 baker_read_barrier_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray132d8362016-11-16 09:19:42 +00002406 jit_string_patches_(StringReferenceValueComparator(),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00002407 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
2408 jit_class_patches_(TypeReferenceValueComparator(),
2409 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Andreas Gampe501fd632015-09-10 16:11:06 -07002410 // Always save the LR register to mimic Quick.
2411 AddAllocatedRegister(Location::RegisterLocation(LR));
Nicolas Geoffrayab032bc2014-07-15 12:55:21 +01002412}
2413
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002414void CodeGeneratorARM::Finalize(CodeAllocator* allocator) {
2415 // Ensure that we fix up branches and literal loads and emit the literal pool.
2416 __ FinalizeCode();
2417
2418 // Adjust native pc offsets in stack maps.
2419 for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
Mathieu Chartiera2f526f2017-01-19 14:48:48 -08002420 uint32_t old_position =
2421 stack_map_stream_.GetStackMap(i).native_pc_code_offset.Uint32Value(kThumb2);
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002422 uint32_t new_position = __ GetAdjustedPosition(old_position);
2423 stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
2424 }
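  // Note: FinalizeCode() can change instruction sizes (e.g. a Thumb2 branch widened from
  // 16 to 32 bits) and emits the literal pool, so offsets recorded during code generation
  // may have shifted; GetAdjustedPosition() maps each old offset to its final position.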
Alexandre Rameseb7b7392015-06-19 14:47:01 +01002425 // Adjust pc offsets for the disassembly information.
2426 if (disasm_info_ != nullptr) {
2427 GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
2428 frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
2429 frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
2430 for (auto& it : *disasm_info_->GetInstructionIntervals()) {
2431 it.second.start = __ GetAdjustedPosition(it.second.start);
2432 it.second.end = __ GetAdjustedPosition(it.second.end);
2433 }
2434 for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
2435 it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
2436 it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
2437 }
2438 }
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002439
2440 CodeGenerator::Finalize(allocator);
2441}
2442
David Brazdil58282f42016-01-14 12:45:10 +00002443void CodeGeneratorARM::SetupBlockedRegisters() const {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002444 // Stack register, LR and PC are always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01002445 blocked_core_registers_[SP] = true;
2446 blocked_core_registers_[LR] = true;
2447 blocked_core_registers_[PC] = true;
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002448
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002449 // Reserve thread register.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01002450 blocked_core_registers_[TR] = true;
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002451
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01002452 // Reserve temp register.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01002453 blocked_core_registers_[IP] = true;
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01002454
David Brazdil58282f42016-01-14 12:45:10 +00002455 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01002456 // Stubs do not save callee-save floating point registers. If the graph
2457 // is debuggable, we need to deal with these registers differently. For
2458 // now, just block them.
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00002459 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
2460 blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
2461 }
2462 }
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002463}
2464
Nicolas Geoffray4a34a422014-04-03 10:38:37 +01002465InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08002466 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray4a34a422014-04-03 10:38:37 +01002467 assembler_(codegen->GetAssembler()),
2468 codegen_(codegen) {}
2469
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00002470void CodeGeneratorARM::ComputeSpillMask() {
2471 core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
2472 DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
David Brazdil58282f42016-01-14 12:45:10 +00002473 // There is no easy instruction to restore just the PC on thumb2. We spill and
2474 // restore another arbitrary register.
2475 core_spill_mask_ |= (1 << kCoreAlwaysSpillRegister);
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00002476 fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
2477 // We use vpush and vpop for saving and restoring floating point registers, which take
2478 // an SRegister and the number of registers to save/restore after that SRegister. We
2479 // therefore update the `fpu_spill_mask_` to also contain the registers in that range
2480 // that were not allocated.
2481 if (fpu_spill_mask_ != 0) {
2482 uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
2483 uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
2484 for (uint32_t i = least_significant_bit + 1 ; i < most_significant_bit; ++i) {
2485 fpu_spill_mask_ |= (1 << i);
2486 }
2487 }
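  // Illustrative example of the fill-in above: if the allocator used only S16 and S19,
  // fpu_spill_mask_ starts as (1 << 16) | (1 << 19); bits 17 and 18 are added so that the
  // prologue can emit a single contiguous vpush of S16-S19 (four registers).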
2488}
2489
David Srbeckyc6b4dd82015-04-07 20:32:43 +01002490static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01002491 return dwarf::Reg::ArmCore(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01002492}
2493
2494static dwarf::Reg DWARFReg(SRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01002495 return dwarf::Reg::ArmFp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01002496}
2497
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00002498void CodeGeneratorARM::GenerateFrameEntry() {
Roland Levillain199f3362014-11-27 17:15:16 +00002499 bool skip_overflow_check =
2500 IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00002501 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002502 __ Bind(&frame_entry_label_);
2503
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00002504 if (HasEmptyFrame()) {
2505 return;
2506 }
2507
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002508 if (!skip_overflow_check) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00002509 __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
2510 __ LoadFromOffset(kLoadWord, IP, IP, 0);
2511 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002512 }
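  // The probe above loads from GetStackOverflowReservedBytes(kArm) below SP. With implicit
  // stack overflow checks (asserted above), if fewer than that many bytes of stack remain,
  // the load faults and the fault handler raises StackOverflowError; RecordPcInfo supplies
  // the stack map for that faulting pc.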
2513
Andreas Gampe501fd632015-09-10 16:11:06 -07002514 __ PushList(core_spill_mask_);
2515 __ cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(core_spill_mask_));
2516 __ cfi().RelOffsetForMany(DWARFReg(kMethodRegisterArgument), 0, core_spill_mask_, kArmWordSize);
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00002517 if (fpu_spill_mask_ != 0) {
2518 SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
2519 __ vpushs(start_register, POPCOUNT(fpu_spill_mask_));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01002520 __ cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(fpu_spill_mask_));
David Srbecky9d8606d2015-04-12 09:35:32 +01002521 __ cfi().RelOffsetForMany(DWARFReg(S0), 0, fpu_spill_mask_, kArmWordSize);
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00002522 }
Mingyao Yang063fc772016-08-02 11:02:54 -07002523
David Srbeckyc6b4dd82015-04-07 20:32:43 +01002524 int adjust = GetFrameSize() - FrameEntrySpillSize();
2525 __ AddConstant(SP, -adjust);
2526 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01002527
2528 // Save the current method if we need it. Note that we do not
2529 // do this in HCurrentMethod, as the instruction might have been removed
2530 // in the SSA graph.
2531 if (RequiresCurrentMethod()) {
2532 __ StoreToOffset(kStoreWord, kMethodRegisterArgument, SP, 0);
2533 }
Nicolas Geoffrayf7893532017-06-15 12:34:36 +01002534
2535 if (GetGraph()->HasShouldDeoptimizeFlag()) {
2536 // Initialize should_deoptimize flag to 0.
2537 __ mov(IP, ShifterOperand(0));
2538 __ StoreToOffset(kStoreWord, IP, SP, GetStackOffsetOfShouldDeoptimizeFlag());
2539 }
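  // Sketch of the resulting frame, from high to low addresses (illustrative; the middle
  // region depends on the particular graph):
  //   [core callee-saves + LR]                      <- PushList(core_spill_mask_)
  //   [FP callee-saves]                             <- vpushs, if fpu_spill_mask_ != 0
  //   [spill slots / outgoing args / deopt flag]    <- AddConstant(SP, -adjust)
  //   [current ArtMethod*]                          <- SP + 0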
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00002540}
2541
2542void CodeGeneratorARM::GenerateFrameExit() {
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00002543 if (HasEmptyFrame()) {
2544 __ bx(LR);
2545 return;
2546 }
David Srbeckyc34dc932015-04-12 09:27:43 +01002547 __ cfi().RememberState();
David Srbeckyc6b4dd82015-04-07 20:32:43 +01002548 int adjust = GetFrameSize() - FrameEntrySpillSize();
2549 __ AddConstant(SP, adjust);
2550 __ cfi().AdjustCFAOffset(-adjust);
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00002551 if (fpu_spill_mask_ != 0) {
2552 SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
2553 __ vpops(start_register, POPCOUNT(fpu_spill_mask_));
Andreas Gampe542451c2016-07-26 09:02:02 -07002554 __ cfi().AdjustCFAOffset(-static_cast<int>(kArmPointerSize) * POPCOUNT(fpu_spill_mask_));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01002555 __ cfi().RestoreMany(DWARFReg(SRegister(0)), fpu_spill_mask_);
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00002556 }
Andreas Gampe501fd632015-09-10 16:11:06 -07002557 // Pop LR into PC to return.
2558 DCHECK_NE(core_spill_mask_ & (1 << LR), 0U);
2559 uint32_t pop_mask = (core_spill_mask_ & (~(1 << LR))) | 1 << PC;
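  // For example, a method that pushed {R5, R6, LR} in the prologue pops {R5, R6, PC} here,
  // so a single PopList restores the callee-saves and returns in one instruction.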
2560 __ PopList(pop_mask);
David Srbeckyc34dc932015-04-12 09:27:43 +01002561 __ cfi().RestoreState();
2562 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00002563}
2564
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01002565void CodeGeneratorARM::Bind(HBasicBlock* block) {
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07002566 Label* label = GetLabelOf(block);
2567 __ BindTrackedLabel(label);
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00002568}
2569
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002570Location InvokeDexCallingConventionVisitorARM::GetNextLocation(Primitive::Type type) {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01002571 switch (type) {
2572 case Primitive::kPrimBoolean:
2573 case Primitive::kPrimByte:
2574 case Primitive::kPrimChar:
2575 case Primitive::kPrimShort:
2576 case Primitive::kPrimInt:
2577 case Primitive::kPrimNot: {
2578 uint32_t index = gp_index_++;
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002579 uint32_t stack_index = stack_index_++;
Nicolas Geoffraya747a392014-04-17 14:56:23 +01002580 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002581 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffraya747a392014-04-17 14:56:23 +01002582 } else {
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002583 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01002584 }
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01002585 }
Nicolas Geoffraya747a392014-04-17 14:56:23 +01002586
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002587 case Primitive::kPrimLong: {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01002588 uint32_t index = gp_index_;
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002589 uint32_t stack_index = stack_index_;
Nicolas Geoffraya747a392014-04-17 14:56:23 +01002590 gp_index_ += 2;
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002591 stack_index_ += 2;
Nicolas Geoffraya747a392014-04-17 14:56:23 +01002592 if (index + 1 < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray69c15d32015-01-13 11:42:13 +00002593 if (calling_convention.GetRegisterAt(index) == R1) {
2594 // Skip R1, and use R2_R3 instead.
2595 gp_index_++;
2596 index++;
2597 }
2598 }
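      // For example, a method whose only argument is a long starts at index 0, i.e. R1;
      // the skip above bumps it to index 1, so the value goes in the R2_R3 pair and R1 is
      // simply left unused.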
2599 if (index + 1 < calling_convention.GetNumberOfRegisters()) {
2600 DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
Nicolas Geoffrayaf2c65c2015-01-14 09:40:32 +00002601 calling_convention.GetRegisterAt(index + 1));
Calin Juravle175dc732015-08-25 15:42:32 +01002602
Nicolas Geoffray69c15d32015-01-13 11:42:13 +00002603 return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
Nicolas Geoffrayaf2c65c2015-01-14 09:40:32 +00002604 calling_convention.GetRegisterAt(index + 1));
Nicolas Geoffraya747a392014-04-17 14:56:23 +01002605 } else {
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002606 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
2607 }
2608 }
2609
2610 case Primitive::kPrimFloat: {
2611 uint32_t stack_index = stack_index_++;
2612 if (float_index_ % 2 == 0) {
2613 float_index_ = std::max(double_index_, float_index_);
2614 }
2615 if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
2616 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
2617 } else {
2618 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
2619 }
2620 }
2621
2622 case Primitive::kPrimDouble: {
2623 double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
2624 uint32_t stack_index = stack_index_;
2625 stack_index_ += 2;
2626 if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
2627 uint32_t index = double_index_;
2628 double_index_ += 2;
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00002629 Location result = Location::FpuRegisterPairLocation(
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002630 calling_convention.GetFpuRegisterAt(index),
2631 calling_convention.GetFpuRegisterAt(index + 1));
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00002632 DCHECK(ExpectedPairLayout(result));
2633 return result;
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002634 } else {
2635 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
Nicolas Geoffraya747a392014-04-17 14:56:23 +01002636 }
2637 }
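    // Worked example of the bookkeeping above: for arguments (float, double, float), the
    // first float takes S0, the double is rounded up to an even register and takes S2/S3,
    // and the second float back-fills the hole in S1.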
2638
Nicolas Geoffraya747a392014-04-17 14:56:23 +01002639 case Primitive::kPrimVoid:
2640 LOG(FATAL) << "Unexpected parameter type " << type;
2641 break;
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01002642 }
Roland Levillain3b359c72015-11-17 19:35:12 +00002643 return Location::NoLocation();
Nicolas Geoffraya747a392014-04-17 14:56:23 +01002644}
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01002645
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002646Location InvokeDexCallingConventionVisitorARM::GetReturnLocation(Primitive::Type type) const {
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002647 switch (type) {
2648 case Primitive::kPrimBoolean:
2649 case Primitive::kPrimByte:
2650 case Primitive::kPrimChar:
2651 case Primitive::kPrimShort:
2652 case Primitive::kPrimInt:
2653 case Primitive::kPrimNot: {
2654 return Location::RegisterLocation(R0);
2655 }
2656
2657 case Primitive::kPrimFloat: {
2658 return Location::FpuRegisterLocation(S0);
2659 }
2660
2661 case Primitive::kPrimLong: {
2662 return Location::RegisterPairLocation(R0, R1);
2663 }
2664
2665 case Primitive::kPrimDouble: {
2666 return Location::FpuRegisterPairLocation(S0, S1);
2667 }
2668
2669 case Primitive::kPrimVoid:
Roland Levillain3b359c72015-11-17 19:35:12 +00002670 return Location::NoLocation();
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002671 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002672
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002673 UNREACHABLE();
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002674}
2675
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002676Location InvokeDexCallingConventionVisitorARM::GetMethodLocation() const {
2677 return Location::RegisterLocation(kMethodRegisterArgument);
2678}
2679
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002680void CodeGeneratorARM::Move32(Location destination, Location source) {
2681 if (source.Equals(destination)) {
2682 return;
2683 }
2684 if (destination.IsRegister()) {
2685 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002686 __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002687 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002688 __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002689 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002690 __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002691 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002692 } else if (destination.IsFpuRegister()) {
2693 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002694 __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002695 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002696 __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002697 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002698 __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002699 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002700 } else {
Calin Juravlea21f5982014-11-13 15:53:04 +00002701 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002702 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002703 __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002704 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002705 __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002706 } else {
Calin Juravlea21f5982014-11-13 15:53:04 +00002707 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffray360231a2014-10-08 21:07:48 +01002708 __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
2709 __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002710 }
2711 }
2712}
2713
2714void CodeGeneratorARM::Move64(Location destination, Location source) {
2715 if (source.Equals(destination)) {
2716 return;
2717 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002718 if (destination.IsRegisterPair()) {
2719 if (source.IsRegisterPair()) {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00002720 EmitParallelMoves(
2721 Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
2722 Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01002723 Primitive::kPrimInt,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00002724 Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01002725 Location::RegisterLocation(destination.AsRegisterPairLow<Register>()),
2726 Primitive::kPrimInt);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002727 } else if (source.IsFpuRegister()) {
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002728 UNIMPLEMENTED(FATAL);
Calin Juravlee460d1d2015-09-29 04:52:17 +01002729 } else if (source.IsFpuRegisterPair()) {
2730 __ vmovrrd(destination.AsRegisterPairLow<Register>(),
2731 destination.AsRegisterPairHigh<Register>(),
2732 FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002733 } else {
2734 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00002735 DCHECK(ExpectedPairLayout(destination));
2736 __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
2737 SP, source.GetStackIndex());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002738 }
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002739 } else if (destination.IsFpuRegisterPair()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002740 if (source.IsDoubleStackSlot()) {
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002741 __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
2742 SP,
2743 source.GetStackIndex());
Calin Juravlee460d1d2015-09-29 04:52:17 +01002744 } else if (source.IsRegisterPair()) {
2745 __ vmovdrr(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
2746 source.AsRegisterPairLow<Register>(),
2747 source.AsRegisterPairHigh<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002748 } else {
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002749 UNIMPLEMENTED(FATAL);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002750 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002751 } else {
2752 DCHECK(destination.IsDoubleStackSlot());
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002753 if (source.IsRegisterPair()) {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00002754 // No conflict possible, so just do the moves.
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002755 if (source.AsRegisterPairLow<Register>() == R1) {
2756 DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
Nicolas Geoffray360231a2014-10-08 21:07:48 +01002757 __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
2758 __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002759 } else {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002760 __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002761 SP, destination.GetStackIndex());
2762 }
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00002763 } else if (source.IsFpuRegisterPair()) {
2764 __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
2765 SP,
2766 destination.GetStackIndex());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002767 } else {
2768 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00002769 EmitParallelMoves(
2770 Location::StackSlot(source.GetStackIndex()),
2771 Location::StackSlot(destination.GetStackIndex()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01002772 Primitive::kPrimInt,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00002773 Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
Nicolas Geoffray90218252015-04-15 11:56:51 +01002774 Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)),
2775 Primitive::kPrimInt);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002776 }
2777 }
2778}
2779
Calin Juravle175dc732015-08-25 15:42:32 +01002780void CodeGeneratorARM::MoveConstant(Location location, int32_t value) {
2781 DCHECK(location.IsRegister());
2782 __ LoadImmediate(location.AsRegister<Register>(), value);
2783}
2784
Calin Juravlee460d1d2015-09-29 04:52:17 +01002785void CodeGeneratorARM::MoveLocation(Location dst, Location src, Primitive::Type dst_type) {
David Brazdil74eb1b22015-12-14 11:44:01 +00002786 HParallelMove move(GetGraph()->GetArena());
2787 move.AddMove(src, dst, dst_type, nullptr);
2788 GetMoveResolver()->EmitNativeCode(&move);
Calin Juravlee460d1d2015-09-29 04:52:17 +01002789}
2790
2791void CodeGeneratorARM::AddLocationAsTemp(Location location, LocationSummary* locations) {
2792 if (location.IsRegister()) {
2793 locations->AddTemp(location);
2794 } else if (location.IsRegisterPair()) {
2795 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
2796 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
2797 } else {
2798 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
2799 }
2800}
2801
Calin Juravle175dc732015-08-25 15:42:32 +01002802void CodeGeneratorARM::InvokeRuntime(QuickEntrypointEnum entrypoint,
2803 HInstruction* instruction,
2804 uint32_t dex_pc,
2805 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01002806 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01002807 GenerateInvokeRuntime(GetThreadOffset<kArmPointerSize>(entrypoint).Int32Value());
Serban Constantinescuda8ffec2016-03-09 12:02:11 +00002808 if (EntrypointRequiresStackMap(entrypoint)) {
2809 RecordPcInfo(instruction, dex_pc, slow_path);
2810 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01002811}
2812
Roland Levillaindec8f632016-07-22 17:10:06 +01002813void CodeGeneratorARM::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
2814 HInstruction* instruction,
2815 SlowPathCode* slow_path) {
2816 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01002817 GenerateInvokeRuntime(entry_point_offset);
2818}
2819
2820void CodeGeneratorARM::GenerateInvokeRuntime(int32_t entry_point_offset) {
Roland Levillaindec8f632016-07-22 17:10:06 +01002821 __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
2822 __ blx(LR);
2823}
2824
David Brazdilfc6a86a2015-06-26 10:33:45 +00002825void InstructionCodeGeneratorARM::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01002826 DCHECK(!successor->IsExitBlock());
2827
2828 HBasicBlock* block = got->GetBlock();
2829 HInstruction* previous = got->GetPrevious();
2830
2831 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00002832 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01002833 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
2834 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
2835 return;
2836 }
2837
2838 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
2839 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
2840 }
2841 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002842 __ b(codegen_->GetLabelOf(successor));
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00002843 }
2844}
2845
David Brazdilfc6a86a2015-06-26 10:33:45 +00002846void LocationsBuilderARM::VisitGoto(HGoto* got) {
2847 got->SetLocations(nullptr);
2848}
2849
2850void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
2851 HandleGoto(got, got->GetSuccessor());
2852}
2853
2854void LocationsBuilderARM::VisitTryBoundary(HTryBoundary* try_boundary) {
2855 try_boundary->SetLocations(nullptr);
2856}
2857
2858void InstructionCodeGeneratorARM::VisitTryBoundary(HTryBoundary* try_boundary) {
2859 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
2860 if (!successor->IsExitBlock()) {
2861 HandleGoto(try_boundary, successor);
2862 }
2863}
2864
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002865void LocationsBuilderARM::VisitExit(HExit* exit) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002866 exit->SetLocations(nullptr);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002867}
2868
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002869void InstructionCodeGeneratorARM::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00002870}
2871
David Brazdil0debae72015-11-12 18:37:00 +00002872void InstructionCodeGeneratorARM::GenerateCompareTestAndBranch(HCondition* condition,
2873 Label* true_target_in,
2874 Label* false_target_in) {
Anton Kirilov217b2ce2017-03-16 11:47:12 +00002875 if (CanGenerateTest(condition, codegen_->GetAssembler())) {
2876 Label* non_fallthrough_target;
2877 bool invert;
2878
2879 if (true_target_in == nullptr) {
2880 DCHECK(false_target_in != nullptr);
2881 non_fallthrough_target = false_target_in;
2882 invert = true;
2883 } else {
2884 non_fallthrough_target = true_target_in;
2885 invert = false;
2886 }
2887
2888 const auto cond = GenerateTest(condition, invert, codegen_);
2889
2890 __ b(non_fallthrough_target, cond.first);
2891
2892 if (false_target_in != nullptr && false_target_in != non_fallthrough_target) {
2893 __ b(false_target_in);
2894 }
2895
2896 return;
2897 }
2898
David Brazdil0debae72015-11-12 18:37:00 +00002899 // Generated branching requires both targets to be explicit. If either of the
2900 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
2901 Label fallthrough_target;
2902 Label* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
2903 Label* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
2904
Anton Kirilov217b2ce2017-03-16 11:47:12 +00002905 DCHECK_EQ(condition->InputAt(0)->GetType(), Primitive::kPrimLong);
Anton Kirilov5601d4e2017-05-11 19:33:50 +01002906 GenerateLongComparesAndJumps(condition, true_target, false_target, codegen_);
Roland Levillain4fa13f62015-07-06 18:11:54 +01002907
David Brazdil0debae72015-11-12 18:37:00 +00002908 if (false_target != &fallthrough_target) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01002909 __ b(false_target);
2910 }
David Brazdil0debae72015-11-12 18:37:00 +00002911
2912 if (fallthrough_target.IsLinked()) {
2913 __ Bind(&fallthrough_target);
2914 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01002915}
2916
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002917void InstructionCodeGeneratorARM::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00002918 size_t condition_input_index,
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002919 Label* true_target,
David Brazdil0debae72015-11-12 18:37:00 +00002920 Label* false_target) {
2921 HInstruction* cond = instruction->InputAt(condition_input_index);
2922
2923 if (true_target == nullptr && false_target == nullptr) {
2924 // Nothing to do. The code always falls through.
2925 return;
2926 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00002927 // Constant condition, statically compared against "true" (integer value 1).
2928 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00002929 if (true_target != nullptr) {
2930 __ b(true_target);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002931 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002932 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00002933 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00002934 if (false_target != nullptr) {
2935 __ b(false_target);
2936 }
2937 }
2938 return;
2939 }
2940
2941 // The following code generates these patterns:
2942 // (1) true_target == nullptr && false_target != nullptr
2943 // - opposite condition true => branch to false_target
2944 // (2) true_target != nullptr && false_target == nullptr
2945 // - condition true => branch to true_target
2946 // (3) true_target != nullptr && false_target != nullptr
2947 // - condition true => branch to true_target
2948 // - branch to false_target
2949 if (IsBooleanValueOrMaterializedCondition(cond)) {
2950 // Condition has been materialized, compare the output to 0.
2951 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
2952 DCHECK(cond_val.IsRegister());
2953 if (true_target == nullptr) {
2954 __ CompareAndBranchIfZero(cond_val.AsRegister<Register>(), false_target);
2955 } else {
2956 __ CompareAndBranchIfNonZero(cond_val.AsRegister<Register>(), true_target);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002957 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002958 } else {
David Brazdil0debae72015-11-12 18:37:00 +00002959 // Condition has not been materialized. Use its inputs as the comparison and
2960 // its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04002961 HCondition* condition = cond->AsCondition();
David Brazdil0debae72015-11-12 18:37:00 +00002962
2963 // If this is a long or FP comparison that has been folded into
2964 // the HCondition, generate the comparison directly.
2965 Primitive::Type type = condition->InputAt(0)->GetType();
2966 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
2967 GenerateCompareTestAndBranch(condition, true_target, false_target);
2968 return;
2969 }
2970
Donghui Bai426b49c2016-11-08 14:55:38 +08002971 Label* non_fallthrough_target;
2972 Condition arm_cond;
David Brazdil0debae72015-11-12 18:37:00 +00002973 LocationSummary* locations = cond->GetLocations();
2974 DCHECK(locations->InAt(0).IsRegister());
2975 Register left = locations->InAt(0).AsRegister<Register>();
2976 Location right = locations->InAt(1);
Donghui Bai426b49c2016-11-08 14:55:38 +08002977
David Brazdil0debae72015-11-12 18:37:00 +00002978 if (true_target == nullptr) {
Donghui Bai426b49c2016-11-08 14:55:38 +08002979 arm_cond = ARMCondition(condition->GetOppositeCondition());
2980 non_fallthrough_target = false_target;
David Brazdil0debae72015-11-12 18:37:00 +00002981 } else {
Donghui Bai426b49c2016-11-08 14:55:38 +08002982 arm_cond = ARMCondition(condition->GetCondition());
2983 non_fallthrough_target = true_target;
2984 }
2985
2986 if (right.IsConstant() && (arm_cond == NE || arm_cond == EQ) &&
2987 CodeGenerator::GetInt32ValueOf(right.GetConstant()) == 0) {
2988 if (arm_cond == EQ) {
2989 __ CompareAndBranchIfZero(left, non_fallthrough_target);
2990 } else {
2991 DCHECK_EQ(arm_cond, NE);
2992 __ CompareAndBranchIfNonZero(left, non_fallthrough_target);
2993 }
2994 } else {
2995 if (right.IsRegister()) {
2996 __ cmp(left, ShifterOperand(right.AsRegister<Register>()));
2997 } else {
2998 DCHECK(right.IsConstant());
2999 __ CmpConstant(left, CodeGenerator::GetInt32ValueOf(right.GetConstant()));
3000 }
3001
3002 __ b(non_fallthrough_target, arm_cond);
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01003003 }
Dave Allison20dfc792014-06-16 20:44:29 -07003004 }
David Brazdil0debae72015-11-12 18:37:00 +00003005
3006 // If neither branch falls through (case 3), the conditional branch to `true_target`
3007 // was already emitted (case 2) and we need to emit a jump to `false_target`.
3008 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003009 __ b(false_target);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00003010 }
3011}
3012
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003013void LocationsBuilderARM::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00003014 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
3015 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003016 locations->SetInAt(0, Location::RequiresRegister());
3017 }
3018}
3019
3020void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00003021 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
3022 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
3023 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
3024 nullptr : codegen_->GetLabelOf(true_successor);
3025 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
3026 nullptr : codegen_->GetLabelOf(false_successor);
3027 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003028}
3029
3030void LocationsBuilderARM::VisitDeoptimize(HDeoptimize* deoptimize) {
3031 LocationSummary* locations = new (GetGraph()->GetArena())
3032 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01003033 InvokeRuntimeCallingConvention calling_convention;
3034 RegisterSet caller_saves = RegisterSet::Empty();
3035 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3036 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00003037 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003038 locations->SetInAt(0, Location::RequiresRegister());
3039 }
3040}
3041
3042void InstructionCodeGeneratorARM::VisitDeoptimize(HDeoptimize* deoptimize) {
Artem Serovf4d6aee2016-07-11 10:41:45 +01003043 SlowPathCodeARM* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00003044 GenerateTestAndBranch(deoptimize,
3045 /* condition_input_index */ 0,
3046 slow_path->GetEntryLabel(),
3047 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003048}
Dave Allison20dfc792014-06-16 20:44:29 -07003049
Mingyao Yang063fc772016-08-02 11:02:54 -07003050void LocationsBuilderARM::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
3051 LocationSummary* locations = new (GetGraph()->GetArena())
3052 LocationSummary(flag, LocationSummary::kNoCall);
3053 locations->SetOut(Location::RequiresRegister());
3054}
3055
3056void InstructionCodeGeneratorARM::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
3057 __ LoadFromOffset(kLoadWord,
3058 flag->GetLocations()->Out().AsRegister<Register>(),
3059 SP,
3060 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
3061}
3062
David Brazdil74eb1b22015-12-14 11:44:01 +00003063void LocationsBuilderARM::VisitSelect(HSelect* select) {
3064 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Donghui Bai426b49c2016-11-08 14:55:38 +08003065 const bool is_floating_point = Primitive::IsFloatingPointType(select->GetType());
3066
3067 if (is_floating_point) {
David Brazdil74eb1b22015-12-14 11:44:01 +00003068 locations->SetInAt(0, Location::RequiresFpuRegister());
Donghui Bai426b49c2016-11-08 14:55:38 +08003069 locations->SetInAt(1, Location::FpuRegisterOrConstant(select->GetTrueValue()));
David Brazdil74eb1b22015-12-14 11:44:01 +00003070 } else {
3071 locations->SetInAt(0, Location::RequiresRegister());
Donghui Bai426b49c2016-11-08 14:55:38 +08003072 locations->SetInAt(1, Arm8BitEncodableConstantOrRegister(select->GetTrueValue()));
David Brazdil74eb1b22015-12-14 11:44:01 +00003073 }
Donghui Bai426b49c2016-11-08 14:55:38 +08003074
David Brazdil74eb1b22015-12-14 11:44:01 +00003075 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
Donghui Bai426b49c2016-11-08 14:55:38 +08003076 locations->SetInAt(2, Location::RegisterOrConstant(select->GetCondition()));
3077 // The code generator handles overlap with the values, but not with the condition.
3078 locations->SetOut(Location::SameAsFirstInput());
3079 } else if (is_floating_point) {
3080 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3081 } else {
3082 if (!locations->InAt(1).IsConstant()) {
3083 locations->SetInAt(0, Arm8BitEncodableConstantOrRegister(select->GetFalseValue()));
3084 }
3085
3086 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
David Brazdil74eb1b22015-12-14 11:44:01 +00003087 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003088}
3089
3090void InstructionCodeGeneratorARM::VisitSelect(HSelect* select) {
Donghui Bai426b49c2016-11-08 14:55:38 +08003091 HInstruction* const condition = select->GetCondition();
3092 const LocationSummary* const locations = select->GetLocations();
3093 const Primitive::Type type = select->GetType();
3094 const Location first = locations->InAt(0);
3095 const Location out = locations->Out();
3096 const Location second = locations->InAt(1);
3097 Location src;
3098
3099 if (condition->IsIntConstant()) {
3100 if (condition->AsIntConstant()->IsFalse()) {
3101 src = first;
3102 } else {
3103 src = second;
3104 }
3105
3106 codegen_->MoveLocation(out, src, type);
3107 return;
3108 }
3109
3110 if (!Primitive::IsFloatingPointType(type) &&
Anton Kirilov217b2ce2017-03-16 11:47:12 +00003111 (IsBooleanValueOrMaterializedCondition(condition) ||
3112 CanGenerateTest(condition->AsCondition(), codegen_->GetAssembler()))) {
Donghui Bai426b49c2016-11-08 14:55:38 +08003113 bool invert = false;
3114
3115 if (out.Equals(second)) {
3116 src = first;
3117 invert = true;
3118 } else if (out.Equals(first)) {
3119 src = second;
3120 } else if (second.IsConstant()) {
3121 DCHECK(CanEncodeConstantAs8BitImmediate(second.GetConstant()));
3122 src = second;
3123 } else if (first.IsConstant()) {
3124 DCHECK(CanEncodeConstantAs8BitImmediate(first.GetConstant()));
3125 src = first;
3126 invert = true;
3127 } else {
3128 src = second;
3129 }
3130
3131 if (CanGenerateConditionalMove(out, src)) {
3132 if (!out.Equals(first) && !out.Equals(second)) {
3133 codegen_->MoveLocation(out, src.Equals(first) ? second : first, type);
3134 }
3135
Anton Kirilov217b2ce2017-03-16 11:47:12 +00003136 std::pair<Condition, Condition> cond;
3137
3138 if (IsBooleanValueOrMaterializedCondition(condition)) {
3139 __ CmpConstant(locations->InAt(2).AsRegister<Register>(), 0);
3140 cond = invert ? std::make_pair(EQ, NE) : std::make_pair(NE, EQ);
3141 } else {
3142 cond = GenerateTest(condition->AsCondition(), invert, codegen_);
3143 }
Donghui Bai426b49c2016-11-08 14:55:38 +08003144
3145 if (out.IsRegister()) {
3146 ShifterOperand operand;
3147
3148 if (src.IsConstant()) {
3149 operand = ShifterOperand(CodeGenerator::GetInt32ValueOf(src.GetConstant()));
3150 } else {
3151 DCHECK(src.IsRegister());
3152 operand = ShifterOperand(src.AsRegister<Register>());
3153 }
3154
Anton Kirilov217b2ce2017-03-16 11:47:12 +00003155 __ it(cond.first);
3156 __ mov(out.AsRegister<Register>(), operand, cond.first);
Donghui Bai426b49c2016-11-08 14:55:38 +08003157 } else {
3158 DCHECK(out.IsRegisterPair());
3159
3160 ShifterOperand operand_high;
3161 ShifterOperand operand_low;
3162
3163 if (src.IsConstant()) {
3164 const int64_t value = src.GetConstant()->AsLongConstant()->GetValue();
3165
3166 operand_high = ShifterOperand(High32Bits(value));
3167 operand_low = ShifterOperand(Low32Bits(value));
3168 } else {
3169 DCHECK(src.IsRegisterPair());
3170 operand_high = ShifterOperand(src.AsRegisterPairHigh<Register>());
3171 operand_low = ShifterOperand(src.AsRegisterPairLow<Register>());
3172 }
3173
Anton Kirilov217b2ce2017-03-16 11:47:12 +00003174 __ it(cond.first);
3175 __ mov(out.AsRegisterPairLow<Register>(), operand_low, cond.first);
3176 __ it(cond.first);
3177 __ mov(out.AsRegisterPairHigh<Register>(), operand_high, cond.first);
Donghui Bai426b49c2016-11-08 14:55:38 +08003178 }
3179
3180 return;
3181 }
3182 }
3183
3184 Label* false_target = nullptr;
3185 Label* true_target = nullptr;
3186 Label select_end;
3187 Label* target = codegen_->GetFinalLabel(select, &select_end);
3188
3189 if (out.Equals(second)) {
3190 true_target = target;
3191 src = first;
3192 } else {
3193 false_target = target;
3194 src = second;
3195
3196 if (!out.Equals(first)) {
3197 codegen_->MoveLocation(out, first, type);
3198 }
3199 }
3200
3201 GenerateTestAndBranch(select, 2, true_target, false_target);
3202 codegen_->MoveLocation(out, src, type);
3203
3204 if (select_end.IsLinked()) {
3205 __ Bind(&select_end);
3206 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003207}
3208
David Srbecky0cf44932015-12-09 14:09:59 +00003209void LocationsBuilderARM::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3210 new (GetGraph()->GetArena()) LocationSummary(info);
3211}
3212
David Srbeckyd28f4a02016-03-14 17:14:24 +00003213void InstructionCodeGeneratorARM::VisitNativeDebugInfo(HNativeDebugInfo*) {
3214 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003215}
3216
3217void CodeGeneratorARM::GenerateNop() {
3218 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003219}
3220
Anton Kirilov5601d4e2017-05-11 19:33:50 +01003221// `temp` is an extra temporary register that is used for some conditions;
3222// callers may not specify it, in which case the method will use a scratch
3223// register instead.
3224void CodeGeneratorARM::GenerateConditionWithZero(IfCondition condition,
3225 Register out,
3226 Register in,
3227 Register temp) {
3228 switch (condition) {
3229 case kCondEQ:
3230 // x <= 0 iff x == 0 when the comparison is unsigned.
3231 case kCondBE:
3232 if (temp == kNoRegister || (ArmAssembler::IsLowRegister(out) && out != in)) {
3233 temp = out;
3234 }
3235
3236 // Avoid 32-bit instructions if possible; note that `in` and `temp` must be
3237 // different as well.
3238 if (ArmAssembler::IsLowRegister(in) && ArmAssembler::IsLowRegister(temp) && in != temp) {
3239 // temp = - in; only 0 sets the carry flag.
3240 __ rsbs(temp, in, ShifterOperand(0));
3241
3242 if (out == in) {
3243 std::swap(in, temp);
3244 }
3245
3246 // out = - in + in + carry = carry
3247 __ adc(out, temp, ShifterOperand(in));
3248 } else {
3249 // If `in` is 0, then it has 32 leading zeros, and less than that otherwise.
3250 __ clz(out, in);
3251 // Any number less than 32 logically shifted right by 5 bits results in 0;
3252 // the same operation on 32 yields 1.
3253 __ Lsr(out, out, 5);
3254 }
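      // Worked example: for in == 0, rsbs yields 0 with the carry set and adc computes
      // 0 + 0 + 1 = 1; for in == 5, rsbs yields -5 with the carry clear and adc computes
      // -5 + 5 + 0 = 0. On the clz path, in == 0 has 32 leading zeros and 32 >> 5 == 1,
      // while any non-zero input has clz < 32 and shifts down to 0.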
3255
3256 break;
3257 case kCondNE:
3258 // x > 0 iff x != 0 when the comparison is unsigned.
3259 case kCondA:
3260 if (out == in) {
3261 if (temp == kNoRegister || in == temp) {
3262 temp = IP;
3263 }
3264 } else if (temp == kNoRegister || !ArmAssembler::IsLowRegister(temp)) {
3265 temp = out;
3266 }
3267
3268 // temp = in - 1; only 0 does not set the carry flag.
3269 __ subs(temp, in, ShifterOperand(1));
3270 // out = in + ~temp + carry = in + (-(in - 1) - 1) + carry = in - in + 1 - 1 + carry = carry
3271 __ sbc(out, in, ShifterOperand(temp));
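      // Worked example: for in == 5, subs yields temp = 4 with the carry set and sbc
      // computes 5 + ~4 + 1 = 1; for in == 0, temp becomes 0xFFFFFFFF with the carry
      // clear and sbc computes 0 + 0 + 0 = 0.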
3272 break;
3273 case kCondGE:
3274 __ mvn(out, ShifterOperand(in));
3275 in = out;
3276 FALLTHROUGH_INTENDED;
3277 case kCondLT:
3278 // We only care about the sign bit.
3279 __ Lsr(out, in, 31);
3280 break;
3281 case kCondAE:
3282 // Trivially true.
3283 __ mov(out, ShifterOperand(1));
3284 break;
3285 case kCondB:
3286 // Trivially false.
3287 __ mov(out, ShifterOperand(0));
3288 break;
3289 default:
3290 LOG(FATAL) << "Unexpected condition " << condition;
3291 UNREACHABLE();
3292 }
3293}
3294
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003295void LocationsBuilderARM::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003296 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01003297 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Roland Levillain4fa13f62015-07-06 18:11:54 +01003298 // Handle the long/FP comparisons made in instruction simplification.
3299 switch (cond->InputAt(0)->GetType()) {
3300 case Primitive::kPrimLong:
3301 locations->SetInAt(0, Location::RequiresRegister());
3302 locations->SetInAt(1, Location::RegisterOrConstant(cond->InputAt(1)));
David Brazdilb3e773e2016-01-26 11:28:37 +00003303 if (!cond->IsEmittedAtUseSite()) {
Anton Kirilov217b2ce2017-03-16 11:47:12 +00003304 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Roland Levillain4fa13f62015-07-06 18:11:54 +01003305 }
3306 break;
3307
3308 case Primitive::kPrimFloat:
3309 case Primitive::kPrimDouble:
3310 locations->SetInAt(0, Location::RequiresFpuRegister());
Vladimir Marko37dd80d2016-08-01 17:41:45 +01003311 locations->SetInAt(1, ArithmeticZeroOrFpuRegister(cond->InputAt(1)));
David Brazdilb3e773e2016-01-26 11:28:37 +00003312 if (!cond->IsEmittedAtUseSite()) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01003313 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3314 }
3315 break;
3316
3317 default:
3318 locations->SetInAt(0, Location::RequiresRegister());
3319 locations->SetInAt(1, Location::RegisterOrConstant(cond->InputAt(1)));
David Brazdilb3e773e2016-01-26 11:28:37 +00003320 if (!cond->IsEmittedAtUseSite()) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01003321 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3322 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01003323 }
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00003324}
3325
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003326void InstructionCodeGeneratorARM::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003327 if (cond->IsEmittedAtUseSite()) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01003328 return;
Dave Allison20dfc792014-06-16 20:44:29 -07003329 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01003330
Anton Kirilov5601d4e2017-05-11 19:33:50 +01003331 const Primitive::Type type = cond->GetLeft()->GetType();
Roland Levillain4fa13f62015-07-06 18:11:54 +01003332
Anton Kirilov5601d4e2017-05-11 19:33:50 +01003333 if (Primitive::IsFloatingPointType(type)) {
3334 GenerateConditionGeneric(cond, codegen_);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00003335 return;
Roland Levillain4fa13f62015-07-06 18:11:54 +01003336 }
3337
Anton Kirilov5601d4e2017-05-11 19:33:50 +01003338 DCHECK(Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) << type;
Roland Levillain4fa13f62015-07-06 18:11:54 +01003339
Anton Kirilov5601d4e2017-05-11 19:33:50 +01003340 const IfCondition condition = cond->GetCondition();
Roland Levillain4fa13f62015-07-06 18:11:54 +01003341
Anton Kirilov5601d4e2017-05-11 19:33:50 +01003342 // A condition with only one boolean input, or with two boolean inputs that do not form an
3343 // equality or inequality, results from transformations done by the instruction simplifier
3344 // and is handled as a regular condition with integral inputs.
3345 if (type == Primitive::kPrimBoolean &&
3346 cond->GetRight()->GetType() == Primitive::kPrimBoolean &&
3347 (condition == kCondEQ || condition == kCondNE)) {
3348 const LocationSummary* const locations = cond->GetLocations();
3349 Register left = locations->InAt(0).AsRegister<Register>();
3350 const Register out = locations->Out().AsRegister<Register>();
3351 const Location right_loc = locations->InAt(1);
Anton Kirilov217b2ce2017-03-16 11:47:12 +00003352
Anton Kirilov5601d4e2017-05-11 19:33:50 +01003353 // The constant case is handled by the instruction simplifier.
3354 DCHECK(!right_loc.IsConstant());
Anton Kirilov217b2ce2017-03-16 11:47:12 +00003355
Anton Kirilov5601d4e2017-05-11 19:33:50 +01003356 Register right = right_loc.AsRegister<Register>();
Anton Kirilov217b2ce2017-03-16 11:47:12 +00003357
Anton Kirilov5601d4e2017-05-11 19:33:50 +01003358 // Avoid 32-bit instructions if possible.
3359 if (out == right) {
3360 std::swap(left, right);
3361 }
Anton Kirilov217b2ce2017-03-16 11:47:12 +00003362
Anton Kirilov5601d4e2017-05-11 19:33:50 +01003363 __ eor(out, left, ShifterOperand(right));
3364
3365 if (condition == kCondEQ) {
3366 __ eor(out, out, ShifterOperand(1));
3367 }
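    // For example, with boolean inputs left = 1 and right = 0, the first eor produces 1
    // (the inputs differ); for kCondEQ the second eor flips that to 0, the expected value
    // of 1 == 0.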
3368
3369 return;
Anton Kirilov217b2ce2017-03-16 11:47:12 +00003370 }
Anton Kirilov6f644202017-02-27 18:29:45 +00003371
Anton Kirilov5601d4e2017-05-11 19:33:50 +01003372 GenerateConditionIntegralOrNonPrimitive(cond, codegen_);
Dave Allison20dfc792014-06-16 20:44:29 -07003373}
3374
3375void LocationsBuilderARM::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003376 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07003377}
3378
3379void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003380 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07003381}
3382
3383void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003384 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07003385}
3386
3387void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003388 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07003389}
3390
3391void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003392 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07003393}
3394
3395void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003396 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07003397}
3398
3399void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003400 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07003401}
3402
3403void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003404 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07003405}
3406
3407void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003408 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07003409}
3410
3411void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003412 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07003413}
3414
3415void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003416 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07003417}
3418
3419void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003420 HandleCondition(comp);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00003421}
3422
Aart Bike9f37602015-10-09 11:15:55 -07003423void LocationsBuilderARM::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003424 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07003425}
3426
3427void InstructionCodeGeneratorARM::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003428 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07003429}
3430
3431void LocationsBuilderARM::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003432 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07003433}
3434
3435void InstructionCodeGeneratorARM::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003436 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07003437}
3438
3439void LocationsBuilderARM::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003440 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07003441}
3442
3443void InstructionCodeGeneratorARM::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003444 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07003445}
3446
3447void LocationsBuilderARM::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003448 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07003449}
3450
3451void InstructionCodeGeneratorARM::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003452 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07003453}
3454
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00003455void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003456 LocationSummary* locations =
3457 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003458 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00003459}
3460
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003461void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01003462 // Will be generated at use site.
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00003463}
3464
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003465void LocationsBuilderARM::VisitNullConstant(HNullConstant* constant) {
3466 LocationSummary* locations =
3467 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3468 locations->SetOut(Location::ConstantLocation(constant));
3469}
3470
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003471void InstructionCodeGeneratorARM::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003472 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003473}
3474
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003475void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003476 LocationSummary* locations =
3477 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003478 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003479}
3480
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003481void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003482 // Will be generated at use site.
3483}
3484
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01003485void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
3486 LocationSummary* locations =
3487 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3488 locations->SetOut(Location::ConstantLocation(constant));
3489}
3490
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003491void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01003492 // Will be generated at use site.
3493}
3494
3495void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
3496 LocationSummary* locations =
3497 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3498 locations->SetOut(Location::ConstantLocation(constant));
3499}
3500
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003501void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01003502 // Will be generated at use site.
3503}
3504
Igor Murashkind01745e2017-04-05 16:40:31 -07003505void LocationsBuilderARM::VisitConstructorFence(HConstructorFence* constructor_fence) {
3506 constructor_fence->SetLocations(nullptr);
3507}
3508
3509void InstructionCodeGeneratorARM::VisitConstructorFence(
3510 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
3511 codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
3512}
3513
Calin Juravle27df7582015-04-17 19:12:31 +01003514void LocationsBuilderARM::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
3515 memory_barrier->SetLocations(nullptr);
3516}
3517
3518void InstructionCodeGeneratorARM::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillainc9285912015-12-18 10:38:42 +00003519 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01003520}
3521
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00003522void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00003523 ret->SetLocations(nullptr);
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00003524}
3525
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003526void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00003527 codegen_->GenerateFrameExit();
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00003528}
3529
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00003530void LocationsBuilderARM::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003531 LocationSummary* locations =
3532 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00003533 locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00003534}
3535
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003536void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00003537 codegen_->GenerateFrameExit();
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00003538}
3539
Calin Juravle175dc732015-08-25 15:42:32 +01003540void LocationsBuilderARM::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
 3541  // The trampoline uses the same calling convention as the dex calling convention,
 3542  // except that instead of loading arg0/r0 with the target Method*, arg0/r0 will
 3543  // contain the method_idx.
3544 HandleInvoke(invoke);
3545}
3546
3547void InstructionCodeGeneratorARM::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3548 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
3549}
3550
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00003551void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003552 // Explicit clinit checks triggered by static invokes must have been pruned by
3553 // art::PrepareForRegisterAllocation.
3554 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003555
Vladimir Marko68c981f2016-08-26 13:13:33 +01003556 IntrinsicLocationsBuilderARM intrinsic(codegen_);
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08003557 if (intrinsic.TryDispatch(invoke)) {
3558 return;
3559 }
3560
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01003561 HandleInvoke(invoke);
3562}
3563
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08003564static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM* codegen) {
3565 if (invoke->GetLocations()->Intrinsified()) {
3566 IntrinsicCodeGeneratorARM intrinsic(codegen);
3567 intrinsic.Dispatch(invoke);
3568 return true;
3569 }
3570 return false;
3571}
3572
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00003573void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003574 // Explicit clinit checks triggered by static invokes must have been pruned by
3575 // art::PrepareForRegisterAllocation.
3576 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003577
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08003578 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3579 return;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00003580 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01003581
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003582 LocationSummary* locations = invoke->GetLocations();
3583 codegen_->GenerateStaticOrDirectCall(
3584 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01003585}
3586
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01003587void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01003588 InvokeDexCallingConventionVisitorARM calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01003589 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00003590}
3591
Nicolas Geoffray52839d12014-11-07 17:47:25 +00003592void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Vladimir Marko68c981f2016-08-26 13:13:33 +01003593 IntrinsicLocationsBuilderARM intrinsic(codegen_);
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08003594 if (intrinsic.TryDispatch(invoke)) {
3595 return;
3596 }
3597
Nicolas Geoffray52839d12014-11-07 17:47:25 +00003598 HandleInvoke(invoke);
3599}
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00003600
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01003601void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08003602 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3603 return;
3604 }
3605
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003606 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01003607 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00003608}
3609
Nicolas Geoffray52839d12014-11-07 17:47:25 +00003610void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
3611 HandleInvoke(invoke);
3612 // Add the hidden argument.
3613 invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
3614}
3615
3616void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
3617 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain3b359c72015-11-17 19:35:12 +00003618 LocationSummary* locations = invoke->GetLocations();
3619 Register temp = locations->GetTemp(0).AsRegister<Register>();
3620 Register hidden_reg = locations->GetTemp(1).AsRegister<Register>();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00003621 Location receiver = locations->InAt(0);
3622 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3623
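  // Dispatch sketch: the code below loads the receiver's class, follows the class's
  // IMT pointer, indexes the IMT with this invoke's ImtIndex, and calls the resolved
  // ArtMethod's quick entry point. The method index placed in R12 is the hidden
  // argument that conflict-resolution code is expected to consult when several
  // interface methods share an IMT slot (assumption about the stub's behavior).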
Roland Levillain3b359c72015-11-17 19:35:12 +00003624  // Set the hidden argument. This is safe to do here, as R12
3625 // won't be modified thereafter, before the `blx` (call) instruction.
3626 DCHECK_EQ(R12, hidden_reg);
3627 __ LoadImmediate(hidden_reg, invoke->GetDexMethodIndex());
Nicolas Geoffray52839d12014-11-07 17:47:25 +00003628
Nicolas Geoffray52839d12014-11-07 17:47:25 +00003629 if (receiver.IsStackSlot()) {
3630 __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
Roland Levillain3b359c72015-11-17 19:35:12 +00003631 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00003632 __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
3633 } else {
Roland Levillain3b359c72015-11-17 19:35:12 +00003634 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00003635 __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00003636 }
Calin Juravle77520bc2015-01-12 18:45:46 +00003637 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain3b359c72015-11-17 19:35:12 +00003638 // Instead of simply (possibly) unpoisoning `temp` here, we should
3639 // emit a read barrier for the previous class reference load.
3640 // However this is not required in practice, as this is an
3641 // intermediate/temporary reference and because the current
3642 // concurrent copying collector keeps the from-space memory
 3643  // intact/accessible until the end of the marking phase (future
 3644  // versions of the collector may not preserve this guarantee).
Roland Levillain4d027112015-07-01 15:41:14 +01003645 __ MaybeUnpoisonHeapReference(temp);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00003646 __ LoadFromOffset(kLoadWord, temp, temp,
3647 mirror::Class::ImtPtrOffset(kArmPointerSize).Uint32Value());
3648 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00003649 invoke->GetImtIndex(), kArmPointerSize));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00003650 // temp = temp->GetImtEntryAt(method_offset);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00003651 __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
Roland Levillain3b359c72015-11-17 19:35:12 +00003652 uint32_t entry_point =
Andreas Gampe542451c2016-07-26 09:02:02 -07003653 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize).Int32Value();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00003654 // LR = temp->GetEntryPoint();
3655 __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
3656 // LR();
3657 __ blx(LR);
3658 DCHECK(!codegen_->IsLeafMethod());
3659 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3660}
3661
Orion Hodsonac141392017-01-13 11:53:47 +00003662void LocationsBuilderARM::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
3663 HandleInvoke(invoke);
3664}
3665
3666void InstructionCodeGeneratorARM::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
3667 codegen_->GenerateInvokePolymorphicCall(invoke);
3668}
3669
Roland Levillain88cb1752014-10-20 16:36:47 +01003670void LocationsBuilderARM::VisitNeg(HNeg* neg) {
3671 LocationSummary* locations =
3672 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
3673 switch (neg->GetResultType()) {
Nicolas Geoffray829280c2015-01-28 10:20:37 +00003674 case Primitive::kPrimInt: {
Roland Levillain88cb1752014-10-20 16:36:47 +01003675 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray829280c2015-01-28 10:20:37 +00003676 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3677 break;
3678 }
3679 case Primitive::kPrimLong: {
3680 locations->SetInAt(0, Location::RequiresRegister());
3681 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Roland Levillain88cb1752014-10-20 16:36:47 +01003682 break;
Roland Levillain2e07b4f2014-10-23 18:12:09 +01003683 }
Roland Levillain88cb1752014-10-20 16:36:47 +01003684
Roland Levillain88cb1752014-10-20 16:36:47 +01003685 case Primitive::kPrimFloat:
3686 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00003687 locations->SetInAt(0, Location::RequiresFpuRegister());
3688 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillain88cb1752014-10-20 16:36:47 +01003689 break;
3690
3691 default:
3692 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
3693 }
3694}
3695
3696void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
3697 LocationSummary* locations = neg->GetLocations();
3698 Location out = locations->Out();
3699 Location in = locations->InAt(0);
3700 switch (neg->GetResultType()) {
3701 case Primitive::kPrimInt:
3702 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003703 __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
Roland Levillain88cb1752014-10-20 16:36:47 +01003704 break;
3705
3706 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01003707 DCHECK(in.IsRegisterPair());
3708 // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
3709 __ rsbs(out.AsRegisterPairLow<Register>(),
3710 in.AsRegisterPairLow<Register>(),
3711 ShifterOperand(0));
3712 // We cannot emit an RSC (Reverse Subtract with Carry)
3713 // instruction here, as it does not exist in the Thumb-2
3714 // instruction set. We use the following approach
3715 // using SBC and SUB instead.
3716 //
3717 // out.hi = -C
3718 __ sbc(out.AsRegisterPairHigh<Register>(),
3719 out.AsRegisterPairHigh<Register>(),
3720 ShifterOperand(out.AsRegisterPairHigh<Register>()));
3721 // out.hi = out.hi - in.hi
3722 __ sub(out.AsRegisterPairHigh<Register>(),
3723 out.AsRegisterPairHigh<Register>(),
3724 ShifterOperand(in.AsRegisterPairHigh<Register>()));
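      // A worked example of the three instructions above (illustrative only),
      // negating in = 0x00000000_00000001 (i.e. 1):
      //   rsbs: out.lo = 0 - 1 = 0xFFFFFFFF, and a borrow is produced
      //   sbc:  out.hi = -borrow = 0xFFFFFFFF
      //   sub:  out.hi = 0xFFFFFFFF - in.hi = 0xFFFFFFFF
      // giving out = 0xFFFFFFFF_FFFFFFFF, i.e. -1 as expected.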
3725 break;
3726
Roland Levillain88cb1752014-10-20 16:36:47 +01003727 case Primitive::kPrimFloat:
Roland Levillain3dbcb382014-10-28 17:30:07 +00003728 DCHECK(in.IsFpuRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003729 __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
Roland Levillain3dbcb382014-10-28 17:30:07 +00003730 break;
3731
Roland Levillain88cb1752014-10-20 16:36:47 +01003732 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00003733 DCHECK(in.IsFpuRegisterPair());
3734 __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
3735 FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
Roland Levillain88cb1752014-10-20 16:36:47 +01003736 break;
3737
3738 default:
3739 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
3740 }
3741}
3742
Roland Levillaindff1f282014-11-05 14:15:05 +00003743void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
Roland Levillaindff1f282014-11-05 14:15:05 +00003744 Primitive::Type result_type = conversion->GetResultType();
3745 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00003746 DCHECK_NE(result_type, input_type);
Roland Levillain624279f2014-12-04 11:54:28 +00003747
Roland Levillain5b3ee562015-04-14 16:02:41 +01003748 // The float-to-long, double-to-long and long-to-float type conversions
3749 // rely on a call to the runtime.
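  // (ARMv7 VFP has no single instruction converting between floating point and
  // 64-bit integers, so those three conversions are emitted as calls to the
  // kQuickF2l, kQuickD2l and kQuickL2f entrypoints in VisitTypeConversion below.)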
Roland Levillain624279f2014-12-04 11:54:28 +00003750 LocationSummary::CallKind call_kind =
Roland Levillain5b3ee562015-04-14 16:02:41 +01003751 (((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
3752 && result_type == Primitive::kPrimLong)
3753 || (input_type == Primitive::kPrimLong && result_type == Primitive::kPrimFloat))
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003754 ? LocationSummary::kCallOnMainOnly
Roland Levillain624279f2014-12-04 11:54:28 +00003755 : LocationSummary::kNoCall;
3756 LocationSummary* locations =
3757 new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
3758
David Brazdilb2bd1c52015-03-25 11:17:37 +00003759 // The Java language does not allow treating boolean as an integral type but
3760 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00003761
Roland Levillaindff1f282014-11-05 14:15:05 +00003762 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00003763 case Primitive::kPrimByte:
3764 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00003765 case Primitive::kPrimLong:
3766 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00003767 case Primitive::kPrimBoolean:
3768 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00003769 case Primitive::kPrimShort:
3770 case Primitive::kPrimInt:
3771 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00003772 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00003773 locations->SetInAt(0, Location::RequiresRegister());
3774 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3775 break;
3776
3777 default:
3778 LOG(FATAL) << "Unexpected type conversion from " << input_type
3779 << " to " << result_type;
3780 }
3781 break;
3782
Roland Levillain01a8d712014-11-14 16:27:39 +00003783 case Primitive::kPrimShort:
3784 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00003785 case Primitive::kPrimLong:
3786 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00003787 case Primitive::kPrimBoolean:
3788 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00003789 case Primitive::kPrimByte:
3790 case Primitive::kPrimInt:
3791 case Primitive::kPrimChar:
3792 // Processing a Dex `int-to-short' instruction.
3793 locations->SetInAt(0, Location::RequiresRegister());
3794 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3795 break;
3796
3797 default:
3798 LOG(FATAL) << "Unexpected type conversion from " << input_type
3799 << " to " << result_type;
3800 }
3801 break;
3802
Roland Levillain946e1432014-11-11 17:35:19 +00003803 case Primitive::kPrimInt:
3804 switch (input_type) {
3805 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00003806 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00003807 locations->SetInAt(0, Location::Any());
3808 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3809 break;
3810
3811 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00003812 // Processing a Dex `float-to-int' instruction.
3813 locations->SetInAt(0, Location::RequiresFpuRegister());
3814 locations->SetOut(Location::RequiresRegister());
3815 locations->AddTemp(Location::RequiresFpuRegister());
3816 break;
3817
Roland Levillain946e1432014-11-11 17:35:19 +00003818 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003819 // Processing a Dex `double-to-int' instruction.
3820 locations->SetInAt(0, Location::RequiresFpuRegister());
3821 locations->SetOut(Location::RequiresRegister());
3822 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00003823 break;
3824
3825 default:
3826 LOG(FATAL) << "Unexpected type conversion from " << input_type
3827 << " to " << result_type;
3828 }
3829 break;
3830
Roland Levillaindff1f282014-11-05 14:15:05 +00003831 case Primitive::kPrimLong:
3832 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00003833 case Primitive::kPrimBoolean:
3834 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00003835 case Primitive::kPrimByte:
3836 case Primitive::kPrimShort:
3837 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00003838 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00003839 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00003840 locations->SetInAt(0, Location::RequiresRegister());
3841 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3842 break;
3843
Roland Levillain624279f2014-12-04 11:54:28 +00003844 case Primitive::kPrimFloat: {
3845 // Processing a Dex `float-to-long' instruction.
3846 InvokeRuntimeCallingConvention calling_convention;
3847 locations->SetInAt(0, Location::FpuRegisterLocation(
3848 calling_convention.GetFpuRegisterAt(0)));
3849 locations->SetOut(Location::RegisterPairLocation(R0, R1));
3850 break;
3851 }
3852
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003853 case Primitive::kPrimDouble: {
3854 // Processing a Dex `double-to-long' instruction.
3855 InvokeRuntimeCallingConvention calling_convention;
3856 locations->SetInAt(0, Location::FpuRegisterPairLocation(
3857 calling_convention.GetFpuRegisterAt(0),
3858 calling_convention.GetFpuRegisterAt(1)));
3859 locations->SetOut(Location::RegisterPairLocation(R0, R1));
Roland Levillaindff1f282014-11-05 14:15:05 +00003860 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003861 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003862
3863 default:
3864 LOG(FATAL) << "Unexpected type conversion from " << input_type
3865 << " to " << result_type;
3866 }
3867 break;
3868
Roland Levillain981e4542014-11-14 11:47:14 +00003869 case Primitive::kPrimChar:
3870 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00003871 case Primitive::kPrimLong:
3872 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00003873 case Primitive::kPrimBoolean:
3874 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00003875 case Primitive::kPrimByte:
3876 case Primitive::kPrimShort:
3877 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00003878 // Processing a Dex `int-to-char' instruction.
3879 locations->SetInAt(0, Location::RequiresRegister());
3880 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3881 break;
3882
3883 default:
3884 LOG(FATAL) << "Unexpected type conversion from " << input_type
3885 << " to " << result_type;
3886 }
3887 break;
3888
Roland Levillaindff1f282014-11-05 14:15:05 +00003889 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00003890 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00003891 case Primitive::kPrimBoolean:
3892 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00003893 case Primitive::kPrimByte:
3894 case Primitive::kPrimShort:
3895 case Primitive::kPrimInt:
3896 case Primitive::kPrimChar:
3897 // Processing a Dex `int-to-float' instruction.
3898 locations->SetInAt(0, Location::RequiresRegister());
3899 locations->SetOut(Location::RequiresFpuRegister());
3900 break;
3901
Roland Levillain5b3ee562015-04-14 16:02:41 +01003902 case Primitive::kPrimLong: {
Roland Levillain6d0e4832014-11-27 18:31:21 +00003903 // Processing a Dex `long-to-float' instruction.
Roland Levillain5b3ee562015-04-14 16:02:41 +01003904 InvokeRuntimeCallingConvention calling_convention;
3905 locations->SetInAt(0, Location::RegisterPairLocation(
3906 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3907 locations->SetOut(Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
Roland Levillain6d0e4832014-11-27 18:31:21 +00003908 break;
Roland Levillain5b3ee562015-04-14 16:02:41 +01003909 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00003910
Roland Levillaincff13742014-11-17 14:32:17 +00003911 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00003912 // Processing a Dex `double-to-float' instruction.
3913 locations->SetInAt(0, Location::RequiresFpuRegister());
3914 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00003915 break;
3916
3917 default:
3918 LOG(FATAL) << "Unexpected type conversion from " << input_type
3919 << " to " << result_type;
 3920      }
3921 break;
3922
Roland Levillaindff1f282014-11-05 14:15:05 +00003923 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00003924 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00003925 case Primitive::kPrimBoolean:
3926 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00003927 case Primitive::kPrimByte:
3928 case Primitive::kPrimShort:
3929 case Primitive::kPrimInt:
3930 case Primitive::kPrimChar:
3931 // Processing a Dex `int-to-double' instruction.
3932 locations->SetInAt(0, Location::RequiresRegister());
3933 locations->SetOut(Location::RequiresFpuRegister());
3934 break;
3935
3936 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00003937 // Processing a Dex `long-to-double' instruction.
3938 locations->SetInAt(0, Location::RequiresRegister());
3939 locations->SetOut(Location::RequiresFpuRegister());
Roland Levillain682393c2015-04-14 15:57:52 +01003940 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain647b9ed2014-11-27 12:06:00 +00003941 locations->AddTemp(Location::RequiresFpuRegister());
3942 break;
3943
Roland Levillaincff13742014-11-17 14:32:17 +00003944 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00003945 // Processing a Dex `float-to-double' instruction.
3946 locations->SetInAt(0, Location::RequiresFpuRegister());
3947 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00003948 break;
3949
3950 default:
3951 LOG(FATAL) << "Unexpected type conversion from " << input_type
3952 << " to " << result_type;
 3953      }
Roland Levillaindff1f282014-11-05 14:15:05 +00003954 break;
3955
3956 default:
3957 LOG(FATAL) << "Unexpected type conversion from " << input_type
3958 << " to " << result_type;
3959 }
3960}
3961
3962void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
3963 LocationSummary* locations = conversion->GetLocations();
3964 Location out = locations->Out();
3965 Location in = locations->InAt(0);
3966 Primitive::Type result_type = conversion->GetResultType();
3967 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00003968 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00003969 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00003970 case Primitive::kPrimByte:
3971 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00003972 case Primitive::kPrimLong:
3973 // Type conversion from long to byte is a result of code transformations.
3974 __ sbfx(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>(), 0, 8);
3975 break;
David Brazdil46e2a392015-03-16 17:31:52 +00003976 case Primitive::kPrimBoolean:
3977 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00003978 case Primitive::kPrimShort:
3979 case Primitive::kPrimInt:
3980 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00003981 // Processing a Dex `int-to-byte' instruction.
Roland Levillain271ab9c2014-11-27 15:23:57 +00003982 __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
Roland Levillain51d3fc42014-11-13 14:11:42 +00003983 break;
3984
3985 default:
3986 LOG(FATAL) << "Unexpected type conversion from " << input_type
3987 << " to " << result_type;
3988 }
3989 break;
3990
Roland Levillain01a8d712014-11-14 16:27:39 +00003991 case Primitive::kPrimShort:
3992 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00003993 case Primitive::kPrimLong:
3994 // Type conversion from long to short is a result of code transformations.
3995 __ sbfx(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>(), 0, 16);
3996 break;
David Brazdil46e2a392015-03-16 17:31:52 +00003997 case Primitive::kPrimBoolean:
3998 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00003999 case Primitive::kPrimByte:
4000 case Primitive::kPrimInt:
4001 case Primitive::kPrimChar:
4002 // Processing a Dex `int-to-short' instruction.
Roland Levillain271ab9c2014-11-27 15:23:57 +00004003 __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
Roland Levillain01a8d712014-11-14 16:27:39 +00004004 break;
4005
4006 default:
4007 LOG(FATAL) << "Unexpected type conversion from " << input_type
4008 << " to " << result_type;
4009 }
4010 break;
4011
Roland Levillain946e1432014-11-11 17:35:19 +00004012 case Primitive::kPrimInt:
4013 switch (input_type) {
4014 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00004015 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00004016 DCHECK(out.IsRegister());
4017 if (in.IsRegisterPair()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004018 __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
Roland Levillain946e1432014-11-11 17:35:19 +00004019 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004020 __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
Roland Levillain946e1432014-11-11 17:35:19 +00004021 } else {
4022 DCHECK(in.IsConstant());
4023 DCHECK(in.GetConstant()->IsLongConstant());
4024 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004025 __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
Roland Levillain946e1432014-11-11 17:35:19 +00004026 }
4027 break;
4028
Roland Levillain3f8f9362014-12-02 17:45:01 +00004029 case Primitive::kPrimFloat: {
4030 // Processing a Dex `float-to-int' instruction.
4031 SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
Vladimir Marko8c5d3102016-07-07 12:07:44 +01004032 __ vcvtis(temp, in.AsFpuRegister<SRegister>());
Roland Levillain3f8f9362014-12-02 17:45:01 +00004033 __ vmovrs(out.AsRegister<Register>(), temp);
4034 break;
4035 }
4036
Roland Levillain4c0b61f2014-12-05 12:06:01 +00004037 case Primitive::kPrimDouble: {
4038 // Processing a Dex `double-to-int' instruction.
4039 SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
Vladimir Marko8c5d3102016-07-07 12:07:44 +01004040 __ vcvtid(temp_s, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00004041 __ vmovrs(out.AsRegister<Register>(), temp_s);
Roland Levillain946e1432014-11-11 17:35:19 +00004042 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00004043 }
Roland Levillain946e1432014-11-11 17:35:19 +00004044
4045 default:
4046 LOG(FATAL) << "Unexpected type conversion from " << input_type
4047 << " to " << result_type;
4048 }
4049 break;
4050
Roland Levillaindff1f282014-11-05 14:15:05 +00004051 case Primitive::kPrimLong:
4052 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00004053 case Primitive::kPrimBoolean:
4054 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00004055 case Primitive::kPrimByte:
4056 case Primitive::kPrimShort:
4057 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00004058 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00004059 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00004060 DCHECK(out.IsRegisterPair());
4061 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00004062 __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
Roland Levillaindff1f282014-11-05 14:15:05 +00004063 // Sign extension.
4064 __ Asr(out.AsRegisterPairHigh<Register>(),
4065 out.AsRegisterPairLow<Register>(),
4066 31);
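          // For example (illustrative), an input of 0x80000000 (-2^31) becomes the
          // pair hi = 0xFFFFFFFF, lo = 0x80000000, i.e. -2^31 as a 64-bit value.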
4067 break;
4068
4069 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00004070 // Processing a Dex `float-to-long' instruction.
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01004071 codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004072 CheckEntrypointTypes<kQuickF2l, int64_t, float>();
Roland Levillain624279f2014-12-04 11:54:28 +00004073 break;
4074
Roland Levillaindff1f282014-11-05 14:15:05 +00004075 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00004076 // Processing a Dex `double-to-long' instruction.
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01004077 codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004078 CheckEntrypointTypes<kQuickD2l, int64_t, double>();
Roland Levillaindff1f282014-11-05 14:15:05 +00004079 break;
4080
4081 default:
4082 LOG(FATAL) << "Unexpected type conversion from " << input_type
4083 << " to " << result_type;
4084 }
4085 break;
4086
Roland Levillain981e4542014-11-14 11:47:14 +00004087 case Primitive::kPrimChar:
4088 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00004089 case Primitive::kPrimLong:
4090 // Type conversion from long to char is a result of code transformations.
4091 __ ubfx(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>(), 0, 16);
4092 break;
David Brazdil46e2a392015-03-16 17:31:52 +00004093 case Primitive::kPrimBoolean:
4094 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00004095 case Primitive::kPrimByte:
4096 case Primitive::kPrimShort:
4097 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00004098 // Processing a Dex `int-to-char' instruction.
Roland Levillain271ab9c2014-11-27 15:23:57 +00004099 __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
Roland Levillain981e4542014-11-14 11:47:14 +00004100 break;
4101
4102 default:
4103 LOG(FATAL) << "Unexpected type conversion from " << input_type
4104 << " to " << result_type;
4105 }
4106 break;
4107
Roland Levillaindff1f282014-11-05 14:15:05 +00004108 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00004109 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00004110 case Primitive::kPrimBoolean:
4111 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00004112 case Primitive::kPrimByte:
4113 case Primitive::kPrimShort:
4114 case Primitive::kPrimInt:
4115 case Primitive::kPrimChar: {
4116 // Processing a Dex `int-to-float' instruction.
Roland Levillain271ab9c2014-11-27 15:23:57 +00004117 __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
4118 __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00004119 break;
4120 }
4121
Roland Levillain5b3ee562015-04-14 16:02:41 +01004122 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00004123 // Processing a Dex `long-to-float' instruction.
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01004124 codegen_->InvokeRuntime(kQuickL2f, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004125 CheckEntrypointTypes<kQuickL2f, float, int64_t>();
Roland Levillain6d0e4832014-11-27 18:31:21 +00004126 break;
Roland Levillain6d0e4832014-11-27 18:31:21 +00004127
Roland Levillaincff13742014-11-17 14:32:17 +00004128 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00004129 // Processing a Dex `double-to-float' instruction.
4130 __ vcvtsd(out.AsFpuRegister<SRegister>(),
4131 FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
Roland Levillaincff13742014-11-17 14:32:17 +00004132 break;
4133
4134 default:
4135 LOG(FATAL) << "Unexpected type conversion from " << input_type
4136 << " to " << result_type;
 4137      }
4138 break;
4139
Roland Levillaindff1f282014-11-05 14:15:05 +00004140 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00004141 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00004142 case Primitive::kPrimBoolean:
4143 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00004144 case Primitive::kPrimByte:
4145 case Primitive::kPrimShort:
4146 case Primitive::kPrimInt:
4147 case Primitive::kPrimChar: {
4148 // Processing a Dex `int-to-double' instruction.
Roland Levillain271ab9c2014-11-27 15:23:57 +00004149 __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
Roland Levillaincff13742014-11-17 14:32:17 +00004150 __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
4151 out.AsFpuRegisterPairLow<SRegister>());
4152 break;
4153 }
4154
Roland Levillain647b9ed2014-11-27 12:06:00 +00004155 case Primitive::kPrimLong: {
4156 // Processing a Dex `long-to-double' instruction.
4157 Register low = in.AsRegisterPairLow<Register>();
4158 Register high = in.AsRegisterPairHigh<Register>();
4159 SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
4160 DRegister out_d = FromLowSToD(out_s);
Roland Levillain682393c2015-04-14 15:57:52 +01004161 SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
Roland Levillain647b9ed2014-11-27 12:06:00 +00004162 DRegister temp_d = FromLowSToD(temp_s);
Roland Levillain682393c2015-04-14 15:57:52 +01004163 SRegister constant_s = locations->GetTemp(1).AsFpuRegisterPairLow<SRegister>();
4164 DRegister constant_d = FromLowSToD(constant_s);
Roland Levillain647b9ed2014-11-27 12:06:00 +00004165
Roland Levillain682393c2015-04-14 15:57:52 +01004166 // temp_d = int-to-double(high)
4167 __ vmovsr(temp_s, high);
4168 __ vcvtdi(temp_d, temp_s);
4169 // constant_d = k2Pow32EncodingForDouble
4170 __ LoadDImmediate(constant_d, bit_cast<double, int64_t>(k2Pow32EncodingForDouble));
4171 // out_d = unsigned-to-double(low)
4172 __ vmovsr(out_s, low);
4173 __ vcvtdu(out_d, out_s);
4174 // out_d += temp_d * constant_d
4175 __ vmlad(out_d, temp_d, constant_d);
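        // Net effect, in double arithmetic:
        //   double(in) = double((int32_t) high) * 2^32 + double((uint32_t) low)
        // Illustrative check: in = -4294967296 (high = 0xFFFFFFFF, low = 0) gives
        //   (-1.0) * 2^32 + 0.0 = -4294967296.0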
Roland Levillain647b9ed2014-11-27 12:06:00 +00004176 break;
4177 }
4178
Roland Levillaincff13742014-11-17 14:32:17 +00004179 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00004180 // Processing a Dex `float-to-double' instruction.
4181 __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
4182 in.AsFpuRegister<SRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00004183 break;
4184
4185 default:
4186 LOG(FATAL) << "Unexpected type conversion from " << input_type
4187 << " to " << result_type;
 4188      }
Roland Levillaindff1f282014-11-05 14:15:05 +00004189 break;
4190
4191 default:
4192 LOG(FATAL) << "Unexpected type conversion from " << input_type
4193 << " to " << result_type;
4194 }
4195}
4196
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00004197void LocationsBuilderARM::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004198 LocationSummary* locations =
4199 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00004200 switch (add->GetResultType()) {
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00004201 case Primitive::kPrimInt: {
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004202 locations->SetInAt(0, Location::RequiresRegister());
4203 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00004204 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4205 break;
4206 }
4207
4208 case Primitive::kPrimLong: {
4209 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko59751a72016-08-05 14:37:27 +01004210 locations->SetInAt(1, ArmEncodableConstantOrRegister(add->InputAt(1), ADD));
Nicolas Geoffray829280c2015-01-28 10:20:37 +00004211 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01004212 break;
4213 }
4214
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01004215 case Primitive::kPrimFloat:
4216 case Primitive::kPrimDouble: {
4217 locations->SetInAt(0, Location::RequiresFpuRegister());
4218 locations->SetInAt(1, Location::RequiresFpuRegister());
Calin Juravle7c4954d2014-10-28 16:57:40 +00004219 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01004220 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01004221 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01004222
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00004223 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01004224 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00004225 }
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00004226}
4227
4228void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
4229 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004230 Location out = locations->Out();
4231 Location first = locations->InAt(0);
4232 Location second = locations->InAt(1);
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00004233 switch (add->GetResultType()) {
4234 case Primitive::kPrimInt:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004235 if (second.IsRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00004236 __ add(out.AsRegister<Register>(),
4237 first.AsRegister<Register>(),
4238 ShifterOperand(second.AsRegister<Register>()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01004239 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004240 __ AddConstant(out.AsRegister<Register>(),
4241 first.AsRegister<Register>(),
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004242 second.GetConstant()->AsIntConstant()->GetValue());
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01004243 }
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00004244 break;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01004245
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00004246 case Primitive::kPrimLong: {
Vladimir Marko59751a72016-08-05 14:37:27 +01004247 if (second.IsConstant()) {
4248 uint64_t value = static_cast<uint64_t>(Int64FromConstant(second.GetConstant()));
4249 GenerateAddLongConst(out, first, value);
4250 } else {
4251 DCHECK(second.IsRegisterPair());
4252 __ adds(out.AsRegisterPairLow<Register>(),
4253 first.AsRegisterPairLow<Register>(),
4254 ShifterOperand(second.AsRegisterPairLow<Register>()));
4255 __ adc(out.AsRegisterPairHigh<Register>(),
4256 first.AsRegisterPairHigh<Register>(),
4257 ShifterOperand(second.AsRegisterPairHigh<Register>()));
4258 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01004259 break;
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00004260 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01004261
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01004262 case Primitive::kPrimFloat:
Roland Levillain199f3362014-11-27 17:15:16 +00004263 __ vadds(out.AsFpuRegister<SRegister>(),
4264 first.AsFpuRegister<SRegister>(),
4265 second.AsFpuRegister<SRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01004266 break;
4267
4268 case Primitive::kPrimDouble:
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00004269 __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
4270 FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
4271 FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01004272 break;
4273
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00004274 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01004275 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00004276 }
4277}
4278
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004279void LocationsBuilderARM::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004280 LocationSummary* locations =
4281 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004282 switch (sub->GetResultType()) {
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00004283 case Primitive::kPrimInt: {
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004284 locations->SetInAt(0, Location::RequiresRegister());
4285 locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00004286 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4287 break;
4288 }
4289
4290 case Primitive::kPrimLong: {
4291 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko59751a72016-08-05 14:37:27 +01004292 locations->SetInAt(1, ArmEncodableConstantOrRegister(sub->InputAt(1), SUB));
Nicolas Geoffray829280c2015-01-28 10:20:37 +00004293 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01004294 break;
4295 }
Calin Juravle11351682014-10-23 15:38:15 +01004296 case Primitive::kPrimFloat:
4297 case Primitive::kPrimDouble: {
4298 locations->SetInAt(0, Location::RequiresFpuRegister());
4299 locations->SetInAt(1, Location::RequiresFpuRegister());
Calin Juravle7c4954d2014-10-28 16:57:40 +00004300 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01004301 break;
Calin Juravle11351682014-10-23 15:38:15 +01004302 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004303 default:
Calin Juravle11351682014-10-23 15:38:15 +01004304 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004305 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004306}
4307
4308void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
4309 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01004310 Location out = locations->Out();
4311 Location first = locations->InAt(0);
4312 Location second = locations->InAt(1);
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004313 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01004314 case Primitive::kPrimInt: {
Calin Juravle11351682014-10-23 15:38:15 +01004315 if (second.IsRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00004316 __ sub(out.AsRegister<Register>(),
4317 first.AsRegister<Register>(),
4318 ShifterOperand(second.AsRegister<Register>()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01004319 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004320 __ AddConstant(out.AsRegister<Register>(),
4321 first.AsRegister<Register>(),
Calin Juravle11351682014-10-23 15:38:15 +01004322 -second.GetConstant()->AsIntConstant()->GetValue());
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01004323 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004324 break;
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01004325 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01004326
Calin Juravle11351682014-10-23 15:38:15 +01004327 case Primitive::kPrimLong: {
Vladimir Marko59751a72016-08-05 14:37:27 +01004328 if (second.IsConstant()) {
4329 uint64_t value = static_cast<uint64_t>(Int64FromConstant(second.GetConstant()));
4330 GenerateAddLongConst(out, first, -value);
4331 } else {
4332 DCHECK(second.IsRegisterPair());
4333 __ subs(out.AsRegisterPairLow<Register>(),
4334 first.AsRegisterPairLow<Register>(),
4335 ShifterOperand(second.AsRegisterPairLow<Register>()));
4336 __ sbc(out.AsRegisterPairHigh<Register>(),
4337 first.AsRegisterPairHigh<Register>(),
4338 ShifterOperand(second.AsRegisterPairHigh<Register>()));
4339 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01004340 break;
Calin Juravle11351682014-10-23 15:38:15 +01004341 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01004342
Calin Juravle11351682014-10-23 15:38:15 +01004343 case Primitive::kPrimFloat: {
Roland Levillain199f3362014-11-27 17:15:16 +00004344 __ vsubs(out.AsFpuRegister<SRegister>(),
4345 first.AsFpuRegister<SRegister>(),
4346 second.AsFpuRegister<SRegister>());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01004347 break;
Calin Juravle11351682014-10-23 15:38:15 +01004348 }
4349
4350 case Primitive::kPrimDouble: {
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00004351 __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
4352 FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
4353 FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
Calin Juravle11351682014-10-23 15:38:15 +01004354 break;
4355 }
4356
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01004357
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004358 default:
Calin Juravle11351682014-10-23 15:38:15 +01004359 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004360 }
4361}
4362
Calin Juravle34bacdf2014-10-07 20:23:36 +01004363void LocationsBuilderARM::VisitMul(HMul* mul) {
4364 LocationSummary* locations =
4365 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
4366 switch (mul->GetResultType()) {
4367 case Primitive::kPrimInt:
4368 case Primitive::kPrimLong: {
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004369 locations->SetInAt(0, Location::RequiresRegister());
4370 locations->SetInAt(1, Location::RequiresRegister());
4371 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Calin Juravle34bacdf2014-10-07 20:23:36 +01004372 break;
4373 }
4374
Calin Juravleb5bfa962014-10-21 18:02:24 +01004375 case Primitive::kPrimFloat:
4376 case Primitive::kPrimDouble: {
4377 locations->SetInAt(0, Location::RequiresFpuRegister());
4378 locations->SetInAt(1, Location::RequiresFpuRegister());
Calin Juravle7c4954d2014-10-28 16:57:40 +00004379 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Calin Juravle34bacdf2014-10-07 20:23:36 +01004380 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01004381 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01004382
4383 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01004384 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01004385 }
4386}
4387
4388void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
4389 LocationSummary* locations = mul->GetLocations();
4390 Location out = locations->Out();
4391 Location first = locations->InAt(0);
4392 Location second = locations->InAt(1);
4393 switch (mul->GetResultType()) {
4394 case Primitive::kPrimInt: {
Roland Levillain199f3362014-11-27 17:15:16 +00004395 __ mul(out.AsRegister<Register>(),
4396 first.AsRegister<Register>(),
4397 second.AsRegister<Register>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01004398 break;
4399 }
4400 case Primitive::kPrimLong: {
4401 Register out_hi = out.AsRegisterPairHigh<Register>();
4402 Register out_lo = out.AsRegisterPairLow<Register>();
4403 Register in1_hi = first.AsRegisterPairHigh<Register>();
4404 Register in1_lo = first.AsRegisterPairLow<Register>();
4405 Register in2_hi = second.AsRegisterPairHigh<Register>();
4406 Register in2_lo = second.AsRegisterPairLow<Register>();
4407
 4408    // Extra checks to protect against cases caused by the existence of the R1_R2 pair.
4409 // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
4410 // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
4411 DCHECK_NE(out_hi, in1_lo);
4412 DCHECK_NE(out_hi, in2_lo);
4413
4414 // input: in1 - 64 bits, in2 - 64 bits
4415 // output: out
 4416    // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo) * 2^32 + in1.lo * in2.lo
4417 // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
4418 // parts: out.lo = (in1.lo * in2.lo)[31:0]
4419
4420 // IP <- in1.lo * in2.hi
4421 __ mul(IP, in1_lo, in2_hi);
4422 // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
4423 __ mla(out_hi, in1_hi, in2_lo, IP);
4424 // out.lo <- (in1.lo * in2.lo)[31:0];
4425 __ umull(out_lo, IP, in1_lo, in2_lo);
4426 // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
4427 __ add(out_hi, out_hi, ShifterOperand(IP));
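      // Illustrative check of the decomposition: for in1 = in2 = 2^32 + 1
      // (hi = 1, lo = 1) the truncated 64-bit product is 2^33 + 1:
      //   in1.lo * in2.hi + in1.hi * in2.lo = 2, contributing 2 to out.hi
      //   in1.lo * in2.lo = 1, so its high half adds 0 and out.lo = 1
      //   out = 0x00000002_00000001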
4428 break;
4429 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01004430
4431 case Primitive::kPrimFloat: {
Roland Levillain199f3362014-11-27 17:15:16 +00004432 __ vmuls(out.AsFpuRegister<SRegister>(),
4433 first.AsFpuRegister<SRegister>(),
4434 second.AsFpuRegister<SRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01004435 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01004436 }
4437
4438 case Primitive::kPrimDouble: {
Nicolas Geoffray1ba0f592014-10-27 15:14:55 +00004439 __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
4440 FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
4441 FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
Calin Juravleb5bfa962014-10-21 18:02:24 +01004442 break;
4443 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01004444
4445 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01004446 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01004447 }
4448}
4449
Zheng Xuc6667102015-05-15 16:08:45 +08004450void InstructionCodeGeneratorARM::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
4451 DCHECK(instruction->IsDiv() || instruction->IsRem());
4452 DCHECK(instruction->GetResultType() == Primitive::kPrimInt);
4453
4454 LocationSummary* locations = instruction->GetLocations();
4455 Location second = locations->InAt(1);
4456 DCHECK(second.IsConstant());
4457
4458 Register out = locations->Out().AsRegister<Register>();
4459 Register dividend = locations->InAt(0).AsRegister<Register>();
4460 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
4461 DCHECK(imm == 1 || imm == -1);
4462
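  // x % 1 == x % -1 == 0, x / 1 == x and x / -1 == -x, so no real division is needed.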
4463 if (instruction->IsRem()) {
4464 __ LoadImmediate(out, 0);
4465 } else {
4466 if (imm == 1) {
4467 __ Mov(out, dividend);
4468 } else {
4469 __ rsb(out, dividend, ShifterOperand(0));
4470 }
4471 }
4472}
4473
4474void InstructionCodeGeneratorARM::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
4475 DCHECK(instruction->IsDiv() || instruction->IsRem());
4476 DCHECK(instruction->GetResultType() == Primitive::kPrimInt);
4477
4478 LocationSummary* locations = instruction->GetLocations();
4479 Location second = locations->InAt(1);
4480 DCHECK(second.IsConstant());
4481
4482 Register out = locations->Out().AsRegister<Register>();
4483 Register dividend = locations->InAt(0).AsRegister<Register>();
4484 Register temp = locations->GetTemp(0).AsRegister<Register>();
4485 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
Nicolas Geoffray68f62892016-01-04 08:39:49 +00004486 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
Zheng Xuc6667102015-05-15 16:08:45 +08004487 int ctz_imm = CTZ(abs_imm);
4488
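  // Signed division by +/-2^n must round toward zero, so a bias of (2^n - 1) is added
  // to negative dividends before the arithmetic shift; the bias is built from the
  // dividend's sign bits shifted right by (32 - n).
  // Illustrative example for imm = 4 (n = 2), dividend = -7:
  //   Asr(-7, 31) = 0xFFFFFFFF; Lsr(0xFFFFFFFF, 30) = 3; out = -7 + 3 = -4; -4 >> 2 = -1,
  // which is -7 / 4 truncated toward zero (a plain arithmetic shift would give -2).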
4489 if (ctz_imm == 1) {
4490 __ Lsr(temp, dividend, 32 - ctz_imm);
4491 } else {
4492 __ Asr(temp, dividend, 31);
4493 __ Lsr(temp, temp, 32 - ctz_imm);
4494 }
4495 __ add(out, temp, ShifterOperand(dividend));
4496
4497 if (instruction->IsDiv()) {
4498 __ Asr(out, out, ctz_imm);
4499 if (imm < 0) {
4500 __ rsb(out, out, ShifterOperand(0));
4501 }
4502 } else {
4503 __ ubfx(out, out, 0, ctz_imm);
4504 __ sub(out, out, ShifterOperand(temp));
4505 }
4506}
4507
4508void InstructionCodeGeneratorARM::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
4509 DCHECK(instruction->IsDiv() || instruction->IsRem());
4510 DCHECK(instruction->GetResultType() == Primitive::kPrimInt);
4511
4512 LocationSummary* locations = instruction->GetLocations();
4513 Location second = locations->InAt(1);
4514 DCHECK(second.IsConstant());
4515
4516 Register out = locations->Out().AsRegister<Register>();
4517 Register dividend = locations->InAt(0).AsRegister<Register>();
4518 Register temp1 = locations->GetTemp(0).AsRegister<Register>();
4519 Register temp2 = locations->GetTemp(1).AsRegister<Register>();
4520 int64_t imm = second.GetConstant()->AsIntConstant()->GetValue();
4521
4522 int64_t magic;
4523 int shift;
4524 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
4525
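  // Standard "magic number" signed division (cf. Hacker's Delight, ch. 10): the
  // quotient is read from the high 32 bits of magic * dividend, corrected by
  // +/- dividend when magic and imm have opposite signs, shifted right by `shift`,
  // and finally adjusted by the sign bit of the intermediate result.
  // Illustrative values (assumed, not read from CalculateMagicAndShiftForDivRem):
  // imm = 7 admits magic = 0x92492493 (negative) and shift = 2; for dividend = 21,
  // high32(magic * 21) = -9, then -9 + 21 = 12 and 12 >> 2 = 3 = 21 / 7.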
4526 __ LoadImmediate(temp1, magic);
4527 __ smull(temp2, temp1, dividend, temp1);
4528
4529 if (imm > 0 && magic < 0) {
4530 __ add(temp1, temp1, ShifterOperand(dividend));
4531 } else if (imm < 0 && magic > 0) {
4532 __ sub(temp1, temp1, ShifterOperand(dividend));
4533 }
4534
4535 if (shift != 0) {
4536 __ Asr(temp1, temp1, shift);
4537 }
4538
4539 if (instruction->IsDiv()) {
4540 __ sub(out, temp1, ShifterOperand(temp1, ASR, 31));
4541 } else {
4542 __ sub(temp1, temp1, ShifterOperand(temp1, ASR, 31));
4543 // TODO: Strength reduction for mls.
4544 __ LoadImmediate(temp2, imm);
4545 __ mls(out, temp1, temp2, dividend);
4546 }
4547}
4548
4549void InstructionCodeGeneratorARM::GenerateDivRemConstantIntegral(HBinaryOperation* instruction) {
4550 DCHECK(instruction->IsDiv() || instruction->IsRem());
4551 DCHECK(instruction->GetResultType() == Primitive::kPrimInt);
4552
4553 LocationSummary* locations = instruction->GetLocations();
4554 Location second = locations->InAt(1);
4555 DCHECK(second.IsConstant());
4556
4557 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
4558 if (imm == 0) {
 4559    // Do not generate anything. DivZeroCheck would prevent any code from being executed.
4560 } else if (imm == 1 || imm == -1) {
4561 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00004562 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Zheng Xuc6667102015-05-15 16:08:45 +08004563 DivRemByPowerOfTwo(instruction);
4564 } else {
4565 DCHECK(imm <= -2 || imm >= 2);
4566 GenerateDivRemWithAnyConstant(instruction);
4567 }
4568}
4569
Calin Juravle7c4954d2014-10-28 16:57:40 +00004570void LocationsBuilderARM::VisitDiv(HDiv* div) {
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004571 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
4572 if (div->GetResultType() == Primitive::kPrimLong) {
4573 // pLdiv runtime call.
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004574 call_kind = LocationSummary::kCallOnMainOnly;
Zheng Xuc6667102015-05-15 16:08:45 +08004575 } else if (div->GetResultType() == Primitive::kPrimInt && div->InputAt(1)->IsConstant()) {
4576 // sdiv will be replaced by other instruction sequence.
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004577 } else if (div->GetResultType() == Primitive::kPrimInt &&
4578 !codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
4579 // pIdivmod runtime call.
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004580 call_kind = LocationSummary::kCallOnMainOnly;
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004581 }
4582
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004583 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
4584
Calin Juravle7c4954d2014-10-28 16:57:40 +00004585 switch (div->GetResultType()) {
Calin Juravled0d48522014-11-04 16:40:20 +00004586 case Primitive::kPrimInt: {
Zheng Xuc6667102015-05-15 16:08:45 +08004587 if (div->InputAt(1)->IsConstant()) {
4588 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko13c86fd2015-11-11 12:37:46 +00004589 locations->SetInAt(1, Location::ConstantLocation(div->InputAt(1)->AsConstant()));
Zheng Xuc6667102015-05-15 16:08:45 +08004590 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00004591 int32_t value = div->InputAt(1)->AsIntConstant()->GetValue();
4592 if (value == 1 || value == 0 || value == -1) {
Zheng Xuc6667102015-05-15 16:08:45 +08004593 // No temp register required.
4594 } else {
4595 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00004596 if (!IsPowerOfTwo(AbsOrMin(value))) {
Zheng Xuc6667102015-05-15 16:08:45 +08004597 locations->AddTemp(Location::RequiresRegister());
4598 }
4599 }
4600 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004601 locations->SetInAt(0, Location::RequiresRegister());
4602 locations->SetInAt(1, Location::RequiresRegister());
4603 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4604 } else {
4605 InvokeRuntimeCallingConvention calling_convention;
4606 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4607 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004608 // Note: divmod will compute both the quotient and the remainder as the pair R0 and R1, but
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004609 // we only need the former.
4610 locations->SetOut(Location::RegisterLocation(R0));
4611 }
Calin Juravled0d48522014-11-04 16:40:20 +00004612 break;
4613 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00004614 case Primitive::kPrimLong: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004615 InvokeRuntimeCallingConvention calling_convention;
4616 locations->SetInAt(0, Location::RegisterPairLocation(
4617 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
4618 locations->SetInAt(1, Location::RegisterPairLocation(
4619 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00004620 locations->SetOut(Location::RegisterPairLocation(R0, R1));
Calin Juravle7c4954d2014-10-28 16:57:40 +00004621 break;
4622 }
4623 case Primitive::kPrimFloat:
4624 case Primitive::kPrimDouble: {
4625 locations->SetInAt(0, Location::RequiresFpuRegister());
4626 locations->SetInAt(1, Location::RequiresFpuRegister());
4627 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4628 break;
4629 }
4630
4631 default:
4632 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
4633 }
4634}
4635
4636void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
4637 LocationSummary* locations = div->GetLocations();
4638 Location out = locations->Out();
4639 Location first = locations->InAt(0);
4640 Location second = locations->InAt(1);
4641
4642 switch (div->GetResultType()) {
Calin Juravled0d48522014-11-04 16:40:20 +00004643 case Primitive::kPrimInt: {
Zheng Xuc6667102015-05-15 16:08:45 +08004644 if (second.IsConstant()) {
4645 GenerateDivRemConstantIntegral(div);
4646 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004647 __ sdiv(out.AsRegister<Register>(),
4648 first.AsRegister<Register>(),
4649 second.AsRegister<Register>());
4650 } else {
4651 InvokeRuntimeCallingConvention calling_convention;
4652 DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegister<Register>());
4653 DCHECK_EQ(calling_convention.GetRegisterAt(1), second.AsRegister<Register>());
4654 DCHECK_EQ(R0, out.AsRegister<Register>());
4655
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01004656 codegen_->InvokeRuntime(kQuickIdivmod, div, div->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004657 CheckEntrypointTypes<kQuickIdivmod, int32_t, int32_t, int32_t>();
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004658 }
Calin Juravled0d48522014-11-04 16:40:20 +00004659 break;
4660 }
4661
Calin Juravle7c4954d2014-10-28 16:57:40 +00004662 case Primitive::kPrimLong: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004663 InvokeRuntimeCallingConvention calling_convention;
4664 DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
4665 DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
4666 DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
4667 DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
4668 DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00004669 DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004670
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01004671 codegen_->InvokeRuntime(kQuickLdiv, div, div->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004672 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
Calin Juravle7c4954d2014-10-28 16:57:40 +00004673 break;
4674 }
4675
4676 case Primitive::kPrimFloat: {
Roland Levillain199f3362014-11-27 17:15:16 +00004677 __ vdivs(out.AsFpuRegister<SRegister>(),
4678 first.AsFpuRegister<SRegister>(),
4679 second.AsFpuRegister<SRegister>());
Calin Juravle7c4954d2014-10-28 16:57:40 +00004680 break;
4681 }
4682
4683 case Primitive::kPrimDouble: {
4684 __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
4685 FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
4686 FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
4687 break;
4688 }
4689
4690 default:
4691 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
4692 }
4693}
4694
Calin Juravlebacfec32014-11-14 15:54:36 +00004695void LocationsBuilderARM::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00004696 Primitive::Type type = rem->GetResultType();
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004697
4698 // Most remainders are implemented in the runtime.
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004699 LocationSummary::CallKind call_kind = LocationSummary::kCallOnMainOnly;
Zheng Xuc6667102015-05-15 16:08:45 +08004700 if (rem->GetResultType() == Primitive::kPrimInt && rem->InputAt(1)->IsConstant()) {
4701 // sdiv will be replaced by other instruction sequence.
4702 call_kind = LocationSummary::kNoCall;
4703 } else if ((rem->GetResultType() == Primitive::kPrimInt)
4704 && codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004705 // Have hardware divide instruction for int, do it with three instructions.
4706 call_kind = LocationSummary::kNoCall;
4707 }
4708
Calin Juravlebacfec32014-11-14 15:54:36 +00004709 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
4710
Calin Juravled2ec87d2014-12-08 14:24:46 +00004711 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00004712 case Primitive::kPrimInt: {
Zheng Xuc6667102015-05-15 16:08:45 +08004713 if (rem->InputAt(1)->IsConstant()) {
4714 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko13c86fd2015-11-11 12:37:46 +00004715 locations->SetInAt(1, Location::ConstantLocation(rem->InputAt(1)->AsConstant()));
Zheng Xuc6667102015-05-15 16:08:45 +08004716 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00004717 int32_t value = rem->InputAt(1)->AsIntConstant()->GetValue();
4718 if (value == 1 || value == 0 || value == -1) {
Zheng Xuc6667102015-05-15 16:08:45 +08004719 // No temp register required.
4720 } else {
4721 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00004722 if (!IsPowerOfTwo(AbsOrMin(value))) {
Zheng Xuc6667102015-05-15 16:08:45 +08004723 locations->AddTemp(Location::RequiresRegister());
4724 }
4725 }
4726 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004727 locations->SetInAt(0, Location::RequiresRegister());
4728 locations->SetInAt(1, Location::RequiresRegister());
4729 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4730 locations->AddTemp(Location::RequiresRegister());
4731 } else {
4732 InvokeRuntimeCallingConvention calling_convention;
4733 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4734 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004735 // Note: divmod will compute both the quotient and the remainder as the pair R0 and R1, but
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004736 // we only need the latter.
4737 locations->SetOut(Location::RegisterLocation(R1));
4738 }
Calin Juravlebacfec32014-11-14 15:54:36 +00004739 break;
4740 }
4741 case Primitive::kPrimLong: {
4742 InvokeRuntimeCallingConvention calling_convention;
4743 locations->SetInAt(0, Location::RegisterPairLocation(
4744 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
4745 locations->SetInAt(1, Location::RegisterPairLocation(
4746 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
4747 // The runtime helper puts the output in R2,R3.
4748 locations->SetOut(Location::RegisterPairLocation(R2, R3));
4749 break;
4750 }
Calin Juravled2ec87d2014-12-08 14:24:46 +00004751 case Primitive::kPrimFloat: {
4752 InvokeRuntimeCallingConvention calling_convention;
4753 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
4754 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
4755 locations->SetOut(Location::FpuRegisterLocation(S0));
4756 break;
4757 }
4758
Calin Juravlebacfec32014-11-14 15:54:36 +00004759 case Primitive::kPrimDouble: {
Calin Juravled2ec87d2014-12-08 14:24:46 +00004760 InvokeRuntimeCallingConvention calling_convention;
4761 locations->SetInAt(0, Location::FpuRegisterPairLocation(
4762 calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
4763 locations->SetInAt(1, Location::FpuRegisterPairLocation(
4764 calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
4765 locations->SetOut(Location::Location::FpuRegisterPairLocation(S0, S1));
Calin Juravlebacfec32014-11-14 15:54:36 +00004766 break;
4767 }
4768
4769 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00004770 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00004771 }
4772}
4773
4774void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
4775 LocationSummary* locations = rem->GetLocations();
4776 Location out = locations->Out();
4777 Location first = locations->InAt(0);
4778 Location second = locations->InAt(1);
4779
Calin Juravled2ec87d2014-12-08 14:24:46 +00004780 Primitive::Type type = rem->GetResultType();
4781 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00004782 case Primitive::kPrimInt: {
Zheng Xuc6667102015-05-15 16:08:45 +08004783 if (second.IsConstant()) {
4784 GenerateDivRemConstantIntegral(rem);
4785 } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004786 Register reg1 = first.AsRegister<Register>();
4787 Register reg2 = second.AsRegister<Register>();
4788 Register temp = locations->GetTemp(0).AsRegister<Register>();
Calin Juravlebacfec32014-11-14 15:54:36 +00004789
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004790 // temp = reg1 / reg2 (integer division)
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01004791 // dest = reg1 - temp * reg2
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004792 __ sdiv(temp, reg1, reg2);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01004793 __ mls(out.AsRegister<Register>(), temp, reg2, reg1);
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004794 } else {
4795 InvokeRuntimeCallingConvention calling_convention;
4796 DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegister<Register>());
4797 DCHECK_EQ(calling_convention.GetRegisterAt(1), second.AsRegister<Register>());
4798 DCHECK_EQ(R1, out.AsRegister<Register>());
4799
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01004800 codegen_->InvokeRuntime(kQuickIdivmod, rem, rem->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004801 CheckEntrypointTypes<kQuickIdivmod, int32_t, int32_t, int32_t>();
Andreas Gampeb51cdb32015-03-29 17:32:48 -07004802 }
Calin Juravlebacfec32014-11-14 15:54:36 +00004803 break;
4804 }
4805
4806 case Primitive::kPrimLong: {
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01004807 codegen_->InvokeRuntime(kQuickLmod, rem, rem->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004808 CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
Calin Juravlebacfec32014-11-14 15:54:36 +00004809 break;
4810 }
4811
Calin Juravled2ec87d2014-12-08 14:24:46 +00004812 case Primitive::kPrimFloat: {
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01004813 codegen_->InvokeRuntime(kQuickFmodf, rem, rem->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004814 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
Calin Juravled2ec87d2014-12-08 14:24:46 +00004815 break;
4816 }
4817
Calin Juravlebacfec32014-11-14 15:54:36 +00004818 case Primitive::kPrimDouble: {
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01004819 codegen_->InvokeRuntime(kQuickFmod, rem, rem->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004820 CheckEntrypointTypes<kQuickFmod, double, double, double>();
Calin Juravlebacfec32014-11-14 15:54:36 +00004821 break;
4822 }
4823
4824 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00004825 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00004826 }
4827}
4828
Calin Juravled0d48522014-11-04 16:40:20 +00004829void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004830 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004831 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Calin Juravled0d48522014-11-04 16:40:20 +00004832}
4833
4834void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Artem Serovf4d6aee2016-07-11 10:41:45 +01004835 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
Calin Juravled0d48522014-11-04 16:40:20 +00004836 codegen_->AddSlowPath(slow_path);
4837
4838 LocationSummary* locations = instruction->GetLocations();
4839 Location value = locations->InAt(0);
4840
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004841 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00004842 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06004843 case Primitive::kPrimByte:
4844 case Primitive::kPrimChar:
4845 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004846 case Primitive::kPrimInt: {
4847 if (value.IsRegister()) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01004848 __ CompareAndBranchIfZero(value.AsRegister<Register>(), slow_path->GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00004849 } else {
4850 DCHECK(value.IsConstant()) << value;
4851 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
4852 __ b(slow_path->GetEntryLabel());
4853 }
4854 }
4855 break;
4856 }
4857 case Primitive::kPrimLong: {
4858 if (value.IsRegisterPair()) {
4859 __ orrs(IP,
4860 value.AsRegisterPairLow<Register>(),
4861 ShifterOperand(value.AsRegisterPairHigh<Register>()));
4862 __ b(slow_path->GetEntryLabel(), EQ);
4863 } else {
4864 DCHECK(value.IsConstant()) << value;
4865 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
4866 __ b(slow_path->GetEntryLabel());
4867 }
4868 }
4869 break;
4870 default:
4871 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
4872 }
4873 }
Calin Juravled0d48522014-11-04 16:40:20 +00004874}
4875
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004876void InstructionCodeGeneratorARM::HandleIntegerRotate(LocationSummary* locations) {
4877 Register in = locations->InAt(0).AsRegister<Register>();
4878 Location rhs = locations->InAt(1);
4879 Register out = locations->Out().AsRegister<Register>();
4880
4881 if (rhs.IsConstant()) {
4882 // Arm32 and Thumb2 assemblers require a rotation on the interval [1,31],
4883 // so map all rotations to a +ve. equivalent in that range.
4884 // (e.g. left *or* right by -2 bits == 30 bits in the same direction.)
4885 uint32_t rot = CodeGenerator::GetInt32ValueOf(rhs.GetConstant()) & 0x1F;
4886 if (rot) {
4887 // Rotate, mapping left rotations to right equivalents if necessary.
4888 // (e.g. left by 2 bits == right by 30.)
4889 __ Ror(out, in, rot);
4890 } else if (out != in) {
4891 __ Mov(out, in);
4892 }
4893 } else {
4894 __ Ror(out, in, rhs.AsRegister<Register>());
4895 }
4896}
4897
4898// Gain some speed by mapping all Long rotates onto equivalent pairs of Integer
4899// rotates by swapping input regs (effectively rotating by the first 32-bits of
4900// a larger rotation) or flipping direction (thus treating larger right/left
4901// rotations as sub-word sized rotations in the other direction) as appropriate.
Anton Kirilov6f644202017-02-27 18:29:45 +00004902void InstructionCodeGeneratorARM::HandleLongRotate(HRor* ror) {
4903 LocationSummary* locations = ror->GetLocations();
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004904 Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
4905 Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
4906 Location rhs = locations->InAt(1);
4907 Register out_reg_lo = locations->Out().AsRegisterPairLow<Register>();
4908 Register out_reg_hi = locations->Out().AsRegisterPairHigh<Register>();
4909
4910 if (rhs.IsConstant()) {
4911 uint64_t rot = CodeGenerator::GetInt64ValueOf(rhs.GetConstant());
4912 // Map all rotations to +ve. equivalents on the interval [0,63].
Roland Levillain5b5b9312016-03-22 14:57:31 +00004913 rot &= kMaxLongShiftDistance;
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004914 // For rotates over a word in size, 'pre-rotate' by 32-bits to keep rotate
4915 // logic below to a simple pair of binary orr.
4916 // (e.g. 34 bits == in_reg swap + 2 bits right.)
4917 if (rot >= kArmBitsPerWord) {
4918 rot -= kArmBitsPerWord;
4919 std::swap(in_reg_hi, in_reg_lo);
4920 }
4921 // Rotate, or mov to out for zero or word size rotations.
4922 if (rot != 0u) {
4923 __ Lsr(out_reg_hi, in_reg_hi, rot);
4924 __ orr(out_reg_hi, out_reg_hi, ShifterOperand(in_reg_lo, arm::LSL, kArmBitsPerWord - rot));
4925 __ Lsr(out_reg_lo, in_reg_lo, rot);
4926 __ orr(out_reg_lo, out_reg_lo, ShifterOperand(in_reg_hi, arm::LSL, kArmBitsPerWord - rot));
4927 } else {
4928 __ Mov(out_reg_lo, in_reg_lo);
4929 __ Mov(out_reg_hi, in_reg_hi);
4930 }
4931 } else {
4932 Register shift_right = locations->GetTemp(0).AsRegister<Register>();
4933 Register shift_left = locations->GetTemp(1).AsRegister<Register>();
4934 Label end;
4935 Label shift_by_32_plus_shift_right;
Anton Kirilov6f644202017-02-27 18:29:45 +00004936 Label* final_label = codegen_->GetFinalLabel(ror, &end);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004937
4938 __ and_(shift_right, rhs.AsRegister<Register>(), ShifterOperand(0x1F));
4939 __ Lsrs(shift_left, rhs.AsRegister<Register>(), 6);
4940 __ rsb(shift_left, shift_right, ShifterOperand(kArmBitsPerWord), AL, kCcKeep);
4941 __ b(&shift_by_32_plus_shift_right, CC);
4942
4943 // out_reg_hi = (reg_hi << shift_left) | (reg_lo >> shift_right).
4944 // out_reg_lo = (reg_lo << shift_left) | (reg_hi >> shift_right).
4945 __ Lsl(out_reg_hi, in_reg_hi, shift_left);
4946 __ Lsr(out_reg_lo, in_reg_lo, shift_right);
4947 __ add(out_reg_hi, out_reg_hi, ShifterOperand(out_reg_lo));
4948 __ Lsl(out_reg_lo, in_reg_lo, shift_left);
4949 __ Lsr(shift_left, in_reg_hi, shift_right);
4950 __ add(out_reg_lo, out_reg_lo, ShifterOperand(shift_left));
Anton Kirilov6f644202017-02-27 18:29:45 +00004951 __ b(final_label);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004952
4953 __ Bind(&shift_by_32_plus_shift_right); // Shift by 32+shift_right.
4954 // out_reg_hi = (reg_hi >> shift_right) | (reg_lo << shift_left).
4955 // out_reg_lo = (reg_lo >> shift_right) | (reg_hi << shift_left).
4956 __ Lsr(out_reg_hi, in_reg_hi, shift_right);
4957 __ Lsl(out_reg_lo, in_reg_lo, shift_left);
4958 __ add(out_reg_hi, out_reg_hi, ShifterOperand(out_reg_lo));
4959 __ Lsr(out_reg_lo, in_reg_lo, shift_right);
4960 __ Lsl(shift_right, in_reg_hi, shift_left);
4961 __ add(out_reg_lo, out_reg_lo, ShifterOperand(shift_right));
4962
Anton Kirilov6f644202017-02-27 18:29:45 +00004963 if (end.IsLinked()) {
4964 __ Bind(&end);
4965 }
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004966 }
4967}
Roland Levillain22c49222016-03-18 14:04:28 +00004968
4969void LocationsBuilderARM::VisitRor(HRor* ror) {
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004970 LocationSummary* locations =
4971 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
4972 switch (ror->GetResultType()) {
4973 case Primitive::kPrimInt: {
4974 locations->SetInAt(0, Location::RequiresRegister());
4975 locations->SetInAt(1, Location::RegisterOrConstant(ror->InputAt(1)));
4976 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4977 break;
4978 }
4979 case Primitive::kPrimLong: {
4980 locations->SetInAt(0, Location::RequiresRegister());
4981 if (ror->InputAt(1)->IsConstant()) {
4982 locations->SetInAt(1, Location::ConstantLocation(ror->InputAt(1)->AsConstant()));
4983 } else {
4984 locations->SetInAt(1, Location::RequiresRegister());
4985 locations->AddTemp(Location::RequiresRegister());
4986 locations->AddTemp(Location::RequiresRegister());
4987 }
4988 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
4989 break;
4990 }
4991 default:
4992 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4993 }
4994}
4995
Roland Levillain22c49222016-03-18 14:04:28 +00004996void InstructionCodeGeneratorARM::VisitRor(HRor* ror) {
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004997 LocationSummary* locations = ror->GetLocations();
4998 Primitive::Type type = ror->GetResultType();
4999 switch (type) {
5000 case Primitive::kPrimInt: {
5001 HandleIntegerRotate(locations);
5002 break;
5003 }
5004 case Primitive::kPrimLong: {
Anton Kirilov6f644202017-02-27 18:29:45 +00005005 HandleLongRotate(ror);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00005006 break;
5007 }
5008 default:
5009 LOG(FATAL) << "Unexpected operation type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00005010 UNREACHABLE();
Scott Wakeling40a04bf2015-12-11 09:50:36 +00005011 }
5012}
5013
Calin Juravle9aec02f2014-11-18 23:06:35 +00005014void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
5015 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
5016
Guillaume "Vermeille" Sanchezfd18f5a2015-03-11 14:57:40 +00005017 LocationSummary* locations =
5018 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
Calin Juravle9aec02f2014-11-18 23:06:35 +00005019
5020 switch (op->GetResultType()) {
5021 case Primitive::kPrimInt: {
5022 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko33ad10e2015-11-10 19:31:26 +00005023 if (op->InputAt(1)->IsConstant()) {
5024 locations->SetInAt(1, Location::ConstantLocation(op->InputAt(1)->AsConstant()));
5025 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5026 } else {
5027 locations->SetInAt(1, Location::RequiresRegister());
5028 // Make the output overlap, as it will be used to hold the masked
5029 // second input.
5030 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
5031 }
Calin Juravle9aec02f2014-11-18 23:06:35 +00005032 break;
5033 }
5034 case Primitive::kPrimLong: {
Guillaume "Vermeille" Sanchezfd18f5a2015-03-11 14:57:40 +00005035 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko33ad10e2015-11-10 19:31:26 +00005036 if (op->InputAt(1)->IsConstant()) {
5037 locations->SetInAt(1, Location::ConstantLocation(op->InputAt(1)->AsConstant()));
5038 // For simplicity, use kOutputOverlap even though we only require that low registers
5039 // don't clash with high registers which the register allocator currently guarantees.
5040 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
5041 } else {
5042 locations->SetInAt(1, Location::RequiresRegister());
5043 locations->AddTemp(Location::RequiresRegister());
5044 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
5045 }
Calin Juravle9aec02f2014-11-18 23:06:35 +00005046 break;
5047 }
5048 default:
5049 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
5050 }
5051}
5052
5053void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
5054 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
5055
5056 LocationSummary* locations = op->GetLocations();
5057 Location out = locations->Out();
5058 Location first = locations->InAt(0);
5059 Location second = locations->InAt(1);
5060
5061 Primitive::Type type = op->GetResultType();
5062 switch (type) {
5063 case Primitive::kPrimInt: {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005064 Register out_reg = out.AsRegister<Register>();
5065 Register first_reg = first.AsRegister<Register>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00005066 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005067 Register second_reg = second.AsRegister<Register>();
Roland Levillainc9285912015-12-18 10:38:42 +00005068 // ARM doesn't mask the shift count so we need to do it ourselves.
Roland Levillain5b5b9312016-03-22 14:57:31 +00005069 __ and_(out_reg, second_reg, ShifterOperand(kMaxIntShiftDistance));
Calin Juravle9aec02f2014-11-18 23:06:35 +00005070 if (op->IsShl()) {
Nicolas Geoffraya4f35812015-06-22 23:12:45 +01005071 __ Lsl(out_reg, first_reg, out_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00005072 } else if (op->IsShr()) {
Nicolas Geoffraya4f35812015-06-22 23:12:45 +01005073 __ Asr(out_reg, first_reg, out_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00005074 } else {
Nicolas Geoffraya4f35812015-06-22 23:12:45 +01005075 __ Lsr(out_reg, first_reg, out_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00005076 }
5077 } else {
5078 int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
Roland Levillain5b5b9312016-03-22 14:57:31 +00005079 uint32_t shift_value = cst & kMaxIntShiftDistance;
Roland Levillainc9285912015-12-18 10:38:42 +00005080 if (shift_value == 0) { // ARM does not support shifting with 0 immediate.
Calin Juravle9aec02f2014-11-18 23:06:35 +00005081 __ Mov(out_reg, first_reg);
5082 } else if (op->IsShl()) {
5083 __ Lsl(out_reg, first_reg, shift_value);
5084 } else if (op->IsShr()) {
5085 __ Asr(out_reg, first_reg, shift_value);
5086 } else {
5087 __ Lsr(out_reg, first_reg, shift_value);
5088 }
5089 }
5090 break;
5091 }
5092 case Primitive::kPrimLong: {
Guillaume "Vermeille" Sanchezfd18f5a2015-03-11 14:57:40 +00005093 Register o_h = out.AsRegisterPairHigh<Register>();
5094 Register o_l = out.AsRegisterPairLow<Register>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00005095
Guillaume "Vermeille" Sanchezfd18f5a2015-03-11 14:57:40 +00005096 Register high = first.AsRegisterPairHigh<Register>();
5097 Register low = first.AsRegisterPairLow<Register>();
5098
Vladimir Marko33ad10e2015-11-10 19:31:26 +00005099 if (second.IsRegister()) {
5100 Register temp = locations->GetTemp(0).AsRegister<Register>();
Guillaume "Vermeille" Sanchezfd18f5a2015-03-11 14:57:40 +00005101
Vladimir Marko33ad10e2015-11-10 19:31:26 +00005102 Register second_reg = second.AsRegister<Register>();
5103
5104 if (op->IsShl()) {
Roland Levillain5b5b9312016-03-22 14:57:31 +00005105 __ and_(o_l, second_reg, ShifterOperand(kMaxLongShiftDistance));
Vladimir Marko33ad10e2015-11-10 19:31:26 +00005106 // Shift the high part
5107 __ Lsl(o_h, high, o_l);
5108 // Shift the low part and `or` what overflew on the high part
5109 __ rsb(temp, o_l, ShifterOperand(kArmBitsPerWord));
5110 __ Lsr(temp, low, temp);
5111 __ orr(o_h, o_h, ShifterOperand(temp));
5112 // If the shift is > 32 bits, override the high part
5113 __ subs(temp, o_l, ShifterOperand(kArmBitsPerWord));
5114 __ it(PL);
5115 __ Lsl(o_h, low, temp, PL);
5116 // Shift the low part
5117 __ Lsl(o_l, low, o_l);
5118 } else if (op->IsShr()) {
Roland Levillain5b5b9312016-03-22 14:57:31 +00005119 __ and_(o_h, second_reg, ShifterOperand(kMaxLongShiftDistance));
Vladimir Marko33ad10e2015-11-10 19:31:26 +00005120 // Shift the low part
5121 __ Lsr(o_l, low, o_h);
5122 // Shift the high part and `or` what underflew on the low part
5123 __ rsb(temp, o_h, ShifterOperand(kArmBitsPerWord));
5124 __ Lsl(temp, high, temp);
5125 __ orr(o_l, o_l, ShifterOperand(temp));
5126 // If the shift is > 32 bits, override the low part
5127 __ subs(temp, o_h, ShifterOperand(kArmBitsPerWord));
5128 __ it(PL);
5129 __ Asr(o_l, high, temp, PL);
5130 // Shift the high part
5131 __ Asr(o_h, high, o_h);
5132 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00005133 __ and_(o_h, second_reg, ShifterOperand(kMaxLongShiftDistance));
Vladimir Marko33ad10e2015-11-10 19:31:26 +00005134 // same as Shr except we use `Lsr`s and not `Asr`s
5135 __ Lsr(o_l, low, o_h);
5136 __ rsb(temp, o_h, ShifterOperand(kArmBitsPerWord));
5137 __ Lsl(temp, high, temp);
5138 __ orr(o_l, o_l, ShifterOperand(temp));
5139 __ subs(temp, o_h, ShifterOperand(kArmBitsPerWord));
5140 __ it(PL);
5141 __ Lsr(o_l, high, temp, PL);
5142 __ Lsr(o_h, high, o_h);
5143 }
Calin Juravle9aec02f2014-11-18 23:06:35 +00005144 } else {
Vladimir Marko33ad10e2015-11-10 19:31:26 +00005145 // Register allocator doesn't create partial overlap.
5146 DCHECK_NE(o_l, high);
5147 DCHECK_NE(o_h, low);
5148 int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
Roland Levillain5b5b9312016-03-22 14:57:31 +00005149 uint32_t shift_value = cst & kMaxLongShiftDistance;
Vladimir Marko33ad10e2015-11-10 19:31:26 +00005150 if (shift_value > 32) {
5151 if (op->IsShl()) {
5152 __ Lsl(o_h, low, shift_value - 32);
5153 __ LoadImmediate(o_l, 0);
5154 } else if (op->IsShr()) {
5155 __ Asr(o_l, high, shift_value - 32);
5156 __ Asr(o_h, high, 31);
5157 } else {
5158 __ Lsr(o_l, high, shift_value - 32);
5159 __ LoadImmediate(o_h, 0);
5160 }
5161 } else if (shift_value == 32) {
5162 if (op->IsShl()) {
5163 __ mov(o_h, ShifterOperand(low));
5164 __ LoadImmediate(o_l, 0);
5165 } else if (op->IsShr()) {
5166 __ mov(o_l, ShifterOperand(high));
5167 __ Asr(o_h, high, 31);
5168 } else {
5169 __ mov(o_l, ShifterOperand(high));
5170 __ LoadImmediate(o_h, 0);
5171 }
Vladimir Markof9d741e2015-11-20 15:08:11 +00005172 } else if (shift_value == 1) {
5173 if (op->IsShl()) {
5174 __ Lsls(o_l, low, 1);
5175 __ adc(o_h, high, ShifterOperand(high));
5176 } else if (op->IsShr()) {
5177 __ Asrs(o_h, high, 1);
5178 __ Rrx(o_l, low);
5179 } else {
5180 __ Lsrs(o_h, high, 1);
5181 __ Rrx(o_l, low);
5182 }
5183 } else {
5184 DCHECK(2 <= shift_value && shift_value < 32) << shift_value;
Vladimir Marko33ad10e2015-11-10 19:31:26 +00005185 if (op->IsShl()) {
5186 __ Lsl(o_h, high, shift_value);
5187 __ orr(o_h, o_h, ShifterOperand(low, LSR, 32 - shift_value));
5188 __ Lsl(o_l, low, shift_value);
5189 } else if (op->IsShr()) {
5190 __ Lsr(o_l, low, shift_value);
5191 __ orr(o_l, o_l, ShifterOperand(high, LSL, 32 - shift_value));
5192 __ Asr(o_h, high, shift_value);
5193 } else {
5194 __ Lsr(o_l, low, shift_value);
5195 __ orr(o_l, o_l, ShifterOperand(high, LSL, 32 - shift_value));
5196 __ Lsr(o_h, high, shift_value);
5197 }
5198 }
Calin Juravle9aec02f2014-11-18 23:06:35 +00005199 }
Calin Juravle9aec02f2014-11-18 23:06:35 +00005200 break;
5201 }
5202 default:
5203 LOG(FATAL) << "Unexpected operation type " << type;
Vladimir Marko33ad10e2015-11-10 19:31:26 +00005204 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00005205 }
5206}
5207
5208void LocationsBuilderARM::VisitShl(HShl* shl) {
5209 HandleShift(shl);
5210}
5211
5212void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
5213 HandleShift(shl);
5214}
5215
5216void LocationsBuilderARM::VisitShr(HShr* shr) {
5217 HandleShift(shr);
5218}
5219
5220void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
5221 HandleShift(shr);
5222}
5223
5224void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
5225 HandleShift(ushr);
5226}
5227
5228void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
5229 HandleShift(ushr);
5230}
5231
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01005232void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005233 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005234 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
David Brazdil6de19382016-01-08 17:37:10 +00005235 if (instruction->IsStringAlloc()) {
5236 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
5237 } else {
5238 InvokeRuntimeCallingConvention calling_convention;
5239 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00005240 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01005241 locations->SetOut(Location::RegisterLocation(R0));
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01005242}
5243
5244void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01005245 // Note: if heap poisoning is enabled, the entry point takes cares
5246 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00005247 if (instruction->IsStringAlloc()) {
5248 // String is allocated through StringFactory. Call NewEmptyString entry point.
5249 Register temp = instruction->GetLocations()->GetTemp(0).AsRegister<Register>();
Andreas Gampe542451c2016-07-26 09:02:02 -07005250 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00005251 __ LoadFromOffset(kLoadWord, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
5252 __ LoadFromOffset(kLoadWord, LR, temp, code_offset.Int32Value());
5253 __ blx(LR);
5254 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
5255 } else {
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01005256 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Nicolas Geoffray0d3998b2017-01-12 15:35:12 +00005257 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
David Brazdil6de19382016-01-08 17:37:10 +00005258 }
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01005259}
5260
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01005261void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
5262 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005263 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01005264 InvokeRuntimeCallingConvention calling_convention;
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01005265 locations->SetOut(Location::RegisterLocation(R0));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005266 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5267 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01005268}
5269
5270void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01005271 // Note: if heap poisoning is enabled, the entry point takes cares
5272 // of poisoning the reference.
Nicolas Geoffrayd0958442017-01-30 14:57:16 +00005273 QuickEntrypointEnum entrypoint =
5274 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
5275 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005276 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Nicolas Geoffrayd0958442017-01-30 14:57:16 +00005277 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01005278}
5279
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01005280void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005281 LocationSummary* locations =
5282 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya747a392014-04-17 14:56:23 +01005283 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
5284 if (location.IsStackSlot()) {
5285 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5286 } else if (location.IsDoubleStackSlot()) {
5287 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01005288 }
Nicolas Geoffraya747a392014-04-17 14:56:23 +01005289 locations->SetOut(location);
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01005290}
5291
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005292void InstructionCodeGeneratorARM::VisitParameterValue(
5293 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01005294 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005295}
5296
5297void LocationsBuilderARM::VisitCurrentMethod(HCurrentMethod* instruction) {
5298 LocationSummary* locations =
5299 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5300 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
5301}
5302
5303void InstructionCodeGeneratorARM::VisitCurrentMethod(HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
5304 // Nothing to do, the method is already at its location.
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01005305}
5306
Roland Levillain1cc5f2512014-10-22 18:06:21 +01005307void LocationsBuilderARM::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005308 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01005309 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005310 locations->SetInAt(0, Location::RequiresRegister());
5311 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffrayb55f8352014-04-07 15:26:35 +01005312}
5313
Roland Levillain1cc5f2512014-10-22 18:06:21 +01005314void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
5315 LocationSummary* locations = not_->GetLocations();
5316 Location out = locations->Out();
5317 Location in = locations->InAt(0);
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00005318 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01005319 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00005320 __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
Roland Levillain1cc5f2512014-10-22 18:06:21 +01005321 break;
5322
5323 case Primitive::kPrimLong:
Roland Levillain70566432014-10-24 16:20:17 +01005324 __ mvn(out.AsRegisterPairLow<Register>(),
5325 ShifterOperand(in.AsRegisterPairLow<Register>()));
5326 __ mvn(out.AsRegisterPairHigh<Register>(),
5327 ShifterOperand(in.AsRegisterPairHigh<Register>()));
Roland Levillain1cc5f2512014-10-22 18:06:21 +01005328 break;
5329
5330 default:
5331 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
5332 }
Nicolas Geoffrayb55f8352014-04-07 15:26:35 +01005333}
5334
David Brazdil66d126e2015-04-03 16:02:44 +01005335void LocationsBuilderARM::VisitBooleanNot(HBooleanNot* bool_not) {
5336 LocationSummary* locations =
5337 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
5338 locations->SetInAt(0, Location::RequiresRegister());
5339 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5340}
5341
5342void InstructionCodeGeneratorARM::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01005343 LocationSummary* locations = bool_not->GetLocations();
5344 Location out = locations->Out();
5345 Location in = locations->InAt(0);
5346 __ eor(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(1));
5347}
5348
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005349void LocationsBuilderARM::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005350 LocationSummary* locations =
5351 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00005352 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00005353 case Primitive::kPrimBoolean:
5354 case Primitive::kPrimByte:
5355 case Primitive::kPrimShort:
5356 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08005357 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00005358 case Primitive::kPrimLong: {
5359 locations->SetInAt(0, Location::RequiresRegister());
5360 locations->SetInAt(1, Location::RequiresRegister());
Nicolas Geoffray829280c2015-01-28 10:20:37 +00005361 // Output overlaps because it is written before doing the low comparison.
5362 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Calin Juravleddb7df22014-11-25 20:56:51 +00005363 break;
5364 }
5365 case Primitive::kPrimFloat:
5366 case Primitive::kPrimDouble: {
5367 locations->SetInAt(0, Location::RequiresFpuRegister());
Vladimir Marko37dd80d2016-08-01 17:41:45 +01005368 locations->SetInAt(1, ArithmeticZeroOrFpuRegister(compare->InputAt(1)));
Calin Juravleddb7df22014-11-25 20:56:51 +00005369 locations->SetOut(Location::RequiresRegister());
5370 break;
5371 }
5372 default:
5373 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
5374 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005375}
5376
5377void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005378 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00005379 Register out = locations->Out().AsRegister<Register>();
Calin Juravleddb7df22014-11-25 20:56:51 +00005380 Location left = locations->InAt(0);
5381 Location right = locations->InAt(1);
5382
Vladimir Markocf93a5c2015-06-16 11:33:24 +00005383 Label less, greater, done;
Anton Kirilov6f644202017-02-27 18:29:45 +00005384 Label* final_label = codegen_->GetFinalLabel(compare, &done);
Calin Juravleddb7df22014-11-25 20:56:51 +00005385 Primitive::Type type = compare->InputAt(0)->GetType();
Vladimir Markod6e069b2016-01-18 11:11:01 +00005386 Condition less_cond;
Calin Juravleddb7df22014-11-25 20:56:51 +00005387 switch (type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00005388 case Primitive::kPrimBoolean:
5389 case Primitive::kPrimByte:
5390 case Primitive::kPrimShort:
5391 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08005392 case Primitive::kPrimInt: {
5393 __ LoadImmediate(out, 0);
5394 __ cmp(left.AsRegister<Register>(),
5395 ShifterOperand(right.AsRegister<Register>())); // Signed compare.
5396 less_cond = LT;
5397 break;
5398 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005399 case Primitive::kPrimLong: {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01005400 __ cmp(left.AsRegisterPairHigh<Register>(),
5401 ShifterOperand(right.AsRegisterPairHigh<Register>())); // Signed compare.
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005402 __ b(&less, LT);
5403 __ b(&greater, GT);
Roland Levillain4fa13f62015-07-06 18:11:54 +01005404 // Do LoadImmediate before the last `cmp`, as LoadImmediate might affect the status flags.
Calin Juravleddb7df22014-11-25 20:56:51 +00005405 __ LoadImmediate(out, 0);
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01005406 __ cmp(left.AsRegisterPairLow<Register>(),
5407 ShifterOperand(right.AsRegisterPairLow<Register>())); // Unsigned compare.
Vladimir Markod6e069b2016-01-18 11:11:01 +00005408 less_cond = LO;
Calin Juravleddb7df22014-11-25 20:56:51 +00005409 break;
5410 }
5411 case Primitive::kPrimFloat:
5412 case Primitive::kPrimDouble: {
5413 __ LoadImmediate(out, 0);
Donghui Bai426b49c2016-11-08 14:55:38 +08005414 GenerateVcmp(compare, codegen_);
Calin Juravleddb7df22014-11-25 20:56:51 +00005415 __ vmstat(); // transfer FP status register to ARM APSR.
Vladimir Markod6e069b2016-01-18 11:11:01 +00005416 less_cond = ARMFPCondition(kCondLT, compare->IsGtBias());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005417 break;
5418 }
5419 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00005420 LOG(FATAL) << "Unexpected compare type " << type;
Vladimir Markod6e069b2016-01-18 11:11:01 +00005421 UNREACHABLE();
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005422 }
Aart Bika19616e2016-02-01 18:57:58 -08005423
Anton Kirilov6f644202017-02-27 18:29:45 +00005424 __ b(final_label, EQ);
Vladimir Markod6e069b2016-01-18 11:11:01 +00005425 __ b(&less, less_cond);
Calin Juravleddb7df22014-11-25 20:56:51 +00005426
5427 __ Bind(&greater);
5428 __ LoadImmediate(out, 1);
Anton Kirilov6f644202017-02-27 18:29:45 +00005429 __ b(final_label);
Calin Juravleddb7df22014-11-25 20:56:51 +00005430
5431 __ Bind(&less);
5432 __ LoadImmediate(out, -1);
5433
Anton Kirilov6f644202017-02-27 18:29:45 +00005434 if (done.IsLinked()) {
5435 __ Bind(&done);
5436 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005437}
5438
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01005439void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005440 LocationSummary* locations =
5441 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01005442 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray31d76b42014-06-09 15:02:22 +01005443 locations->SetInAt(i, Location::Any());
5444 }
5445 locations->SetOut(Location::Any());
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01005446}
5447
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005448void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01005449 LOG(FATAL) << "Unreachable";
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01005450}
5451
Roland Levillainc9285912015-12-18 10:38:42 +00005452void CodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
5453 // TODO (ported from quick): revisit ARM barrier kinds.
5454 DmbOptions flavor = DmbOptions::ISH; // Quiet C++ warnings.
Calin Juravle52c48962014-12-16 17:02:57 +00005455 switch (kind) {
5456 case MemBarrierKind::kAnyStore:
5457 case MemBarrierKind::kLoadAny:
5458 case MemBarrierKind::kAnyAny: {
Kenny Root1d8199d2015-06-02 11:01:10 -07005459 flavor = DmbOptions::ISH;
Calin Juravle52c48962014-12-16 17:02:57 +00005460 break;
5461 }
5462 case MemBarrierKind::kStoreStore: {
Kenny Root1d8199d2015-06-02 11:01:10 -07005463 flavor = DmbOptions::ISHST;
Calin Juravle52c48962014-12-16 17:02:57 +00005464 break;
5465 }
5466 default:
5467 LOG(FATAL) << "Unexpected memory barrier " << kind;
5468 }
Kenny Root1d8199d2015-06-02 11:01:10 -07005469 __ dmb(flavor);
Calin Juravle52c48962014-12-16 17:02:57 +00005470}
5471
5472void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
5473 uint32_t offset,
5474 Register out_lo,
5475 Register out_hi) {
5476 if (offset != 0) {
Roland Levillain3b359c72015-11-17 19:35:12 +00005477 // Ensure `out_lo` is different from `addr`, so that loading
5478 // `offset` into `out_lo` does not clutter `addr`.
5479 DCHECK_NE(out_lo, addr);
Calin Juravle52c48962014-12-16 17:02:57 +00005480 __ LoadImmediate(out_lo, offset);
Nicolas Geoffraybdcedd32015-01-09 08:48:29 +00005481 __ add(IP, addr, ShifterOperand(out_lo));
5482 addr = IP;
Calin Juravle52c48962014-12-16 17:02:57 +00005483 }
5484 __ ldrexd(out_lo, out_hi, addr);
5485}
5486
5487void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
5488 uint32_t offset,
5489 Register value_lo,
5490 Register value_hi,
5491 Register temp1,
Calin Juravle77520bc2015-01-12 18:45:46 +00005492 Register temp2,
5493 HInstruction* instruction) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00005494 Label fail;
Calin Juravle52c48962014-12-16 17:02:57 +00005495 if (offset != 0) {
5496 __ LoadImmediate(temp1, offset);
Nicolas Geoffraybdcedd32015-01-09 08:48:29 +00005497 __ add(IP, addr, ShifterOperand(temp1));
5498 addr = IP;
Calin Juravle52c48962014-12-16 17:02:57 +00005499 }
5500 __ Bind(&fail);
5501 // We need a load followed by store. (The address used in a STREX instruction must
5502 // be the same as the address in the most recently executed LDREX instruction.)
5503 __ ldrexd(temp1, temp2, addr);
Calin Juravle77520bc2015-01-12 18:45:46 +00005504 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005505 __ strexd(temp1, value_lo, value_hi, addr);
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01005506 __ CompareAndBranchIfNonZero(temp1, &fail);
Calin Juravle52c48962014-12-16 17:02:57 +00005507}
5508
5509void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
5510 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
5511
Nicolas Geoffray39468442014-09-02 15:17:15 +01005512 LocationSummary* locations =
5513 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005514 locations->SetInAt(0, Location::RequiresRegister());
Calin Juravle34166012014-12-19 17:22:29 +00005515
Calin Juravle52c48962014-12-16 17:02:57 +00005516 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005517 if (Primitive::IsFloatingPointType(field_type)) {
5518 locations->SetInAt(1, Location::RequiresFpuRegister());
5519 } else {
5520 locations->SetInAt(1, Location::RequiresRegister());
5521 }
5522
Calin Juravle52c48962014-12-16 17:02:57 +00005523 bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
Calin Juravle34166012014-12-19 17:22:29 +00005524 bool generate_volatile = field_info.IsVolatile()
5525 && is_wide
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005526 && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
Roland Levillain4d027112015-07-01 15:41:14 +01005527 bool needs_write_barrier =
5528 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01005529 // Temporary registers for the write barrier.
Calin Juravle52c48962014-12-16 17:02:57 +00005530 // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
Roland Levillain4d027112015-07-01 15:41:14 +01005531 if (needs_write_barrier) {
5532 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01005533 locations->AddTemp(Location::RequiresRegister());
Calin Juravle34166012014-12-19 17:22:29 +00005534 } else if (generate_volatile) {
Roland Levillainc9285912015-12-18 10:38:42 +00005535 // ARM encoding have some additional constraints for ldrexd/strexd:
Calin Juravle52c48962014-12-16 17:02:57 +00005536 // - registers need to be consecutive
5537 // - the first register should be even but not R14.
Roland Levillainc9285912015-12-18 10:38:42 +00005538 // We don't test for ARM yet, and the assertion makes sure that we
5539 // revisit this if we ever enable ARM encoding.
Calin Juravle52c48962014-12-16 17:02:57 +00005540 DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
5541
5542 locations->AddTemp(Location::RequiresRegister());
5543 locations->AddTemp(Location::RequiresRegister());
5544 if (field_type == Primitive::kPrimDouble) {
5545 // For doubles we need two more registers to copy the value.
5546 locations->AddTemp(Location::RegisterLocation(R2));
5547 locations->AddTemp(Location::RegisterLocation(R3));
5548 }
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01005549 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005550}
5551
Calin Juravle52c48962014-12-16 17:02:57 +00005552void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005553 const FieldInfo& field_info,
5554 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00005555 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
5556
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005557 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00005558 Register base = locations->InAt(0).AsRegister<Register>();
5559 Location value = locations->InAt(1);
5560
5561 bool is_volatile = field_info.IsVolatile();
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005562 bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
Calin Juravle52c48962014-12-16 17:02:57 +00005563 Primitive::Type field_type = field_info.GetFieldType();
5564 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01005565 bool needs_write_barrier =
5566 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00005567
5568 if (is_volatile) {
Roland Levillainc9285912015-12-18 10:38:42 +00005569 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00005570 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005571
5572 switch (field_type) {
5573 case Primitive::kPrimBoolean:
5574 case Primitive::kPrimByte: {
Calin Juravle52c48962014-12-16 17:02:57 +00005575 __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005576 break;
5577 }
5578
5579 case Primitive::kPrimShort:
5580 case Primitive::kPrimChar: {
Calin Juravle52c48962014-12-16 17:02:57 +00005581 __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005582 break;
5583 }
5584
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005585 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005586 case Primitive::kPrimNot: {
Roland Levillain4d027112015-07-01 15:41:14 +01005587 if (kPoisonHeapReferences && needs_write_barrier) {
5588 // Note that in the case where `value` is a null reference,
5589 // we do not enter this block, as a null reference does not
5590 // need poisoning.
5591 DCHECK_EQ(field_type, Primitive::kPrimNot);
5592 Register temp = locations->GetTemp(0).AsRegister<Register>();
5593 __ Mov(temp, value.AsRegister<Register>());
5594 __ PoisonHeapReference(temp);
5595 __ StoreToOffset(kStoreWord, temp, base, offset);
5596 } else {
5597 __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
5598 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005599 break;
5600 }
5601
5602 case Primitive::kPrimLong: {
Calin Juravle34166012014-12-19 17:22:29 +00005603 if (is_volatile && !atomic_ldrd_strd) {
Calin Juravle52c48962014-12-16 17:02:57 +00005604 GenerateWideAtomicStore(base, offset,
5605 value.AsRegisterPairLow<Register>(),
5606 value.AsRegisterPairHigh<Register>(),
5607 locations->GetTemp(0).AsRegister<Register>(),
Calin Juravle77520bc2015-01-12 18:45:46 +00005608 locations->GetTemp(1).AsRegister<Register>(),
5609 instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005610 } else {
5611 __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
Calin Juravle77520bc2015-01-12 18:45:46 +00005612 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005613 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005614 break;
5615 }
5616
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00005617 case Primitive::kPrimFloat: {
Calin Juravle52c48962014-12-16 17:02:57 +00005618 __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00005619 break;
5620 }
5621
5622 case Primitive::kPrimDouble: {
Calin Juravle52c48962014-12-16 17:02:57 +00005623 DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
Calin Juravle34166012014-12-19 17:22:29 +00005624 if (is_volatile && !atomic_ldrd_strd) {
Calin Juravle52c48962014-12-16 17:02:57 +00005625 Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
5626 Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();
5627
5628 __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);
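        // The 64-bit FP value is copied into the two core temporaries so that
        // GenerateWideAtomicStore can store it atomically (via an ldrexd/strexd
        // retry loop when the CPU lacks atomic ldrd/strd).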
5629
5630 GenerateWideAtomicStore(base, offset,
5631 value_reg_lo,
5632 value_reg_hi,
5633 locations->GetTemp(2).AsRegister<Register>(),
Calin Juravle77520bc2015-01-12 18:45:46 +00005634 locations->GetTemp(3).AsRegister<Register>(),
5635 instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005636 } else {
5637 __ StoreDToOffset(value_reg, base, offset);
Calin Juravle77520bc2015-01-12 18:45:46 +00005638 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005639 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00005640 break;
5641 }
5642
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005643 case Primitive::kPrimVoid:
5644 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07005645 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005646 }
Calin Juravle52c48962014-12-16 17:02:57 +00005647
Calin Juravle77520bc2015-01-12 18:45:46 +00005648 // Longs and doubles are handled in the switch.
5649 if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
5650 codegen_->MaybeRecordImplicitNullCheck(instruction);
5651 }
5652
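  // Reference stores also dirty the card for the holder object so the GC can find the
  // new reference; `value_can_be_null` presumably lets MarkGCCard skip its null check
  // when the compiler has proven the stored value non-null.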
5653 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
5654 Register temp = locations->GetTemp(0).AsRegister<Register>();
5655 Register card = locations->GetTemp(1).AsRegister<Register>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005656 codegen_->MarkGCCard(
5657 temp, card, base, value.AsRegister<Register>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00005658 }
5659
Calin Juravle52c48962014-12-16 17:02:57 +00005660 if (is_volatile) {
Roland Levillainc9285912015-12-18 10:38:42 +00005661 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00005662 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005663}
5664
Calin Juravle52c48962014-12-16 17:02:57 +00005665void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
5666 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain3b359c72015-11-17 19:35:12 +00005667
5668 bool object_field_get_with_read_barrier =
5669 kEmitCompilerReadBarrier && (field_info.GetFieldType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005670 LocationSummary* locations =
Roland Levillain3b359c72015-11-17 19:35:12 +00005671 new (GetGraph()->GetArena()) LocationSummary(instruction,
5672 object_field_get_with_read_barrier ?
5673 LocationSummary::kCallOnSlowPath :
5674 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01005675 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005676 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005677 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005678 locations->SetInAt(0, Location::RequiresRegister());
Calin Juravle52c48962014-12-16 17:02:57 +00005679
Nicolas Geoffray829280c2015-01-28 10:20:37 +00005680 bool volatile_for_double = field_info.IsVolatile()
Calin Juravle34166012014-12-19 17:22:29 +00005681 && (field_info.GetFieldType() == Primitive::kPrimDouble)
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005682 && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
Roland Levillain3b359c72015-11-17 19:35:12 +00005683    // The output overlaps in the case of a volatile long: we don't want the
5684 // code generated by GenerateWideAtomicLoad to overwrite the
5685 // object's location. Likewise, in the case of an object field get
5686 // with read barriers enabled, we do not want the load to overwrite
5687 // the object's location, as we need it to emit the read barrier.
5688 bool overlap = (field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong)) ||
5689 object_field_get_with_read_barrier;
Nicolas Geoffrayacc0b8e2015-04-20 12:39:57 +01005690
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005691 if (Primitive::IsFloatingPointType(instruction->GetType())) {
5692 locations->SetOut(Location::RequiresFpuRegister());
5693 } else {
5694 locations->SetOut(Location::RequiresRegister(),
5695 (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
5696 }
Nicolas Geoffray829280c2015-01-28 10:20:37 +00005697 if (volatile_for_double) {
Roland Levillainc9285912015-12-18 10:38:42 +00005698    // The ARM encoding has some additional constraints for ldrexd/strexd:
Calin Juravle52c48962014-12-16 17:02:57 +00005699 // - registers need to be consecutive
5700 // - the first register should be even but not R14.
Roland Levillainc9285912015-12-18 10:38:42 +00005701    // We don't support the ARM encoding yet, and the assertion below makes
 5702    // sure that we revisit this if we ever enable it.
Calin Juravle52c48962014-12-16 17:02:57 +00005703 DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
5704 locations->AddTemp(Location::RequiresRegister());
5705 locations->AddTemp(Location::RequiresRegister());
Roland Levillainc9285912015-12-18 10:38:42 +00005706 } else if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
5707 // We need a temporary register for the read barrier marking slow
5708 // path in CodeGeneratorARM::GenerateFieldLoadWithBakerReadBarrier.
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01005709 if (kBakerReadBarrierLinkTimeThunksEnableForFields &&
5710 !Runtime::Current()->UseJitCompilation()) {
5711 // If link-time thunks for the Baker read barrier are enabled, for AOT
5712 // loads we need a temporary only if the offset is too big.
5713 if (field_info.GetFieldOffset().Uint32Value() >= kReferenceLoadMinFarOffset) {
5714 locations->AddTemp(Location::RequiresRegister());
5715 }
5716 // And we always need the reserved entrypoint register.
5717 locations->AddTemp(Location::RegisterLocation(kBakerCcEntrypointRegister));
5718 } else {
5719 locations->AddTemp(Location::RequiresRegister());
5720 }
Calin Juravle52c48962014-12-16 17:02:57 +00005721 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005722}
5723
Vladimir Marko37dd80d2016-08-01 17:41:45 +01005724Location LocationsBuilderARM::ArithmeticZeroOrFpuRegister(HInstruction* input) {
5725 DCHECK(input->GetType() == Primitive::kPrimDouble || input->GetType() == Primitive::kPrimFloat)
5726 << input->GetType();
5727 if ((input->IsFloatConstant() && (input->AsFloatConstant()->IsArithmeticZero())) ||
5728 (input->IsDoubleConstant() && (input->AsDoubleConstant()->IsArithmeticZero()))) {
5729 return Location::ConstantLocation(input->AsConstant());
5730 } else {
5731 return Location::RequiresFpuRegister();
5732 }
5733}
5734
Vladimir Markod2b4ca22015-09-14 15:13:26 +01005735Location LocationsBuilderARM::ArmEncodableConstantOrRegister(HInstruction* constant,
5736 Opcode opcode) {
5737 DCHECK(!Primitive::IsFloatingPointType(constant->GetType()));
5738 if (constant->IsConstant() &&
5739 CanEncodeConstantAsImmediate(constant->AsConstant(), opcode)) {
5740 return Location::ConstantLocation(constant->AsConstant());
5741 }
5742 return Location::RequiresRegister();
5743}
5744
5745bool LocationsBuilderARM::CanEncodeConstantAsImmediate(HConstant* input_cst,
5746 Opcode opcode) {
5747 uint64_t value = static_cast<uint64_t>(Int64FromConstant(input_cst));
5748 if (Primitive::Is64BitType(input_cst->GetType())) {
Vladimir Marko59751a72016-08-05 14:37:27 +01005749 Opcode high_opcode = opcode;
5750 SetCc low_set_cc = kCcDontCare;
5751 switch (opcode) {
5752 case SUB:
5753 // Flip the operation to an ADD.
5754 value = -value;
5755 opcode = ADD;
5756 FALLTHROUGH_INTENDED;
5757 case ADD:
5758 if (Low32Bits(value) == 0u) {
5759 return CanEncodeConstantAsImmediate(High32Bits(value), opcode, kCcDontCare);
5760 }
5761 high_opcode = ADC;
5762 low_set_cc = kCcSet;
5763 break;
5764 default:
5765 break;
5766 }
5767 return CanEncodeConstantAsImmediate(Low32Bits(value), opcode, low_set_cc) &&
5768 CanEncodeConstantAsImmediate(High32Bits(value), high_opcode, kCcDontCare);
Vladimir Markod2b4ca22015-09-14 15:13:26 +01005769 } else {
5770 return CanEncodeConstantAsImmediate(Low32Bits(value), opcode);
5771 }
5772}
5773
Vladimir Marko59751a72016-08-05 14:37:27 +01005774bool LocationsBuilderARM::CanEncodeConstantAsImmediate(uint32_t value,
5775 Opcode opcode,
5776 SetCc set_cc) {
Vladimir Markod2b4ca22015-09-14 15:13:26 +01005777 ShifterOperand so;
5778 ArmAssembler* assembler = codegen_->GetAssembler();
Vladimir Marko59751a72016-08-05 14:37:27 +01005779 if (assembler->ShifterOperandCanHold(kNoRegister, kNoRegister, opcode, value, set_cc, &so)) {
Vladimir Markod2b4ca22015-09-14 15:13:26 +01005780 return true;
5781 }
5782 Opcode neg_opcode = kNoOperand;
Anton Kiriloveffd5bf2017-02-28 16:59:15 +00005783 uint32_t neg_value = 0;
Vladimir Markod2b4ca22015-09-14 15:13:26 +01005784 switch (opcode) {
Anton Kiriloveffd5bf2017-02-28 16:59:15 +00005785 case AND: neg_opcode = BIC; neg_value = ~value; break;
5786 case ORR: neg_opcode = ORN; neg_value = ~value; break;
5787 case ADD: neg_opcode = SUB; neg_value = -value; break;
5788 case ADC: neg_opcode = SBC; neg_value = ~value; break;
5789 case SUB: neg_opcode = ADD; neg_value = -value; break;
5790 case SBC: neg_opcode = ADC; neg_value = ~value; break;
5791 case MOV: neg_opcode = MVN; neg_value = ~value; break;
Vladimir Markod2b4ca22015-09-14 15:13:26 +01005792 default:
5793 return false;
5794 }
Anton Kiriloveffd5bf2017-02-28 16:59:15 +00005795
5796 if (assembler->ShifterOperandCanHold(kNoRegister,
5797 kNoRegister,
5798 neg_opcode,
5799 neg_value,
5800 set_cc,
5801 &so)) {
5802 return true;
5803 }
5804
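  // As a last resort, an AND with a mask of the form 2^n - 1 (all low bits set) can still
  // avoid materializing the constant, e.g. via a bit-field extract (ubfx), even though
  // neither the value nor its complement encodes as a modified immediate.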
5805 return opcode == AND && IsPowerOfTwo(value + 1);
Vladimir Markod2b4ca22015-09-14 15:13:26 +01005806}
5807
Calin Juravle52c48962014-12-16 17:02:57 +00005808void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
5809 const FieldInfo& field_info) {
5810 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005811
Calin Juravle52c48962014-12-16 17:02:57 +00005812 LocationSummary* locations = instruction->GetLocations();
Roland Levillain3b359c72015-11-17 19:35:12 +00005813 Location base_loc = locations->InAt(0);
5814 Register base = base_loc.AsRegister<Register>();
Calin Juravle52c48962014-12-16 17:02:57 +00005815 Location out = locations->Out();
5816 bool is_volatile = field_info.IsVolatile();
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005817 bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
Calin Juravle52c48962014-12-16 17:02:57 +00005818 Primitive::Type field_type = field_info.GetFieldType();
5819 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
5820
5821 switch (field_type) {
Roland Levillainc9285912015-12-18 10:38:42 +00005822 case Primitive::kPrimBoolean:
Calin Juravle52c48962014-12-16 17:02:57 +00005823 __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005824 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005825
Roland Levillainc9285912015-12-18 10:38:42 +00005826 case Primitive::kPrimByte:
Calin Juravle52c48962014-12-16 17:02:57 +00005827 __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005828 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005829
Roland Levillainc9285912015-12-18 10:38:42 +00005830 case Primitive::kPrimShort:
Calin Juravle52c48962014-12-16 17:02:57 +00005831 __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005832 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005833
Roland Levillainc9285912015-12-18 10:38:42 +00005834 case Primitive::kPrimChar:
Calin Juravle52c48962014-12-16 17:02:57 +00005835 __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005836 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005837
5838 case Primitive::kPrimInt:
Calin Juravle52c48962014-12-16 17:02:57 +00005839 __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005840 break;
Roland Levillainc9285912015-12-18 10:38:42 +00005841
5842 case Primitive::kPrimNot: {
5843 // /* HeapReference<Object> */ out = *(base + offset)
5844 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
5845 Location temp_loc = locations->GetTemp(0);
5846 // Note that a potential implicit null check is handled in this
5847 // CodeGeneratorARM::GenerateFieldLoadWithBakerReadBarrier call.
5848 codegen_->GenerateFieldLoadWithBakerReadBarrier(
5849 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
5850 if (is_volatile) {
5851 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
5852 }
5853 } else {
5854 __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
5855 codegen_->MaybeRecordImplicitNullCheck(instruction);
5856 if (is_volatile) {
5857 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
5858 }
5859 // If read barriers are enabled, emit read barriers other than
5860 // Baker's using a slow path (and also unpoison the loaded
5861 // reference, if heap poisoning is enabled).
5862 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
5863 }
5864 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005865 }
5866
Roland Levillainc9285912015-12-18 10:38:42 +00005867 case Primitive::kPrimLong:
Calin Juravle34166012014-12-19 17:22:29 +00005868 if (is_volatile && !atomic_ldrd_strd) {
Calin Juravle52c48962014-12-16 17:02:57 +00005869 GenerateWideAtomicLoad(base, offset,
5870 out.AsRegisterPairLow<Register>(),
5871 out.AsRegisterPairHigh<Register>());
5872 } else {
5873 __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
5874 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005875 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005876
Roland Levillainc9285912015-12-18 10:38:42 +00005877 case Primitive::kPrimFloat:
Calin Juravle52c48962014-12-16 17:02:57 +00005878 __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00005879 break;
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00005880
5881 case Primitive::kPrimDouble: {
Calin Juravle52c48962014-12-16 17:02:57 +00005882 DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
Calin Juravle34166012014-12-19 17:22:29 +00005883 if (is_volatile && !atomic_ldrd_strd) {
Calin Juravle52c48962014-12-16 17:02:57 +00005884 Register lo = locations->GetTemp(0).AsRegister<Register>();
5885 Register hi = locations->GetTemp(1).AsRegister<Register>();
5886 GenerateWideAtomicLoad(base, offset, lo, hi);
Calin Juravle77520bc2015-01-12 18:45:46 +00005887 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005888 __ vmovdrr(out_reg, lo, hi);
5889 } else {
5890 __ LoadDFromOffset(out_reg, base, offset);
Calin Juravle77520bc2015-01-12 18:45:46 +00005891 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005892 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00005893 break;
5894 }
5895
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005896 case Primitive::kPrimVoid:
Calin Juravle52c48962014-12-16 17:02:57 +00005897 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07005898 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005899 }
Calin Juravle52c48962014-12-16 17:02:57 +00005900
Roland Levillainc9285912015-12-18 10:38:42 +00005901 if (field_type == Primitive::kPrimNot || field_type == Primitive::kPrimDouble) {
5902 // Potential implicit null checks, in the case of reference or
5903 // double fields, are handled in the previous switch statement.
5904 } else {
Calin Juravle77520bc2015-01-12 18:45:46 +00005905 codegen_->MaybeRecordImplicitNullCheck(instruction);
5906 }
5907
Calin Juravle52c48962014-12-16 17:02:57 +00005908 if (is_volatile) {
Roland Levillainc9285912015-12-18 10:38:42 +00005909 if (field_type == Primitive::kPrimNot) {
5910 // Memory barriers, in the case of references, are also handled
5911 // in the previous switch statement.
5912 } else {
5913 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
5914 }
Roland Levillain4d027112015-07-01 15:41:14 +01005915 }
Calin Juravle52c48962014-12-16 17:02:57 +00005916}
5917
5918void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
5919 HandleFieldSet(instruction, instruction->GetFieldInfo());
5920}
5921
5922void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005923 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Calin Juravle52c48962014-12-16 17:02:57 +00005924}
5925
5926void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5927 HandleFieldGet(instruction, instruction->GetFieldInfo());
5928}
5929
5930void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5931 HandleFieldGet(instruction, instruction->GetFieldInfo());
5932}
5933
5934void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5935 HandleFieldGet(instruction, instruction->GetFieldInfo());
5936}
5937
5938void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5939 HandleFieldGet(instruction, instruction->GetFieldInfo());
5940}
5941
5942void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
5943 HandleFieldSet(instruction, instruction->GetFieldInfo());
5944}
5945
5946void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005947 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005948}
5949
Calin Juravlee460d1d2015-09-29 04:52:17 +01005950void LocationsBuilderARM::VisitUnresolvedInstanceFieldGet(
5951 HUnresolvedInstanceFieldGet* instruction) {
5952 FieldAccessCallingConventionARM calling_convention;
5953 codegen_->CreateUnresolvedFieldLocationSummary(
5954 instruction, instruction->GetFieldType(), calling_convention);
5955}
5956
5957void InstructionCodeGeneratorARM::VisitUnresolvedInstanceFieldGet(
5958 HUnresolvedInstanceFieldGet* instruction) {
5959 FieldAccessCallingConventionARM calling_convention;
5960 codegen_->GenerateUnresolvedFieldAccess(instruction,
5961 instruction->GetFieldType(),
5962 instruction->GetFieldIndex(),
5963 instruction->GetDexPc(),
5964 calling_convention);
5965}
5966
5967void LocationsBuilderARM::VisitUnresolvedInstanceFieldSet(
5968 HUnresolvedInstanceFieldSet* instruction) {
5969 FieldAccessCallingConventionARM calling_convention;
5970 codegen_->CreateUnresolvedFieldLocationSummary(
5971 instruction, instruction->GetFieldType(), calling_convention);
5972}
5973
5974void InstructionCodeGeneratorARM::VisitUnresolvedInstanceFieldSet(
5975 HUnresolvedInstanceFieldSet* instruction) {
5976 FieldAccessCallingConventionARM calling_convention;
5977 codegen_->GenerateUnresolvedFieldAccess(instruction,
5978 instruction->GetFieldType(),
5979 instruction->GetFieldIndex(),
5980 instruction->GetDexPc(),
5981 calling_convention);
5982}
5983
5984void LocationsBuilderARM::VisitUnresolvedStaticFieldGet(
5985 HUnresolvedStaticFieldGet* instruction) {
5986 FieldAccessCallingConventionARM calling_convention;
5987 codegen_->CreateUnresolvedFieldLocationSummary(
5988 instruction, instruction->GetFieldType(), calling_convention);
5989}
5990
5991void InstructionCodeGeneratorARM::VisitUnresolvedStaticFieldGet(
5992 HUnresolvedStaticFieldGet* instruction) {
5993 FieldAccessCallingConventionARM calling_convention;
5994 codegen_->GenerateUnresolvedFieldAccess(instruction,
5995 instruction->GetFieldType(),
5996 instruction->GetFieldIndex(),
5997 instruction->GetDexPc(),
5998 calling_convention);
5999}
6000
6001void LocationsBuilderARM::VisitUnresolvedStaticFieldSet(
6002 HUnresolvedStaticFieldSet* instruction) {
6003 FieldAccessCallingConventionARM calling_convention;
6004 codegen_->CreateUnresolvedFieldLocationSummary(
6005 instruction, instruction->GetFieldType(), calling_convention);
6006}
6007
6008void InstructionCodeGeneratorARM::VisitUnresolvedStaticFieldSet(
6009 HUnresolvedStaticFieldSet* instruction) {
6010 FieldAccessCallingConventionARM calling_convention;
6011 codegen_->GenerateUnresolvedFieldAccess(instruction,
6012 instruction->GetFieldType(),
6013 instruction->GetFieldIndex(),
6014 instruction->GetDexPc(),
6015 calling_convention);
6016}
6017
Nicolas Geoffraye5038322014-07-04 09:41:32 +01006018void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006019 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
6020 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01006021}
6022
Calin Juravle2ae48182016-03-16 14:05:09 +00006023void CodeGeneratorARM::GenerateImplicitNullCheck(HNullCheck* instruction) {
6024 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00006025 return;
6026 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00006027 Location obj = instruction->GetLocations()->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00006028
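  // Loading word 0 of the object into IP faults if the reference is null; the recorded
  // PC info below lets the fault handler turn that signal into a NullPointerException,
  // so no explicit compare-and-branch is needed.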
Calin Juravlecd6dffe2015-01-08 17:35:35 +00006029 __ LoadFromOffset(kLoadWord, IP, obj.AsRegister<Register>(), 0);
Calin Juravle2ae48182016-03-16 14:05:09 +00006030 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00006031}
6032
Calin Juravle2ae48182016-03-16 14:05:09 +00006033void CodeGeneratorARM::GenerateExplicitNullCheck(HNullCheck* instruction) {
Artem Serovf4d6aee2016-07-11 10:41:45 +01006034 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00006035 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01006036
6037 LocationSummary* locations = instruction->GetLocations();
6038 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01006039
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01006040 __ CompareAndBranchIfZero(obj.AsRegister<Register>(), slow_path->GetEntryLabel());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01006041}
6042
Calin Juravlecd6dffe2015-01-08 17:35:35 +00006043void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00006044 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00006045}
6046
Artem Serov6c916792016-07-11 14:02:34 +01006047static LoadOperandType GetLoadOperandType(Primitive::Type type) {
6048 switch (type) {
6049 case Primitive::kPrimNot:
6050 return kLoadWord;
6051 case Primitive::kPrimBoolean:
6052 return kLoadUnsignedByte;
6053 case Primitive::kPrimByte:
6054 return kLoadSignedByte;
6055 case Primitive::kPrimChar:
6056 return kLoadUnsignedHalfword;
6057 case Primitive::kPrimShort:
6058 return kLoadSignedHalfword;
6059 case Primitive::kPrimInt:
6060 return kLoadWord;
6061 case Primitive::kPrimLong:
6062 return kLoadWordPair;
6063 case Primitive::kPrimFloat:
6064 return kLoadSWord;
6065 case Primitive::kPrimDouble:
6066 return kLoadDWord;
6067 default:
6068 LOG(FATAL) << "Unreachable type " << type;
6069 UNREACHABLE();
6070 }
6071}
6072
6073static StoreOperandType GetStoreOperandType(Primitive::Type type) {
6074 switch (type) {
6075 case Primitive::kPrimNot:
6076 return kStoreWord;
6077 case Primitive::kPrimBoolean:
6078 case Primitive::kPrimByte:
6079 return kStoreByte;
6080 case Primitive::kPrimChar:
6081 case Primitive::kPrimShort:
6082 return kStoreHalfword;
6083 case Primitive::kPrimInt:
6084 return kStoreWord;
6085 case Primitive::kPrimLong:
6086 return kStoreWordPair;
6087 case Primitive::kPrimFloat:
6088 return kStoreSWord;
6089 case Primitive::kPrimDouble:
6090 return kStoreDWord;
6091 default:
6092 LOG(FATAL) << "Unreachable type " << type;
6093 UNREACHABLE();
6094 }
6095}
6096
6097void CodeGeneratorARM::LoadFromShiftedRegOffset(Primitive::Type type,
6098 Location out_loc,
6099 Register base,
6100 Register reg_offset,
6101 Condition cond) {
6102 uint32_t shift_count = Primitive::ComponentSizeShift(type);
6103 Address mem_address(base, reg_offset, Shift::LSL, shift_count);
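  // `reg_offset` holds an element index rather than a byte offset: the address mode
  // scales it by the component size (LSL #shift_count). StoreToShiftedRegOffset below
  // follows the same convention.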
6104
6105 switch (type) {
6106 case Primitive::kPrimByte:
6107 __ ldrsb(out_loc.AsRegister<Register>(), mem_address, cond);
6108 break;
6109 case Primitive::kPrimBoolean:
6110 __ ldrb(out_loc.AsRegister<Register>(), mem_address, cond);
6111 break;
6112 case Primitive::kPrimShort:
6113 __ ldrsh(out_loc.AsRegister<Register>(), mem_address, cond);
6114 break;
6115 case Primitive::kPrimChar:
6116 __ ldrh(out_loc.AsRegister<Register>(), mem_address, cond);
6117 break;
6118 case Primitive::kPrimNot:
6119 case Primitive::kPrimInt:
6120 __ ldr(out_loc.AsRegister<Register>(), mem_address, cond);
6121 break;
6122 // T32 doesn't support LoadFromShiftedRegOffset mem address mode for these types.
6123 case Primitive::kPrimLong:
6124 case Primitive::kPrimFloat:
6125 case Primitive::kPrimDouble:
6126 default:
6127 LOG(FATAL) << "Unreachable type " << type;
6128 UNREACHABLE();
6129 }
6130}
6131
6132void CodeGeneratorARM::StoreToShiftedRegOffset(Primitive::Type type,
6133 Location loc,
6134 Register base,
6135 Register reg_offset,
6136 Condition cond) {
6137 uint32_t shift_count = Primitive::ComponentSizeShift(type);
6138 Address mem_address(base, reg_offset, Shift::LSL, shift_count);
6139
6140 switch (type) {
6141 case Primitive::kPrimByte:
6142 case Primitive::kPrimBoolean:
6143 __ strb(loc.AsRegister<Register>(), mem_address, cond);
6144 break;
6145 case Primitive::kPrimShort:
6146 case Primitive::kPrimChar:
6147 __ strh(loc.AsRegister<Register>(), mem_address, cond);
6148 break;
6149 case Primitive::kPrimNot:
6150 case Primitive::kPrimInt:
6151 __ str(loc.AsRegister<Register>(), mem_address, cond);
6152 break;
6153 // T32 doesn't support StoreToShiftedRegOffset mem address mode for these types.
6154 case Primitive::kPrimLong:
6155 case Primitive::kPrimFloat:
6156 case Primitive::kPrimDouble:
6157 default:
6158 LOG(FATAL) << "Unreachable type " << type;
6159 UNREACHABLE();
6160 }
6161}
6162
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006163void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain3b359c72015-11-17 19:35:12 +00006164 bool object_array_get_with_read_barrier =
6165 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01006166 LocationSummary* locations =
Roland Levillain3b359c72015-11-17 19:35:12 +00006167 new (GetGraph()->GetArena()) LocationSummary(instruction,
6168 object_array_get_with_read_barrier ?
6169 LocationSummary::kCallOnSlowPath :
6170 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01006171 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006172 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006173 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01006174 locations->SetInAt(0, Location::RequiresRegister());
6175 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01006176 if (Primitive::IsFloatingPointType(instruction->GetType())) {
6177 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6178 } else {
Roland Levillain3b359c72015-11-17 19:35:12 +00006179 // The output overlaps in the case of an object array get with
6180 // read barriers enabled: we do not want the move to overwrite the
6181 // array's location, as we need it to emit the read barrier.
6182 locations->SetOut(
6183 Location::RequiresRegister(),
6184 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01006185 }
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01006186 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
6187 // We need a temporary register for the read barrier marking slow
6188 // path in CodeGeneratorARM::GenerateArrayLoadWithBakerReadBarrier.
6189 if (kBakerReadBarrierLinkTimeThunksEnableForFields &&
6190 !Runtime::Current()->UseJitCompilation() &&
6191 instruction->GetIndex()->IsConstant()) {
6192 // Array loads with constant index are treated as field loads.
6193 // If link-time thunks for the Baker read barrier are enabled, for AOT
6194 // constant index loads we need a temporary only if the offset is too big.
6195 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
6196 uint32_t index = instruction->GetIndex()->AsIntConstant()->GetValue();
6197 offset += index << Primitive::ComponentSizeShift(Primitive::kPrimNot);
6198 if (offset >= kReferenceLoadMinFarOffset) {
6199 locations->AddTemp(Location::RequiresRegister());
6200 }
6201 // And we always need the reserved entrypoint register.
6202 locations->AddTemp(Location::RegisterLocation(kBakerCcEntrypointRegister));
6203 } else if (kBakerReadBarrierLinkTimeThunksEnableForArrays &&
6204 !Runtime::Current()->UseJitCompilation() &&
6205 !instruction->GetIndex()->IsConstant()) {
6206 // We need a non-scratch temporary for the array data pointer.
6207 locations->AddTemp(Location::RequiresRegister());
6208 // And we always need the reserved entrypoint register.
6209 locations->AddTemp(Location::RegisterLocation(kBakerCcEntrypointRegister));
6210 } else {
6211 locations->AddTemp(Location::RequiresRegister());
6212 }
6213 } else if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
 6214    // Also need a temporary for the String compression feature.
Roland Levillainc9285912015-12-18 10:38:42 +00006215 locations->AddTemp(Location::RequiresRegister());
6216 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006217}
6218
6219void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
6220 LocationSummary* locations = instruction->GetLocations();
Roland Levillain3b359c72015-11-17 19:35:12 +00006221 Location obj_loc = locations->InAt(0);
6222 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006223 Location index = locations->InAt(1);
Roland Levillainc9285912015-12-18 10:38:42 +00006224 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01006225 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Roland Levillainc9285912015-12-18 10:38:42 +00006226 Primitive::Type type = instruction->GetType();
jessicahandojo05765752016-09-09 19:01:32 -07006227 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
6228 instruction->IsStringCharAt();
Artem Serov328429f2016-07-06 16:23:04 +01006229 HInstruction* array_instr = instruction->GetArray();
6230 bool has_intermediate_address = array_instr->IsIntermediateAddress();
Artem Serov6c916792016-07-11 14:02:34 +01006231
Roland Levillain4d027112015-07-01 15:41:14 +01006232 switch (type) {
Artem Serov6c916792016-07-11 14:02:34 +01006233 case Primitive::kPrimBoolean:
6234 case Primitive::kPrimByte:
6235 case Primitive::kPrimShort:
6236 case Primitive::kPrimChar:
Roland Levillainc9285912015-12-18 10:38:42 +00006237 case Primitive::kPrimInt: {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006238 Register length;
6239 if (maybe_compressed_char_at) {
6240 length = locations->GetTemp(0).AsRegister<Register>();
6241 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
6242 __ LoadFromOffset(kLoadWord, length, obj, count_offset);
6243 codegen_->MaybeRecordImplicitNullCheck(instruction);
6244 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006245 if (index.IsConstant()) {
Artem Serov6c916792016-07-11 14:02:34 +01006246 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
jessicahandojo05765752016-09-09 19:01:32 -07006247 if (maybe_compressed_char_at) {
jessicahandojo05765752016-09-09 19:01:32 -07006248 Label uncompressed_load, done;
Anton Kirilov6f644202017-02-27 18:29:45 +00006249 Label* final_label = codegen_->GetFinalLabel(instruction, &done);
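          // Shifting the count right by one moves the compression flag (bit 0) into the
          // carry flag and leaves the character count in `length`; carry set (CS) means
          // the string is uncompressed. The register-index path below uses the same trick.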
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006250 __ Lsrs(length, length, 1u); // LSRS has a 16-bit encoding, TST (immediate) does not.
6251 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
6252 "Expecting 0=compressed, 1=uncompressed");
6253 __ b(&uncompressed_load, CS);
jessicahandojo05765752016-09-09 19:01:32 -07006254 __ LoadFromOffset(kLoadUnsignedByte,
6255 out_loc.AsRegister<Register>(),
6256 obj,
6257 data_offset + const_index);
Anton Kirilov6f644202017-02-27 18:29:45 +00006258 __ b(final_label);
jessicahandojo05765752016-09-09 19:01:32 -07006259 __ Bind(&uncompressed_load);
6260 __ LoadFromOffset(GetLoadOperandType(Primitive::kPrimChar),
6261 out_loc.AsRegister<Register>(),
6262 obj,
6263 data_offset + (const_index << 1));
Anton Kirilov6f644202017-02-27 18:29:45 +00006264 if (done.IsLinked()) {
6265 __ Bind(&done);
6266 }
jessicahandojo05765752016-09-09 19:01:32 -07006267 } else {
6268 uint32_t full_offset = data_offset + (const_index << Primitive::ComponentSizeShift(type));
Artem Serov6c916792016-07-11 14:02:34 +01006269
jessicahandojo05765752016-09-09 19:01:32 -07006270 LoadOperandType load_type = GetLoadOperandType(type);
6271 __ LoadFromOffset(load_type, out_loc.AsRegister<Register>(), obj, full_offset);
6272 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006273 } else {
Artem Serov328429f2016-07-06 16:23:04 +01006274 Register temp = IP;
6275
6276 if (has_intermediate_address) {
6277 // We do not need to compute the intermediate address from the array: the
6278 // input instruction has done it already. See the comment in
6279 // `TryExtractArrayAccessAddress()`.
6280 if (kIsDebugBuild) {
6281 HIntermediateAddress* tmp = array_instr->AsIntermediateAddress();
6282 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), data_offset);
6283 }
6284 temp = obj;
6285 } else {
6286 __ add(temp, obj, ShifterOperand(data_offset));
6287 }
jessicahandojo05765752016-09-09 19:01:32 -07006288 if (maybe_compressed_char_at) {
6289 Label uncompressed_load, done;
Anton Kirilov6f644202017-02-27 18:29:45 +00006290 Label* final_label = codegen_->GetFinalLabel(instruction, &done);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006291 __ Lsrs(length, length, 1u); // LSRS has a 16-bit encoding, TST (immediate) does not.
6292 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
6293 "Expecting 0=compressed, 1=uncompressed");
6294 __ b(&uncompressed_load, CS);
jessicahandojo05765752016-09-09 19:01:32 -07006295 __ ldrb(out_loc.AsRegister<Register>(),
6296 Address(temp, index.AsRegister<Register>(), Shift::LSL, 0));
Anton Kirilov6f644202017-02-27 18:29:45 +00006297 __ b(final_label);
jessicahandojo05765752016-09-09 19:01:32 -07006298 __ Bind(&uncompressed_load);
6299 __ ldrh(out_loc.AsRegister<Register>(),
6300 Address(temp, index.AsRegister<Register>(), Shift::LSL, 1));
Anton Kirilov6f644202017-02-27 18:29:45 +00006301 if (done.IsLinked()) {
6302 __ Bind(&done);
6303 }
jessicahandojo05765752016-09-09 19:01:32 -07006304 } else {
6305 codegen_->LoadFromShiftedRegOffset(type, out_loc, temp, index.AsRegister<Register>());
6306 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006307 }
6308 break;
6309 }
6310
Roland Levillainc9285912015-12-18 10:38:42 +00006311 case Primitive::kPrimNot: {
Roland Levillain19c54192016-11-04 13:44:09 +00006312 // The read barrier instrumentation of object ArrayGet
6313 // instructions does not support the HIntermediateAddress
6314 // instruction.
6315 DCHECK(!(has_intermediate_address && kEmitCompilerReadBarrier));
6316
Roland Levillainc9285912015-12-18 10:38:42 +00006317 static_assert(
6318 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6319 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillainc9285912015-12-18 10:38:42 +00006320 // /* HeapReference<Object> */ out =
6321 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6322 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
6323 Location temp = locations->GetTemp(0);
6324 // Note that a potential implicit null check is handled in this
6325 // CodeGeneratorARM::GenerateArrayLoadWithBakerReadBarrier call.
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01006326 DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
6327 if (index.IsConstant()) {
6328 // Array load with a constant index can be treated as a field load.
6329 data_offset += helpers::Int32ConstantFrom(index) << Primitive::ComponentSizeShift(type);
6330 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6331 out_loc,
6332 obj,
6333 data_offset,
6334 locations->GetTemp(0),
6335 /* needs_null_check */ false);
6336 } else {
6337 codegen_->GenerateArrayLoadWithBakerReadBarrier(
6338 instruction, out_loc, obj, data_offset, index, temp, /* needs_null_check */ false);
6339 }
Roland Levillainc9285912015-12-18 10:38:42 +00006340 } else {
6341 Register out = out_loc.AsRegister<Register>();
6342 if (index.IsConstant()) {
6343 size_t offset =
6344 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
6345 __ LoadFromOffset(kLoadWord, out, obj, offset);
6346 codegen_->MaybeRecordImplicitNullCheck(instruction);
6347 // If read barriers are enabled, emit read barriers other than
6348 // Baker's using a slow path (and also unpoison the loaded
6349 // reference, if heap poisoning is enabled).
6350 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
6351 } else {
Artem Serov328429f2016-07-06 16:23:04 +01006352 Register temp = IP;
6353
6354 if (has_intermediate_address) {
6355 // We do not need to compute the intermediate address from the array: the
6356 // input instruction has done it already. See the comment in
6357 // `TryExtractArrayAccessAddress()`.
6358 if (kIsDebugBuild) {
6359 HIntermediateAddress* tmp = array_instr->AsIntermediateAddress();
6360 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), data_offset);
6361 }
6362 temp = obj;
6363 } else {
6364 __ add(temp, obj, ShifterOperand(data_offset));
6365 }
6366 codegen_->LoadFromShiftedRegOffset(type, out_loc, temp, index.AsRegister<Register>());
Artem Serov6c916792016-07-11 14:02:34 +01006367
Roland Levillainc9285912015-12-18 10:38:42 +00006368 codegen_->MaybeRecordImplicitNullCheck(instruction);
6369 // If read barriers are enabled, emit read barriers other than
6370 // Baker's using a slow path (and also unpoison the loaded
6371 // reference, if heap poisoning is enabled).
6372 codegen_->MaybeGenerateReadBarrierSlow(
6373 instruction, out_loc, out_loc, obj_loc, data_offset, index);
6374 }
6375 }
6376 break;
6377 }
6378
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006379 case Primitive::kPrimLong: {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006380 if (index.IsConstant()) {
Roland Levillain199f3362014-11-27 17:15:16 +00006381 size_t offset =
6382 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Roland Levillainc9285912015-12-18 10:38:42 +00006383 __ LoadFromOffset(kLoadWordPair, out_loc.AsRegisterPairLow<Register>(), obj, offset);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006384 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006385 __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
Roland Levillainc9285912015-12-18 10:38:42 +00006386 __ LoadFromOffset(kLoadWordPair, out_loc.AsRegisterPairLow<Register>(), IP, data_offset);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006387 }
6388 break;
6389 }
6390
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006391 case Primitive::kPrimFloat: {
Roland Levillainc9285912015-12-18 10:38:42 +00006392 SRegister out = out_loc.AsFpuRegister<SRegister>();
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006393 if (index.IsConstant()) {
6394 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillainc9285912015-12-18 10:38:42 +00006395 __ LoadSFromOffset(out, obj, offset);
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006396 } else {
6397 __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
Roland Levillainc9285912015-12-18 10:38:42 +00006398 __ LoadSFromOffset(out, IP, data_offset);
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006399 }
6400 break;
6401 }
6402
6403 case Primitive::kPrimDouble: {
Roland Levillainc9285912015-12-18 10:38:42 +00006404 SRegister out = out_loc.AsFpuRegisterPairLow<SRegister>();
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006405 if (index.IsConstant()) {
6406 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Roland Levillainc9285912015-12-18 10:38:42 +00006407 __ LoadDFromOffset(FromLowSToD(out), obj, offset);
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006408 } else {
6409 __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
Roland Levillainc9285912015-12-18 10:38:42 +00006410 __ LoadDFromOffset(FromLowSToD(out), IP, data_offset);
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006411 }
6412 break;
6413 }
6414
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006415 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01006416 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07006417 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006418 }
Roland Levillain4d027112015-07-01 15:41:14 +01006419
6420 if (type == Primitive::kPrimNot) {
Roland Levillainc9285912015-12-18 10:38:42 +00006421 // Potential implicit null checks, in the case of reference
6422 // arrays, are handled in the previous switch statement.
jessicahandojo05765752016-09-09 19:01:32 -07006423 } else if (!maybe_compressed_char_at) {
Roland Levillainc9285912015-12-18 10:38:42 +00006424 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01006425 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006426}
6427
6428void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01006429 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00006430
6431 bool needs_write_barrier =
6432 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillain3b359c72015-11-17 19:35:12 +00006433 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00006434
Nicolas Geoffray39468442014-09-02 15:17:15 +01006435 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006436 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01006437 may_need_runtime_call_for_type_check ?
Roland Levillain3b359c72015-11-17 19:35:12 +00006438 LocationSummary::kCallOnSlowPath :
6439 LocationSummary::kNoCall);
6440
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006441 locations->SetInAt(0, Location::RequiresRegister());
6442 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
6443 if (Primitive::IsFloatingPointType(value_type)) {
6444 locations->SetInAt(2, Location::RequiresFpuRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006445 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006446 locations->SetInAt(2, Location::RequiresRegister());
6447 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006448 if (needs_write_barrier) {
6449 // Temporary registers for the write barrier.
6450 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Roland Levillain4f6b0b52015-11-23 19:29:22 +00006451 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006452 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006453}
6454
6455void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
6456 LocationSummary* locations = instruction->GetLocations();
Roland Levillain3b359c72015-11-17 19:35:12 +00006457 Location array_loc = locations->InAt(0);
6458 Register array = array_loc.AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006459 Location index = locations->InAt(1);
Nicolas Geoffray39468442014-09-02 15:17:15 +01006460 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillain3b359c72015-11-17 19:35:12 +00006461 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00006462 bool needs_write_barrier =
6463 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Artem Serov6c916792016-07-11 14:02:34 +01006464 uint32_t data_offset =
6465 mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
6466 Location value_loc = locations->InAt(2);
Artem Serov328429f2016-07-06 16:23:04 +01006467 HInstruction* array_instr = instruction->GetArray();
6468 bool has_intermediate_address = array_instr->IsIntermediateAddress();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006469
6470 switch (value_type) {
6471 case Primitive::kPrimBoolean:
Artem Serov6c916792016-07-11 14:02:34 +01006472 case Primitive::kPrimByte:
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006473 case Primitive::kPrimShort:
Artem Serov6c916792016-07-11 14:02:34 +01006474 case Primitive::kPrimChar:
6475 case Primitive::kPrimInt: {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006476 if (index.IsConstant()) {
Artem Serov6c916792016-07-11 14:02:34 +01006477 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
6478 uint32_t full_offset =
6479 data_offset + (const_index << Primitive::ComponentSizeShift(value_type));
6480 StoreOperandType store_type = GetStoreOperandType(value_type);
6481 __ StoreToOffset(store_type, value_loc.AsRegister<Register>(), array, full_offset);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006482 } else {
Artem Serov328429f2016-07-06 16:23:04 +01006483 Register temp = IP;
6484
6485 if (has_intermediate_address) {
6486 // We do not need to compute the intermediate address from the array: the
6487 // input instruction has done it already. See the comment in
6488 // `TryExtractArrayAccessAddress()`.
6489 if (kIsDebugBuild) {
6490 HIntermediateAddress* tmp = array_instr->AsIntermediateAddress();
6491 DCHECK(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64() == data_offset);
6492 }
6493 temp = array;
6494 } else {
6495 __ add(temp, array, ShifterOperand(data_offset));
6496 }
Artem Serov6c916792016-07-11 14:02:34 +01006497 codegen_->StoreToShiftedRegOffset(value_type,
6498 value_loc,
Artem Serov328429f2016-07-06 16:23:04 +01006499 temp,
Artem Serov6c916792016-07-11 14:02:34 +01006500 index.AsRegister<Register>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006501 }
6502 break;
6503 }
6504
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006505 case Primitive::kPrimNot: {
Roland Levillain3b359c72015-11-17 19:35:12 +00006506 Register value = value_loc.AsRegister<Register>();
Artem Serov328429f2016-07-06 16:23:04 +01006507 // TryExtractArrayAccessAddress optimization is never applied for non-primitive ArraySet.
6508 // See the comment in instruction_simplifier_shared.cc.
6509 DCHECK(!has_intermediate_address);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006510
6511 if (instruction->InputAt(2)->IsNullConstant()) {
6512 // Just setting null.
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00006513 if (index.IsConstant()) {
Roland Levillain199f3362014-11-27 17:15:16 +00006514 size_t offset =
6515 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Artem Serov6c916792016-07-11 14:02:34 +01006516 __ StoreToOffset(kStoreWord, value, array, offset);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00006517 } else {
6518 DCHECK(index.IsRegister()) << index;
Artem Serov6c916792016-07-11 14:02:34 +01006519 __ add(IP, array, ShifterOperand(data_offset));
6520 codegen_->StoreToShiftedRegOffset(value_type,
6521 value_loc,
6522 IP,
6523 index.AsRegister<Register>());
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00006524 }
Roland Levillain1407ee72016-01-08 15:56:19 +00006525 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain3b359c72015-11-17 19:35:12 +00006526 DCHECK(!needs_write_barrier);
6527 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006528 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00006529 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006530
6531 DCHECK(needs_write_barrier);
Roland Levillain16d9f942016-08-25 17:27:56 +01006532 Location temp1_loc = locations->GetTemp(0);
6533 Register temp1 = temp1_loc.AsRegister<Register>();
6534 Location temp2_loc = locations->GetTemp(1);
6535 Register temp2 = temp2_loc.AsRegister<Register>();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006536 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6537 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6538 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6539 Label done;
Anton Kirilov6f644202017-02-27 18:29:45 +00006540 Label* final_label = codegen_->GetFinalLabel(instruction, &done);
Artem Serovf4d6aee2016-07-11 10:41:45 +01006541 SlowPathCodeARM* slow_path = nullptr;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006542
Roland Levillain3b359c72015-11-17 19:35:12 +00006543 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006544 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM(instruction);
6545 codegen_->AddSlowPath(slow_path);
6546 if (instruction->GetValueCanBeNull()) {
6547 Label non_zero;
6548 __ CompareAndBranchIfNonZero(value, &non_zero);
6549 if (index.IsConstant()) {
6550 size_t offset =
6551 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
6552 __ StoreToOffset(kStoreWord, value, array, offset);
6553 } else {
6554 DCHECK(index.IsRegister()) << index;
Artem Serov6c916792016-07-11 14:02:34 +01006555 __ add(IP, array, ShifterOperand(data_offset));
6556 codegen_->StoreToShiftedRegOffset(value_type,
6557 value_loc,
6558 IP,
6559 index.AsRegister<Register>());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006560 }
6561 codegen_->MaybeRecordImplicitNullCheck(instruction);
Anton Kirilov6f644202017-02-27 18:29:45 +00006562 __ b(final_label);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006563 __ Bind(&non_zero);
6564 }
6565
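      // The inline check below compares value->klass_ with the array's component type:
      // equal classes store directly; for a statically typed Object[] destination, a
      // component type whose super class is null (i.e. java.lang.Object) also passes;
      // anything else takes the ArraySet slow path.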
Roland Levillain9d6e1f82016-09-05 15:57:33 +01006566 // Note that when read barriers are enabled, the type checks
6567 // are performed without read barriers. This is fine, even in
6568 // the case where a class object is in the from-space after
6569 // the flip, as a comparison involving such a type would not
6570 // produce a false positive; it may of course produce a false
6571 // negative, in which case we would take the ArraySet slow
6572 // path.
Roland Levillain16d9f942016-08-25 17:27:56 +01006573
Roland Levillain9d6e1f82016-09-05 15:57:33 +01006574 // /* HeapReference<Class> */ temp1 = array->klass_
6575 __ LoadFromOffset(kLoadWord, temp1, array, class_offset);
6576 codegen_->MaybeRecordImplicitNullCheck(instruction);
6577 __ MaybeUnpoisonHeapReference(temp1);
Roland Levillain16d9f942016-08-25 17:27:56 +01006578
Roland Levillain9d6e1f82016-09-05 15:57:33 +01006579 // /* HeapReference<Class> */ temp1 = temp1->component_type_
6580 __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
6581 // /* HeapReference<Class> */ temp2 = value->klass_
6582 __ LoadFromOffset(kLoadWord, temp2, value, class_offset);
6583 // If heap poisoning is enabled, no need to unpoison `temp1`
6584 // nor `temp2`, as we are comparing two poisoned references.
6585 __ cmp(temp1, ShifterOperand(temp2));
Roland Levillain16d9f942016-08-25 17:27:56 +01006586
Roland Levillain9d6e1f82016-09-05 15:57:33 +01006587 if (instruction->StaticTypeOfArrayIsObjectArray()) {
6588 Label do_put;
6589 __ b(&do_put, EQ);
6590 // If heap poisoning is enabled, the `temp1` reference has
6591 // not been unpoisoned yet; unpoison it now.
Roland Levillain3b359c72015-11-17 19:35:12 +00006592 __ MaybeUnpoisonHeapReference(temp1);
6593
Roland Levillain9d6e1f82016-09-05 15:57:33 +01006594 // /* HeapReference<Class> */ temp1 = temp1->super_class_
6595 __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
6596 // If heap poisoning is enabled, no need to unpoison
6597 // `temp1`, as we are comparing against null below.
6598 __ CompareAndBranchIfNonZero(temp1, slow_path->GetEntryLabel());
6599 __ Bind(&do_put);
6600 } else {
6601 __ b(slow_path->GetEntryLabel(), NE);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006602 }
6603 }
6604
Artem Serov6c916792016-07-11 14:02:34 +01006605 Register source = value;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006606 if (kPoisonHeapReferences) {
6607 // Note that in the case where `value` is a null reference,
6608 // we do not enter this block, as a null reference does not
6609 // need poisoning.
6610 DCHECK_EQ(value_type, Primitive::kPrimNot);
6611 __ Mov(temp1, value);
6612 __ PoisonHeapReference(temp1);
6613 source = temp1;
6614 }
6615
6616 if (index.IsConstant()) {
6617 size_t offset =
6618 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
6619 __ StoreToOffset(kStoreWord, source, array, offset);
6620 } else {
6621 DCHECK(index.IsRegister()) << index;
Artem Serov6c916792016-07-11 14:02:34 +01006622
6623 __ add(IP, array, ShifterOperand(data_offset));
6624 codegen_->StoreToShiftedRegOffset(value_type,
6625 Location::RegisterLocation(source),
6626 IP,
6627 index.AsRegister<Register>());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006628 }
6629
Roland Levillain3b359c72015-11-17 19:35:12 +00006630 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006631 codegen_->MaybeRecordImplicitNullCheck(instruction);
6632 }
6633
6634 codegen_->MarkGCCard(temp1, temp2, array, value, instruction->GetValueCanBeNull());
6635
6636 if (done.IsLinked()) {
6637 __ Bind(&done);
6638 }
6639
6640 if (slow_path != nullptr) {
6641 __ Bind(slow_path->GetExitLabel());
6642 }
6643
6644 break;
6645 }
6646
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006647 case Primitive::kPrimLong: {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01006648 Location value = locations->InAt(2);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006649 if (index.IsConstant()) {
Roland Levillain199f3362014-11-27 17:15:16 +00006650 size_t offset =
6651 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006652 __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), array, offset);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006653 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006654 __ add(IP, array, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01006655 __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006656 }
6657 break;
6658 }
6659
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006660 case Primitive::kPrimFloat: {
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006661 Location value = locations->InAt(2);
6662 DCHECK(value.IsFpuRegister());
6663 if (index.IsConstant()) {
6664 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006665 __ StoreSToOffset(value.AsFpuRegister<SRegister>(), array, offset);
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006666 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006667 __ add(IP, array, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006668 __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
6669 }
6670 break;
6671 }
6672
6673 case Primitive::kPrimDouble: {
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006674 Location value = locations->InAt(2);
6675 DCHECK(value.IsFpuRegisterPair());
6676 if (index.IsConstant()) {
6677 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006678 __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), array, offset);
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006679 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01006680 __ add(IP, array, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006681 __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
6682 }
Calin Juravle77520bc2015-01-12 18:45:46 +00006683
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006684 break;
6685 }
6686
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006687 case Primitive::kPrimVoid:
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006688 LOG(FATAL) << "Unreachable type " << value_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07006689 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006690 }
Calin Juravle77520bc2015-01-12 18:45:46 +00006691
Roland Levillain80e67092016-01-08 16:04:55 +00006692 // Objects are handled in the switch.
6693 if (value_type != Primitive::kPrimNot) {
Calin Juravle77520bc2015-01-12 18:45:46 +00006694 codegen_->MaybeRecordImplicitNullCheck(instruction);
6695 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006696}
6697
6698void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01006699 LocationSummary* locations =
6700 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01006701 locations->SetInAt(0, Location::RequiresRegister());
6702 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006703}
6704
6705void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
6706 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01006707 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00006708 Register obj = locations->InAt(0).AsRegister<Register>();
6709 Register out = locations->Out().AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006710 __ LoadFromOffset(kLoadWord, out, obj, offset);
Calin Juravle77520bc2015-01-12 18:45:46 +00006711 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07006712 // Mask out compression flag from String's array length.
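  // The count field of a String packs the character count in the upper bits and keeps the
  // compression flag in bit 0, so a logical shift right by one recovers the length.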
6713 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01006714 __ Lsr(out, out, 1u);
jessicahandojo05765752016-09-09 19:01:32 -07006715 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006716}
6717
Artem Serov328429f2016-07-06 16:23:04 +01006718void LocationsBuilderARM::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Artem Serov328429f2016-07-06 16:23:04 +01006719 LocationSummary* locations =
6720 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6721
6722 locations->SetInAt(0, Location::RequiresRegister());
6723 locations->SetInAt(1, Location::RegisterOrConstant(instruction->GetOffset()));
6724 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6725}
6726
6727void InstructionCodeGeneratorARM::VisitIntermediateAddress(HIntermediateAddress* instruction) {
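  // HIntermediateAddress materializes `base + offset` (typically an array base plus its data
  // offset) in a register so that several subsequent accesses can reuse the computed address.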
6728 LocationSummary* locations = instruction->GetLocations();
6729 Location out = locations->Out();
6730 Location first = locations->InAt(0);
6731 Location second = locations->InAt(1);
6732
Artem Serov328429f2016-07-06 16:23:04 +01006733 if (second.IsRegister()) {
6734 __ add(out.AsRegister<Register>(),
6735 first.AsRegister<Register>(),
6736 ShifterOperand(second.AsRegister<Register>()));
6737 } else {
6738 __ AddConstant(out.AsRegister<Register>(),
6739 first.AsRegister<Register>(),
6740 second.GetConstant()->AsIntConstant()->GetValue());
6741 }
6742}
6743
Artem Serove1811ed2017-04-27 16:50:47 +01006744void LocationsBuilderARM::VisitIntermediateAddressIndex(HIntermediateAddressIndex* instruction) {
6745 LOG(FATAL) << "Unreachable " << instruction->GetId();
6746}
6747
6748void InstructionCodeGeneratorARM::VisitIntermediateAddressIndex(
6749 HIntermediateAddressIndex* instruction) {
6750 LOG(FATAL) << "Unreachable " << instruction->GetId();
6751}
6752
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006753void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006754 RegisterSet caller_saves = RegisterSet::Empty();
6755 InvokeRuntimeCallingConvention calling_convention;
6756 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6757 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
6758 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Artem Serov2dd053d2017-03-08 14:54:06 +00006759
6760 HInstruction* index = instruction->InputAt(0);
6761 HInstruction* length = instruction->InputAt(1);
6762   // If both index and length are constants, we can check the bounds statically. But if at
6763   // least one of them is not encodable, ArmEncodableConstantOrRegister would create a
6764   // Location::RequiresRegister(), which we do not want here; instead we create constant
6765   // locations.
6766 bool both_const = index->IsConstant() && length->IsConstant();
6767 locations->SetInAt(0, both_const
6768 ? Location::ConstantLocation(index->AsConstant())
6769 : ArmEncodableConstantOrRegister(index, CMP));
6770 locations->SetInAt(1, both_const
6771 ? Location::ConstantLocation(length->AsConstant())
6772 : ArmEncodableConstantOrRegister(length, CMP));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006773}
6774
6775void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
6776 LocationSummary* locations = instruction->GetLocations();
Artem Serov2dd053d2017-03-08 14:54:06 +00006777 Location index_loc = locations->InAt(0);
6778 Location length_loc = locations->InAt(1);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006779
Artem Serov2dd053d2017-03-08 14:54:06 +00006780 if (length_loc.IsConstant()) {
6781 int32_t length = helpers::Int32ConstantFrom(length_loc);
6782 if (index_loc.IsConstant()) {
6783 // BCE will remove the bounds check if we are guaranteed to pass.
6784 int32_t index = helpers::Int32ConstantFrom(index_loc);
6785 if (index < 0 || index >= length) {
6786 SlowPathCodeARM* slow_path =
6787 new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(instruction);
6788 codegen_->AddSlowPath(slow_path);
6789 __ b(slow_path->GetEntryLabel());
6790 } else {
6791 // Some optimization after BCE may have generated this, and we should not
6792         // Some optimization after BCE may have generated this, and we should not
6793         // generate a bounds check, since the index is known to be within bounds.
6794 return;
6795 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006796
Artem Serov2dd053d2017-03-08 14:54:06 +00006797 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(instruction);
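    // Unsigned comparison trick: branching on HS (unsigned >=) also catches a negative index,
    // which appears as a large unsigned value, so one compare covers both `index < 0` and
    // `index >= length`.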
6798 __ cmp(index_loc.AsRegister<Register>(), ShifterOperand(length));
6799 codegen_->AddSlowPath(slow_path);
6800 __ b(slow_path->GetEntryLabel(), HS);
6801 } else {
6802 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(instruction);
6803 if (index_loc.IsConstant()) {
6804 int32_t index = helpers::Int32ConstantFrom(index_loc);
6805 __ cmp(length_loc.AsRegister<Register>(), ShifterOperand(index));
6806 } else {
6807 __ cmp(length_loc.AsRegister<Register>(), ShifterOperand(index_loc.AsRegister<Register>()));
6808 }
6809 codegen_->AddSlowPath(slow_path);
6810 __ b(slow_path->GetEntryLabel(), LS);
6811 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006812}
6813
Nicolas Geoffray07276db2015-05-18 14:22:09 +01006814void CodeGeneratorARM::MarkGCCard(Register temp,
6815 Register card,
6816 Register object,
6817 Register value,
6818 bool can_be_null) {
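  // Card marking sketch: the card table base is read from the current thread, the card index is
  // the object address shifted right by kCardShift, and the byte stored is the low byte of the
  // base register itself; the runtime sets up the card table so that this value is the
  // dirty-card marker, letting a single strb mark the card.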
Vladimir Markocf93a5c2015-06-16 11:33:24 +00006819 Label is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01006820 if (can_be_null) {
6821 __ CompareAndBranchIfZero(value, &is_null);
6822 }
Andreas Gampe542451c2016-07-26 09:02:02 -07006823 __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmPointerSize>().Int32Value());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006824 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
6825 __ strb(card, Address(card, temp));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01006826 if (can_be_null) {
6827 __ Bind(&is_null);
6828 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01006829}
6830
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006831void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01006832 LOG(FATAL) << "Unreachable";
Nicolas Geoffray4e3d23a2014-05-22 18:32:45 +01006833}
6834
6835void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01006836 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
6837}
6838
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006839void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01006840 LocationSummary* locations =
6841 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01006842 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006843}
6844
6845void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01006846 HBasicBlock* block = instruction->GetBlock();
6847 if (block->GetLoopInformation() != nullptr) {
6848 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
6849 // The back edge will generate the suspend check.
6850 return;
6851 }
6852 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
6853 // The goto will generate the suspend check.
6854 return;
6855 }
6856 GenerateSuspendCheck(instruction, nullptr);
6857}
6858
6859void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
6860 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006861 SuspendCheckSlowPathARM* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01006862 down_cast<SuspendCheckSlowPathARM*>(instruction->GetSlowPath());
6863 if (slow_path == nullptr) {
6864 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
6865 instruction->SetSlowPath(slow_path);
6866 codegen_->AddSlowPath(slow_path);
6867 if (successor != nullptr) {
6868 DCHECK(successor->IsLoopHeader());
6869 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
6870 }
6871 } else {
6872 DCHECK_EQ(slow_path->GetSuccessor(), successor);
6873 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006874
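  // Load the 16-bit thread flags; any non-zero flag (e.g. a pending suspend request) routes
  // execution to the slow path, which calls into the runtime to handle it.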
Nicolas Geoffray44b819e2014-11-06 12:00:54 +00006875 __ LoadFromOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07006876 kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmPointerSize>().Int32Value());
Nicolas Geoffray3c049742014-09-24 18:10:46 +01006877 if (successor == nullptr) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01006878 __ CompareAndBranchIfNonZero(IP, slow_path->GetEntryLabel());
Nicolas Geoffray3c049742014-09-24 18:10:46 +01006879 __ Bind(slow_path->GetReturnLabel());
6880 } else {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01006881 __ CompareAndBranchIfZero(IP, codegen_->GetLabelOf(successor));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01006882 __ b(slow_path->GetEntryLabel());
6883 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00006884}
6885
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01006886ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
6887 return codegen_->GetAssembler();
6888}
6889
6890void ParallelMoveResolverARM::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01006891 MoveOperands* move = moves_[index];
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01006892 Location source = move->GetSource();
6893 Location destination = move->GetDestination();
6894
6895 if (source.IsRegister()) {
6896 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006897 __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
David Brazdil74eb1b22015-12-14 11:44:01 +00006898 } else if (destination.IsFpuRegister()) {
6899 __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01006900 } else {
6901 DCHECK(destination.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006902 __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01006903 SP, destination.GetStackIndex());
6904 }
6905 } else if (source.IsStackSlot()) {
6906 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006907 __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01006908 SP, source.GetStackIndex());
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006909 } else if (destination.IsFpuRegister()) {
6910 __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01006911 } else {
6912 DCHECK(destination.IsStackSlot());
6913 __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
6914 __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
6915 }
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006916 } else if (source.IsFpuRegister()) {
David Brazdil74eb1b22015-12-14 11:44:01 +00006917 if (destination.IsRegister()) {
6918 __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
6919 } else if (destination.IsFpuRegister()) {
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006920 __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01006921 } else {
6922 DCHECK(destination.IsStackSlot());
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006923 __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
6924 }
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006925 } else if (source.IsDoubleStackSlot()) {
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00006926 if (destination.IsDoubleStackSlot()) {
Nicolas Geoffrayffe8a572015-02-11 01:10:39 +00006927 __ LoadDFromOffset(DTMP, SP, source.GetStackIndex());
6928 __ StoreDToOffset(DTMP, SP, destination.GetStackIndex());
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00006929 } else if (destination.IsRegisterPair()) {
6930 DCHECK(ExpectedPairLayout(destination));
6931 __ LoadFromOffset(
6932 kLoadWordPair, destination.AsRegisterPairLow<Register>(), SP, source.GetStackIndex());
6933 } else {
6934 DCHECK(destination.IsFpuRegisterPair()) << destination;
6935 __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
6936 SP,
6937 source.GetStackIndex());
6938 }
6939 } else if (source.IsRegisterPair()) {
6940 if (destination.IsRegisterPair()) {
6941 __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
6942 __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
David Brazdil74eb1b22015-12-14 11:44:01 +00006943 } else if (destination.IsFpuRegisterPair()) {
6944 __ vmovdrr(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
6945 source.AsRegisterPairLow<Register>(),
6946 source.AsRegisterPairHigh<Register>());
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00006947 } else {
6948 DCHECK(destination.IsDoubleStackSlot()) << destination;
6949 DCHECK(ExpectedPairLayout(source));
6950 __ StoreToOffset(
6951 kStoreWordPair, source.AsRegisterPairLow<Register>(), SP, destination.GetStackIndex());
6952 }
6953 } else if (source.IsFpuRegisterPair()) {
David Brazdil74eb1b22015-12-14 11:44:01 +00006954 if (destination.IsRegisterPair()) {
6955 __ vmovrrd(destination.AsRegisterPairLow<Register>(),
6956 destination.AsRegisterPairHigh<Register>(),
6957 FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
6958 } else if (destination.IsFpuRegisterPair()) {
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00006959 __ vmovd(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
6960 FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
6961 } else {
6962 DCHECK(destination.IsDoubleStackSlot()) << destination;
6963 __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
6964 SP,
6965 destination.GetStackIndex());
6966 }
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006967 } else {
6968 DCHECK(source.IsConstant()) << source;
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00006969 HConstant* constant = source.GetConstant();
6970 if (constant->IsIntConstant() || constant->IsNullConstant()) {
6971 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray840e5462015-01-07 16:01:24 +00006972 if (destination.IsRegister()) {
6973 __ LoadImmediate(destination.AsRegister<Register>(), value);
6974 } else {
6975 DCHECK(destination.IsStackSlot());
6976 __ LoadImmediate(IP, value);
6977 __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
6978 }
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00006979 } else if (constant->IsLongConstant()) {
6980 int64_t value = constant->AsLongConstant()->GetValue();
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00006981 if (destination.IsRegisterPair()) {
6982 __ LoadImmediate(destination.AsRegisterPairLow<Register>(), Low32Bits(value));
6983 __ LoadImmediate(destination.AsRegisterPairHigh<Register>(), High32Bits(value));
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00006984 } else {
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00006985 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00006986 __ LoadImmediate(IP, Low32Bits(value));
6987 __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
6988 __ LoadImmediate(IP, High32Bits(value));
6989 __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
6990 }
6991 } else if (constant->IsDoubleConstant()) {
6992 double value = constant->AsDoubleConstant()->GetValue();
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00006993 if (destination.IsFpuRegisterPair()) {
6994 __ LoadDImmediate(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()), value);
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00006995 } else {
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00006996 DCHECK(destination.IsDoubleStackSlot()) << destination;
6997 uint64_t int_value = bit_cast<uint64_t, double>(value);
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00006998 __ LoadImmediate(IP, Low32Bits(int_value));
6999 __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
7000 __ LoadImmediate(IP, High32Bits(int_value));
7001 __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
7002 }
Nicolas Geoffray840e5462015-01-07 16:01:24 +00007003 } else {
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00007004 DCHECK(constant->IsFloatConstant()) << constant->DebugName();
Nicolas Geoffray840e5462015-01-07 16:01:24 +00007005 float value = constant->AsFloatConstant()->GetValue();
7006 if (destination.IsFpuRegister()) {
7007 __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
7008 } else {
7009 DCHECK(destination.IsStackSlot());
7010 __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
7011 __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
7012 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01007013 }
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01007014 }
7015}
7016
7017void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
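  // Swap `reg` with the stack slot at SP + `mem`, using IP as the scratch register.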
7018 __ Mov(IP, reg);
7019 __ LoadFromOffset(kLoadWord, reg, SP, mem);
7020 __ StoreToOffset(kStoreWord, IP, SP, mem);
7021}
7022
7023void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
7024 ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
7025 int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
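  // If the scratch register had to be spilled, the push moved SP down by one word, so both
  // SP-relative slot offsets below must be adjusted by that amount.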
7026 __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
7027 SP, mem1 + stack_offset);
7028 __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
7029 __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
7030 SP, mem2 + stack_offset);
7031 __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
7032}
7033
7034void ParallelMoveResolverARM::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01007035 MoveOperands* move = moves_[index];
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01007036 Location source = move->GetSource();
7037 Location destination = move->GetDestination();
7038
7039 if (source.IsRegister() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007040 DCHECK_NE(source.AsRegister<Register>(), IP);
7041 DCHECK_NE(destination.AsRegister<Register>(), IP);
7042 __ Mov(IP, source.AsRegister<Register>());
7043 __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
7044 __ Mov(destination.AsRegister<Register>(), IP);
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01007045 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007046 Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01007047 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007048 Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01007049 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
7050 Exchange(source.GetStackIndex(), destination.GetStackIndex());
Nicolas Geoffray840e5462015-01-07 16:01:24 +00007051 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Nicolas Geoffraya8eef822015-01-16 11:14:27 +00007052 __ vmovrs(IP, source.AsFpuRegister<SRegister>());
Nicolas Geoffray840e5462015-01-07 16:01:24 +00007053 __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
Nicolas Geoffraya8eef822015-01-16 11:14:27 +00007054 __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00007055 } else if (source.IsRegisterPair() && destination.IsRegisterPair()) {
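    // Swap two core register pairs through the FP temporary DTMP, avoiding the need for a second
    // core scratch register: source pair -> DTMP, destination pair -> source pair, DTMP ->
    // destination pair.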
Nicolas Geoffrayffe8a572015-02-11 01:10:39 +00007056 __ vmovdrr(DTMP, source.AsRegisterPairLow<Register>(), source.AsRegisterPairHigh<Register>());
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00007057 __ Mov(source.AsRegisterPairLow<Register>(), destination.AsRegisterPairLow<Register>());
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00007058 __ Mov(source.AsRegisterPairHigh<Register>(), destination.AsRegisterPairHigh<Register>());
Nicolas Geoffrayffe8a572015-02-11 01:10:39 +00007059 __ vmovrrd(destination.AsRegisterPairLow<Register>(),
7060 destination.AsRegisterPairHigh<Register>(),
7061 DTMP);
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00007062 } else if (source.IsRegisterPair() || destination.IsRegisterPair()) {
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00007063 Register low_reg = source.IsRegisterPair()
7064 ? source.AsRegisterPairLow<Register>()
7065 : destination.AsRegisterPairLow<Register>();
7066 int mem = source.IsRegisterPair()
7067 ? destination.GetStackIndex()
7068 : source.GetStackIndex();
7069 DCHECK(ExpectedPairLayout(source.IsRegisterPair() ? source : destination));
Nicolas Geoffrayffe8a572015-02-11 01:10:39 +00007070 __ vmovdrr(DTMP, low_reg, static_cast<Register>(low_reg + 1));
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00007071 __ LoadFromOffset(kLoadWordPair, low_reg, SP, mem);
Nicolas Geoffrayffe8a572015-02-11 01:10:39 +00007072 __ StoreDToOffset(DTMP, SP, mem);
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00007073 } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00007074 DRegister first = FromLowSToD(source.AsFpuRegisterPairLow<SRegister>());
7075 DRegister second = FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
Nicolas Geoffrayffe8a572015-02-11 01:10:39 +00007076 __ vmovd(DTMP, first);
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00007077 __ vmovd(first, second);
Nicolas Geoffrayffe8a572015-02-11 01:10:39 +00007078 __ vmovd(second, DTMP);
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00007079 } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
7080 DRegister reg = source.IsFpuRegisterPair()
7081 ? FromLowSToD(source.AsFpuRegisterPairLow<SRegister>())
7082 : FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
7083 int mem = source.IsFpuRegisterPair()
7084 ? destination.GetStackIndex()
7085 : source.GetStackIndex();
Nicolas Geoffrayffe8a572015-02-11 01:10:39 +00007086 __ vmovd(DTMP, reg);
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00007087 __ LoadDFromOffset(reg, SP, mem);
Nicolas Geoffrayffe8a572015-02-11 01:10:39 +00007088 __ StoreDToOffset(DTMP, SP, mem);
Nicolas Geoffray840e5462015-01-07 16:01:24 +00007089 } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
7090 SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
7091 : destination.AsFpuRegister<SRegister>();
7092 int mem = source.IsFpuRegister()
7093 ? destination.GetStackIndex()
7094 : source.GetStackIndex();
7095
Nicolas Geoffraya8eef822015-01-16 11:14:27 +00007096 __ vmovrs(IP, reg);
Nicolas Geoffrayf7a0c4e2015-02-10 17:08:47 +00007097 __ LoadSFromOffset(reg, SP, mem);
Nicolas Geoffraya8eef822015-01-16 11:14:27 +00007098 __ StoreToOffset(kStoreWord, IP, SP, mem);
Nicolas Geoffray53f12622015-01-13 18:04:41 +00007099 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
Nicolas Geoffray53f12622015-01-13 18:04:41 +00007100 Exchange(source.GetStackIndex(), destination.GetStackIndex());
7101 Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01007102 } else {
Nicolas Geoffray53f12622015-01-13 18:04:41 +00007103 LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01007104 }
7105}
7106
7107void ParallelMoveResolverARM::SpillScratch(int reg) {
7108 __ Push(static_cast<Register>(reg));
7109}
7110
7111void ParallelMoveResolverARM::RestoreScratch(int reg) {
7112 __ Pop(static_cast<Register>(reg));
Nicolas Geoffray4e3d23a2014-05-22 18:32:45 +01007113}
7114
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007115HLoadClass::LoadKind CodeGeneratorARM::GetSupportedLoadClassKind(
7116 HLoadClass::LoadKind desired_class_load_kind) {
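  // Every requested kind is supported; the switch below only asserts that linker-patched kinds
  // (PC-relative and .bss entry) are used by the AOT compiler and the JIT table kind by the JIT.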
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007117 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00007118 case HLoadClass::LoadKind::kInvalid:
7119 LOG(FATAL) << "UNREACHABLE";
7120 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007121 case HLoadClass::LoadKind::kReferrersClass:
7122 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007123 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007124 case HLoadClass::LoadKind::kBssEntry:
7125 DCHECK(!Runtime::Current()->UseJitCompilation());
7126 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007127 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007128 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007129 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01007130 case HLoadClass::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007131 case HLoadClass::LoadKind::kRuntimeCall:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007132 break;
7133 }
7134 return desired_class_load_kind;
7135}
7136
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01007137void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00007138 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007139 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007140 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00007141 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007142 cls,
7143 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00007144 Location::RegisterLocation(R0));
Vladimir Markoea4c1262017-02-06 19:59:33 +00007145 DCHECK_EQ(calling_convention.GetRegisterAt(0), R0);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007146 return;
7147 }
Vladimir Marko41559982017-01-06 14:04:23 +00007148 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007149
Mathieu Chartier31b12e32016-09-02 17:11:57 -07007150 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
7151 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007152 ? LocationSummary::kCallOnSlowPath
7153 : LocationSummary::kNoCall;
7154 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07007155 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01007156 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01007157 }
7158
Vladimir Marko41559982017-01-06 14:04:23 +00007159 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007160 locations->SetInAt(0, Location::RequiresRegister());
7161 }
7162 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00007163 if (load_kind == HLoadClass::LoadKind::kBssEntry) {
7164 if (!kUseReadBarrier || kUseBakerReadBarrier) {
7165      // Rely on the type resolution or initialization entrypoint and on marking to save everything we need.
7166 // Note that IP may be clobbered by saving/restoring the live register (only one thanks
7167 // to the custom calling convention) or by marking, so we request a different temp.
7168 locations->AddTemp(Location::RequiresRegister());
7169 RegisterSet caller_saves = RegisterSet::Empty();
7170 InvokeRuntimeCallingConvention calling_convention;
7171 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7172 // TODO: Add GetReturnLocation() to the calling convention so that we can DCHECK()
7173 // that the the kPrimNot result register is the same as the first argument register.
7174      // that the kPrimNot result register is the same as the first argument register.
7175 } else {
7176 // For non-Baker read barrier we have a temp-clobbering call.
7177 }
7178 }
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01007179 if (kUseBakerReadBarrier && kBakerReadBarrierLinkTimeThunksEnableForGcRoots) {
7180 if (load_kind == HLoadClass::LoadKind::kBssEntry ||
7181 (load_kind == HLoadClass::LoadKind::kReferrersClass &&
7182 !Runtime::Current()->UseJitCompilation())) {
7183 locations->AddTemp(Location::RegisterLocation(kBakerCcEntrypointRegister));
7184 }
7185 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01007186}
7187
Nicolas Geoffray5247c082017-01-13 14:17:29 +00007188// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
7189// move.
7190void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00007191 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007192 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00007193 codegen_->GenerateLoadClassRuntimeCall(cls);
Calin Juravle580b6092015-10-06 17:35:58 +01007194 return;
7195 }
Vladimir Marko41559982017-01-06 14:04:23 +00007196 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01007197
Vladimir Marko41559982017-01-06 14:04:23 +00007198 LocationSummary* locations = cls->GetLocations();
Roland Levillain3b359c72015-11-17 19:35:12 +00007199 Location out_loc = locations->Out();
7200 Register out = out_loc.AsRegister<Register>();
Roland Levillain3b359c72015-11-17 19:35:12 +00007201
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007202 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
7203 ? kWithoutReadBarrier
7204 : kCompilerReadBarrierOption;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007205 bool generate_null_check = false;
Vladimir Marko41559982017-01-06 14:04:23 +00007206 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007207 case HLoadClass::LoadKind::kReferrersClass: {
7208 DCHECK(!cls->CanCallRuntime());
7209 DCHECK(!cls->MustGenerateClinitCheck());
7210 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
7211 Register current_method = locations->InAt(0).AsRegister<Register>();
Mathieu Chartier31b12e32016-09-02 17:11:57 -07007212 GenerateGcRootFieldLoad(cls,
7213 out_loc,
7214 current_method,
7215 ArtMethod::DeclaringClassOffset().Int32Value(),
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007216 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007217 break;
7218 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007219 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007220 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007221 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
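      // PC-relative pattern: a MOVW/MOVT pair with placeholder immediates followed by an ADD of
      // PC. The tracked labels let the linker patch the two immediates with the distance to the
      // target, so after the ADD `out` holds the type's address. The same pattern is used for
      // the .bss entry and string patches below.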
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007222 CodeGeneratorARM::PcRelativePatchInfo* labels =
7223 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
7224 __ BindTrackedLabel(&labels->movw_label);
7225 __ movw(out, /* placeholder */ 0u);
7226 __ BindTrackedLabel(&labels->movt_label);
7227 __ movt(out, /* placeholder */ 0u);
7228 __ BindTrackedLabel(&labels->add_pc_label);
7229 __ add(out, out, ShifterOperand(PC));
7230 break;
7231 }
7232 case HLoadClass::LoadKind::kBootImageAddress: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007233 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Nicolas Geoffray5247c082017-01-13 14:17:29 +00007234 uint32_t address = dchecked_integral_cast<uint32_t>(
7235 reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
7236 DCHECK_NE(address, 0u);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007237 __ LoadLiteral(out, codegen_->DeduplicateBootImageAddressLiteral(address));
7238 break;
7239 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007240 case HLoadClass::LoadKind::kBssEntry: {
Vladimir Markoea4c1262017-02-06 19:59:33 +00007241 Register temp = (!kUseReadBarrier || kUseBakerReadBarrier)
7242 ? locations->GetTemp(0).AsRegister<Register>()
7243 : out;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007244 CodeGeneratorARM::PcRelativePatchInfo* labels =
Vladimir Marko1998cd02017-01-13 13:02:58 +00007245 codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007246 __ BindTrackedLabel(&labels->movw_label);
Vladimir Markoea4c1262017-02-06 19:59:33 +00007247 __ movw(temp, /* placeholder */ 0u);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007248 __ BindTrackedLabel(&labels->movt_label);
Vladimir Markoea4c1262017-02-06 19:59:33 +00007249 __ movt(temp, /* placeholder */ 0u);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007250 __ BindTrackedLabel(&labels->add_pc_label);
Vladimir Markoea4c1262017-02-06 19:59:33 +00007251 __ add(temp, temp, ShifterOperand(PC));
7252 GenerateGcRootFieldLoad(cls, out_loc, temp, /* offset */ 0, read_barrier_option);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007253 generate_null_check = true;
7254 break;
7255 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007256 case HLoadClass::LoadKind::kJitTableAddress: {
7257 __ LoadLiteral(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
7258 cls->GetTypeIndex(),
Nicolas Geoffray5247c082017-01-13 14:17:29 +00007259 cls->GetClass()));
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007260 // /* GcRoot<mirror::Class> */ out = *out
Vladimir Markoea4c1262017-02-06 19:59:33 +00007261 GenerateGcRootFieldLoad(cls, out_loc, out, /* offset */ 0, read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007262 break;
7263 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007264 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00007265 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00007266 LOG(FATAL) << "UNREACHABLE";
7267 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007268 }
Nicolas Geoffray424f6762014-11-03 14:51:25 +00007269
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007270 if (generate_null_check || cls->MustGenerateClinitCheck()) {
7271 DCHECK(cls->CanCallRuntime());
Artem Serovf4d6aee2016-07-11 10:41:45 +01007272 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007273 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
7274 codegen_->AddSlowPath(slow_path);
7275 if (generate_null_check) {
7276 __ CompareAndBranchIfZero(out, slow_path->GetEntryLabel());
7277 }
7278 if (cls->MustGenerateClinitCheck()) {
7279 GenerateClassInitializationCheck(slow_path, out);
7280 } else {
7281 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00007282 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01007283 }
7284}
7285
7286void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
7287 LocationSummary* locations =
7288 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
7289 locations->SetInAt(0, Location::RequiresRegister());
7290 if (check->HasUses()) {
7291 locations->SetOut(Location::SameAsFirstInput());
7292 }
7293}
7294
7295void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00007296 // We assume the class is not null.
Artem Serovf4d6aee2016-07-11 10:41:45 +01007297 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00007298 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01007299 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00007300 GenerateClassInitializationCheck(slow_path,
7301 check->GetLocations()->InAt(0).AsRegister<Register>());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00007302}
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01007303
Nicolas Geoffray424f6762014-11-03 14:51:25 +00007304void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
Artem Serovf4d6aee2016-07-11 10:41:45 +01007305 SlowPathCodeARM* slow_path, Register class_reg) {
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01007306 __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
7307 __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
7308 __ b(slow_path->GetEntryLabel(), LT);
7309   // Even if the initialized flag is set, the status load is not ordered with later loads of
7310   // the class's static fields; issue a memory fence to acquire the initializing thread's writes.
7311 __ dmb(ISH);
7312 __ Bind(slow_path->GetExitLabel());
7313}
7314
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007315HLoadString::LoadKind CodeGeneratorARM::GetSupportedLoadStringKind(
7316 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007317 switch (desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007318 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoaad75c62016-10-03 08:46:48 +00007319 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01007320 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007321 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007322 case HLoadString::LoadKind::kJitTableAddress:
7323 DCHECK(Runtime::Current()->UseJitCompilation());
7324 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01007325 case HLoadString::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007326 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007327 break;
7328 }
7329 return desired_string_load_kind;
7330}
7331
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00007332void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007333 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00007334 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007335 HLoadString::LoadKind load_kind = load->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007336 if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworthd8ec6db2016-08-30 17:19:14 -07007337 locations->SetOut(Location::RegisterLocation(R0));
7338 } else {
7339 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01007340 if (load_kind == HLoadString::LoadKind::kBssEntry) {
7341 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00007342        // Rely on the pResolveString entrypoint and on marking to save everything we need, including temps.
7343 // Note that IP may be clobbered by saving/restoring the live register (only one thanks
7344 // to the custom calling convention) or by marking, so we request a different temp.
Vladimir Marko94ce9c22016-09-30 14:50:51 +01007345 locations->AddTemp(Location::RequiresRegister());
7346 RegisterSet caller_saves = RegisterSet::Empty();
7347 InvokeRuntimeCallingConvention calling_convention;
7348 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7349 // TODO: Add GetReturnLocation() to the calling convention so that we can DCHECK()
7350        // that the kPrimNot result register is the same as the first argument register.
7351 locations->SetCustomSlowPathCallerSaves(caller_saves);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01007352 if (kUseBakerReadBarrier && kBakerReadBarrierLinkTimeThunksEnableForGcRoots) {
7353 locations->AddTemp(Location::RegisterLocation(kBakerCcEntrypointRegister));
7354 }
Vladimir Marko94ce9c22016-09-30 14:50:51 +01007355 } else {
7356 // For non-Baker read barrier we have a temp-clobbering call.
7357 }
7358 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007359 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00007360}
7361
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007362// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
7363// move.
7364void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01007365 LocationSummary* locations = load->GetLocations();
Roland Levillain3b359c72015-11-17 19:35:12 +00007366 Location out_loc = locations->Out();
7367 Register out = out_loc.AsRegister<Register>();
Christina Wadsworthd8ec6db2016-08-30 17:19:14 -07007368 HLoadString::LoadKind load_kind = load->GetLoadKind();
Roland Levillain3b359c72015-11-17 19:35:12 +00007369
Christina Wadsworthd8ec6db2016-08-30 17:19:14 -07007370 switch (load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007371 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markoaad75c62016-10-03 08:46:48 +00007372 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007373 CodeGeneratorARM::PcRelativePatchInfo* labels =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007374 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007375 __ BindTrackedLabel(&labels->movw_label);
7376 __ movw(out, /* placeholder */ 0u);
7377 __ BindTrackedLabel(&labels->movt_label);
7378 __ movt(out, /* placeholder */ 0u);
7379 __ BindTrackedLabel(&labels->add_pc_label);
7380 __ add(out, out, ShifterOperand(PC));
7381 return; // No dex cache slow path.
7382 }
7383 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007384 uint32_t address = dchecked_integral_cast<uint32_t>(
7385 reinterpret_cast<uintptr_t>(load->GetString().Get()));
7386 DCHECK_NE(address, 0u);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007387 __ LoadLiteral(out, codegen_->DeduplicateBootImageAddressLiteral(address));
7388 return; // No dex cache slow path.
7389 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00007390 case HLoadString::LoadKind::kBssEntry: {
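      // The string's .bss slot starts out null. It is loaded via the PC-relative address below;
      // if the result is null, LoadStringSlowPathARM resolves the string through the runtime,
      // which is also expected to fill in the .bss slot so that later executions take the fast
      // path.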
7391 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoea4c1262017-02-06 19:59:33 +00007392 Register temp = (!kUseReadBarrier || kUseBakerReadBarrier)
7393 ? locations->GetTemp(0).AsRegister<Register>()
7394 : out;
Vladimir Markoaad75c62016-10-03 08:46:48 +00007395 CodeGeneratorARM::PcRelativePatchInfo* labels =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007396 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Vladimir Markoaad75c62016-10-03 08:46:48 +00007397 __ BindTrackedLabel(&labels->movw_label);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01007398 __ movw(temp, /* placeholder */ 0u);
Vladimir Markoaad75c62016-10-03 08:46:48 +00007399 __ BindTrackedLabel(&labels->movt_label);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01007400 __ movt(temp, /* placeholder */ 0u);
Vladimir Markoaad75c62016-10-03 08:46:48 +00007401 __ BindTrackedLabel(&labels->add_pc_label);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01007402 __ add(temp, temp, ShifterOperand(PC));
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007403 GenerateGcRootFieldLoad(load, out_loc, temp, /* offset */ 0, kCompilerReadBarrierOption);
Vladimir Markoaad75c62016-10-03 08:46:48 +00007404 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
7405 codegen_->AddSlowPath(slow_path);
7406 __ CompareAndBranchIfZero(out, slow_path->GetEntryLabel());
7407 __ Bind(slow_path->GetExitLabel());
7408 return;
7409 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007410 case HLoadString::LoadKind::kJitTableAddress: {
7411 __ LoadLiteral(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007412 load->GetStringIndex(),
7413 load->GetString()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007414 // /* GcRoot<mirror::String> */ out = *out
7415 GenerateGcRootFieldLoad(load, out_loc, out, /* offset */ 0, kCompilerReadBarrierOption);
7416 return;
7417 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007418 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07007419 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007420 }
Roland Levillain3b359c72015-11-17 19:35:12 +00007421
Christina Wadsworthd8ec6db2016-08-30 17:19:14 -07007422  // TODO: Consider re-adding the compiler code to do the string dex cache lookup.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007423 DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
Christina Wadsworthd8ec6db2016-08-30 17:19:14 -07007424 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01007425 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Andreas Gampe8a0128a2016-11-28 07:38:35 -08007426 __ LoadImmediate(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
Christina Wadsworthd8ec6db2016-08-30 17:19:14 -07007427 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
7428 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00007429}
7430
David Brazdilcb1c0552015-08-04 16:22:25 +01007431static int32_t GetExceptionTlsOffset() {
Andreas Gampe542451c2016-07-26 09:02:02 -07007432 return Thread::ExceptionOffset<kArmPointerSize>().Int32Value();
David Brazdilcb1c0552015-08-04 16:22:25 +01007433}
7434
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00007435void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
7436 LocationSummary* locations =
7437 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
7438 locations->SetOut(Location::RequiresRegister());
7439}
7440
7441void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007442 Register out = load->GetLocations()->Out().AsRegister<Register>();
David Brazdilcb1c0552015-08-04 16:22:25 +01007443 __ LoadFromOffset(kLoadWord, out, TR, GetExceptionTlsOffset());
7444}
7445
7446void LocationsBuilderARM::VisitClearException(HClearException* clear) {
7447 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
7448}
7449
7450void InstructionCodeGeneratorARM::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00007451 __ LoadImmediate(IP, 0);
David Brazdilcb1c0552015-08-04 16:22:25 +01007452 __ StoreToOffset(kStoreWord, IP, TR, GetExceptionTlsOffset());
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00007453}
7454
7455void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
7456 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01007457 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00007458 InvokeRuntimeCallingConvention calling_convention;
7459 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7460}
7461
7462void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01007463 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00007464 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00007465}
7466
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007467// Temp is used for read barrier.
7468static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
7469 if (kEmitCompilerReadBarrier &&
7470 (kUseBakerReadBarrier ||
7471 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
7472 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
7473 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
7474 return 1;
7475 }
7476 return 0;
7477}
7478
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007479// Interface case has 3 temps, one for holding the number of interfaces, one for the current
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007480// interface pointer, one for loading the current interface.
7481// The other checks have one temp for loading the object's class.
7482static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
7483 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
7484 return 3;
7485 }
7486 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillainc9285912015-12-18 10:38:42 +00007487}
7488
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007489void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007490 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain3b359c72015-11-17 19:35:12 +00007491 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01007492 bool baker_read_barrier_slow_path = false;
Roland Levillain3b359c72015-11-17 19:35:12 +00007493 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007494 case TypeCheckKind::kExactCheck:
7495 case TypeCheckKind::kAbstractClassCheck:
7496 case TypeCheckKind::kClassHierarchyCheck:
7497 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain3b359c72015-11-17 19:35:12 +00007498 call_kind =
7499 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01007500 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007501 break;
7502 case TypeCheckKind::kArrayCheck:
Roland Levillain3b359c72015-11-17 19:35:12 +00007503 case TypeCheckKind::kUnresolvedCheck:
7504 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007505 call_kind = LocationSummary::kCallOnSlowPath;
7506 break;
7507 }
Roland Levillain3b359c72015-11-17 19:35:12 +00007508
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007509 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01007510 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01007511 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01007512 }
Roland Levillain3b359c72015-11-17 19:35:12 +00007513 locations->SetInAt(0, Location::RequiresRegister());
7514 locations->SetInAt(1, Location::RequiresRegister());
7515 // The "out" register is used as a temporary, so it overlaps with the inputs.
7516 // Note that TypeCheckSlowPathARM uses this register too.
7517 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007518 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01007519 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
7520 codegen_->MaybeAddBakerCcEntrypointTempForFields(locations);
7521 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007522}
7523
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007524void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillainc9285912015-12-18 10:38:42 +00007525 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007526 LocationSummary* locations = instruction->GetLocations();
Roland Levillain3b359c72015-11-17 19:35:12 +00007527 Location obj_loc = locations->InAt(0);
7528 Register obj = obj_loc.AsRegister<Register>();
Roland Levillain271ab9c2014-11-27 15:23:57 +00007529 Register cls = locations->InAt(1).AsRegister<Register>();
Roland Levillain3b359c72015-11-17 19:35:12 +00007530 Location out_loc = locations->Out();
7531 Register out = out_loc.AsRegister<Register>();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007532 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
7533 DCHECK_LE(num_temps, 1u);
7534 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007535 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007536 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
7537 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
7538 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007539 Label done;
7540 Label* const final_label = codegen_->GetFinalLabel(instruction, &done);
Artem Serovf4d6aee2016-07-11 10:41:45 +01007541 SlowPathCodeARM* slow_path = nullptr;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007542
7543 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007544  // Avoid null check if we know obj is not null.
7545 if (instruction->MustDoNullCheck()) {
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007546 DCHECK_NE(out, obj);
7547 __ LoadImmediate(out, 0);
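    // Note: `out` already holds the correct result (false) for a null `obj`, so the branch
    // below can go straight to `final_label`; the DCHECK above guarantees this write did
    // not clobber the register being tested.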
7548 __ CompareAndBranchIfZero(obj, final_label);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007549 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007550
Roland Levillainc9285912015-12-18 10:38:42 +00007551 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007552 case TypeCheckKind::kExactCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007553 // /* HeapReference<Class> */ out = obj->klass_
7554 GenerateReferenceLoadTwoRegisters(instruction,
7555 out_loc,
7556 obj_loc,
7557 class_offset,
7558 maybe_temp_loc,
7559 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007560 // Classes must be equal for the instanceof to succeed.
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007561 __ cmp(out, ShifterOperand(cls));
7562 // We speculatively set the result to false without changing the condition
7563 // flags, which allows us to avoid some branching later.
7564 __ mov(out, ShifterOperand(0), AL, kCcKeep);
7565
7566 // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
7567 // we check that the output is in a low register, so that a 16-bit MOV
7568 // encoding can be used.
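      // Rough sketch (not exact encodings) of the two alternatives emitted below:
      //   low `out`:      it eq; moveq out, #1
      //   non-low `out`:  bne final_label; mov out, #1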
7569 if (ArmAssembler::IsLowRegister(out)) {
7570 __ it(EQ);
7571 __ mov(out, ShifterOperand(1), EQ);
7572 } else {
7573 __ b(final_label, NE);
7574 __ LoadImmediate(out, 1);
7575 }
7576
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007577 break;
7578 }
Roland Levillain3b359c72015-11-17 19:35:12 +00007579
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007580 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007581 // /* HeapReference<Class> */ out = obj->klass_
7582 GenerateReferenceLoadTwoRegisters(instruction,
7583 out_loc,
7584 obj_loc,
7585 class_offset,
7586 maybe_temp_loc,
7587 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007588 // If the class is abstract, we eagerly fetch the super class of the
7589 // object to avoid doing a comparison we know will fail.
7590 Label loop;
7591 __ Bind(&loop);
Roland Levillain3b359c72015-11-17 19:35:12 +00007592 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007593 GenerateReferenceLoadOneRegister(instruction,
7594 out_loc,
7595 super_offset,
7596 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007597 kCompilerReadBarrierOption);
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007598 // If `out` is null, we use it for the result, and jump to the final label.
Anton Kirilov6f644202017-02-27 18:29:45 +00007599 __ CompareAndBranchIfZero(out, final_label);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007600 __ cmp(out, ShifterOperand(cls));
7601 __ b(&loop, NE);
7602 __ LoadImmediate(out, 1);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007603 break;
7604 }
Roland Levillain3b359c72015-11-17 19:35:12 +00007605
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007606 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007607 // /* HeapReference<Class> */ out = obj->klass_
7608 GenerateReferenceLoadTwoRegisters(instruction,
7609 out_loc,
7610 obj_loc,
7611 class_offset,
7612 maybe_temp_loc,
7613 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007614 // Walk over the class hierarchy to find a match.
7615 Label loop, success;
7616 __ Bind(&loop);
7617 __ cmp(out, ShifterOperand(cls));
7618 __ b(&success, EQ);
Roland Levillain3b359c72015-11-17 19:35:12 +00007619 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007620 GenerateReferenceLoadOneRegister(instruction,
7621 out_loc,
7622 super_offset,
7623 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007624 kCompilerReadBarrierOption);
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007625 // This is essentially a null check, but it sets the condition flags to the
7626 // proper value for the code that follows the loop, i.e. not `EQ`.
7627 __ cmp(out, ShifterOperand(1));
7628 __ b(&loop, HS);
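      // Note: `cmp out, #1` plus a branch on HS (unsigned >=) loops back exactly while
      // `out` is non-null, and leaves the flags as LO/NE once `out` is null, which is
      // what the code after the loop relies on.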
7629
7630 // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
7631 // we check that the output is in a low register, so that a 16-bit MOV
7632 // encoding can be used.
7633 if (ArmAssembler::IsLowRegister(out)) {
7634 // If `out` is null, we use it for the result, and the condition flags
7635 // have already been set to `NE`, so the IT block that comes afterwards
7636 // (and which handles the successful case) turns into a NOP (instead of
7637 // overwriting `out`).
7638 __ Bind(&success);
7639 // There is only one branch to the `success` label (which is bound to this
7640 // IT block), and it has the same condition, `EQ`, so in that case the MOV
7641 // is executed.
7642 __ it(EQ);
7643 __ mov(out, ShifterOperand(1), EQ);
7644 } else {
7645 // If `out` is null, we use it for the result, and jump to the final label.
Anton Kirilov6f644202017-02-27 18:29:45 +00007646 __ b(final_label);
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007647 __ Bind(&success);
7648 __ LoadImmediate(out, 1);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007649 }
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007650
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007651 break;
7652 }
Roland Levillain3b359c72015-11-17 19:35:12 +00007653
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007654 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007655 // /* HeapReference<Class> */ out = obj->klass_
7656 GenerateReferenceLoadTwoRegisters(instruction,
7657 out_loc,
7658 obj_loc,
7659 class_offset,
7660 maybe_temp_loc,
7661 kCompilerReadBarrierOption);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007662 // Do an exact check.
7663 Label exact_check;
7664 __ cmp(out, ShifterOperand(cls));
7665 __ b(&exact_check, EQ);
Roland Levillain3b359c72015-11-17 19:35:12 +00007666 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain3b359c72015-11-17 19:35:12 +00007667 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007668 GenerateReferenceLoadOneRegister(instruction,
7669 out_loc,
7670 component_offset,
7671 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007672 kCompilerReadBarrierOption);
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007673 // If `out` is null, we use it for the result, and jump to the final label.
Anton Kirilov6f644202017-02-27 18:29:45 +00007674 __ CompareAndBranchIfZero(out, final_label);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007675 __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
7676 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Anton Kirilov1e7bb5a2017-03-17 12:30:44 +00007677 __ cmp(out, ShifterOperand(0));
7678 // We speculatively set the result to false without changing the condition
7679 // flags, which allows us to avoid some branching later.
7680 __ mov(out, ShifterOperand(0), AL, kCcKeep);
7681
7682 // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
7683 // we check that the output is in a low register, so that a 16-bit MOV
7684 // encoding can be used.
7685 if (ArmAssembler::IsLowRegister(out)) {
7686 __ Bind(&exact_check);
7687 __ it(EQ);
7688 __ mov(out, ShifterOperand(1), EQ);
7689 } else {
7690 __ b(final_label, NE);
7691 __ Bind(&exact_check);
7692 __ LoadImmediate(out, 1);
7693 }
7694
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007695 break;
7696 }
Roland Levillain3b359c72015-11-17 19:35:12 +00007697
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007698 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08007699 // No read barrier since the slow path will retry upon failure.
7700 // /* HeapReference<Class> */ out = obj->klass_
7701 GenerateReferenceLoadTwoRegisters(instruction,
7702 out_loc,
7703 obj_loc,
7704 class_offset,
7705 maybe_temp_loc,
7706 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007707 __ cmp(out, ShifterOperand(cls));
7708 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain3b359c72015-11-17 19:35:12 +00007709 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(instruction,
7710 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007711 codegen_->AddSlowPath(slow_path);
7712 __ b(slow_path->GetEntryLabel(), NE);
7713 __ LoadImmediate(out, 1);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007714 break;
7715 }
Roland Levillain3b359c72015-11-17 19:35:12 +00007716
Calin Juravle98893e12015-10-02 21:05:03 +01007717 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain3b359c72015-11-17 19:35:12 +00007718 case TypeCheckKind::kInterfaceCheck: {
7719 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007720 // into the slow path for the unresolved and interface check
Roland Levillain3b359c72015-11-17 19:35:12 +00007721 // cases.
7722 //
7723 // We cannot directly call the InstanceofNonTrivial runtime
7724 // entry point without resorting to a type checking slow path
7725 // here (i.e. by calling InvokeRuntime directly), as it would
7726      // require assigning fixed registers for the inputs of this
7727 // HInstanceOf instruction (following the runtime calling
7728 // convention), which might be cluttered by the potential first
7729 // read barrier emission at the beginning of this method.
Roland Levillainc9285912015-12-18 10:38:42 +00007730 //
7731 // TODO: Introduce a new runtime entry point taking the object
7732 // to test (instead of its class) as argument, and let it deal
7733 // with the read barrier issues. This will let us refactor this
7734 // case of the `switch` code as it was previously (with a direct
7735 // call to the runtime not using a type checking slow path).
7736 // This should also be beneficial for the other cases above.
Roland Levillain3b359c72015-11-17 19:35:12 +00007737 DCHECK(locations->OnlyCallsOnSlowPath());
7738 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(instruction,
7739 /* is_fatal */ false);
7740 codegen_->AddSlowPath(slow_path);
7741 __ b(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007742 break;
7743 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007744 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01007745
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007746 if (done.IsLinked()) {
7747 __ Bind(&done);
7748 }
7749
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007750 if (slow_path != nullptr) {
7751 __ Bind(slow_path->GetExitLabel());
7752 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00007753}
7754
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007755void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007756 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
7757 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
7758
Roland Levillain3b359c72015-11-17 19:35:12 +00007759 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
7760 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007761 case TypeCheckKind::kExactCheck:
7762 case TypeCheckKind::kAbstractClassCheck:
7763 case TypeCheckKind::kClassHierarchyCheck:
7764 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain3b359c72015-11-17 19:35:12 +00007765 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
7766 LocationSummary::kCallOnSlowPath :
7767 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007768 break;
7769 case TypeCheckKind::kArrayCheck:
Roland Levillain3b359c72015-11-17 19:35:12 +00007770 case TypeCheckKind::kUnresolvedCheck:
7771 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007772 call_kind = LocationSummary::kCallOnSlowPath;
7773 break;
7774 }
7775
Roland Levillain3b359c72015-11-17 19:35:12 +00007776 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
7777 locations->SetInAt(0, Location::RequiresRegister());
7778 locations->SetInAt(1, Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007779 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007780}
7781
7782void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
Roland Levillainc9285912015-12-18 10:38:42 +00007783 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007784 LocationSummary* locations = instruction->GetLocations();
Roland Levillain3b359c72015-11-17 19:35:12 +00007785 Location obj_loc = locations->InAt(0);
7786 Register obj = obj_loc.AsRegister<Register>();
Roland Levillain271ab9c2014-11-27 15:23:57 +00007787 Register cls = locations->InAt(1).AsRegister<Register>();
Roland Levillain3b359c72015-11-17 19:35:12 +00007788 Location temp_loc = locations->GetTemp(0);
7789 Register temp = temp_loc.AsRegister<Register>();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007790 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
7791 DCHECK_LE(num_temps, 3u);
7792 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
7793 Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
7794 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
7795 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
7796 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
7797 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
7798 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
7799 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
7800 const uint32_t object_array_data_offset =
7801 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007802
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007803  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
7804  // arising from false negatives; those false negatives may come from avoiding read barriers
7805  // below, which is done for performance and code size reasons.
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007806 bool is_type_check_slow_path_fatal = false;
7807 if (!kEmitCompilerReadBarrier) {
7808 is_type_check_slow_path_fatal =
7809 (type_check_kind == TypeCheckKind::kExactCheck ||
7810 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
7811 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
7812 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
7813 !instruction->CanThrowIntoCatchBlock();
7814 }
Artem Serovf4d6aee2016-07-11 10:41:45 +01007815 SlowPathCodeARM* type_check_slow_path =
Roland Levillain3b359c72015-11-17 19:35:12 +00007816 new (GetGraph()->GetArena()) TypeCheckSlowPathARM(instruction,
7817 is_type_check_slow_path_fatal);
7818 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007819
7820 Label done;
Anton Kirilov6f644202017-02-27 18:29:45 +00007821 Label* final_label = codegen_->GetFinalLabel(instruction, &done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007822 // Avoid null check if we know obj is not null.
7823 if (instruction->MustDoNullCheck()) {
Anton Kirilov6f644202017-02-27 18:29:45 +00007824 __ CompareAndBranchIfZero(obj, final_label);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007825 }
7826
Roland Levillain3b359c72015-11-17 19:35:12 +00007827 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007828 case TypeCheckKind::kExactCheck:
7829 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007830 // /* HeapReference<Class> */ temp = obj->klass_
7831 GenerateReferenceLoadTwoRegisters(instruction,
7832 temp_loc,
7833 obj_loc,
7834 class_offset,
7835 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007836 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007837
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007838 __ cmp(temp, ShifterOperand(cls));
7839 // Jump to slow path for throwing the exception or doing a
7840 // more involved array check.
Roland Levillain3b359c72015-11-17 19:35:12 +00007841 __ b(type_check_slow_path->GetEntryLabel(), NE);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007842 break;
7843 }
Roland Levillain3b359c72015-11-17 19:35:12 +00007844
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007845 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007846 // /* HeapReference<Class> */ temp = obj->klass_
7847 GenerateReferenceLoadTwoRegisters(instruction,
7848 temp_loc,
7849 obj_loc,
7850 class_offset,
7851 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007852 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007853
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007854 // If the class is abstract, we eagerly fetch the super class of the
7855 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007856 Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007857 __ Bind(&loop);
Roland Levillain3b359c72015-11-17 19:35:12 +00007858 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007859 GenerateReferenceLoadOneRegister(instruction,
7860 temp_loc,
7861 super_offset,
7862 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007863 kWithoutReadBarrier);
Roland Levillain3b359c72015-11-17 19:35:12 +00007864
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007865 // If the class reference currently in `temp` is null, jump to the slow path to throw the
7866 // exception.
7867 __ CompareAndBranchIfZero(temp, type_check_slow_path->GetEntryLabel());
Roland Levillain3b359c72015-11-17 19:35:12 +00007868
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007869 // Otherwise, compare the classes.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007870 __ cmp(temp, ShifterOperand(cls));
7871 __ b(&loop, NE);
7872 break;
7873 }
Roland Levillain3b359c72015-11-17 19:35:12 +00007874
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007875 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007876 // /* HeapReference<Class> */ temp = obj->klass_
7877 GenerateReferenceLoadTwoRegisters(instruction,
7878 temp_loc,
7879 obj_loc,
7880 class_offset,
7881 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007882 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007883
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007884 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007885 Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007886 __ Bind(&loop);
7887 __ cmp(temp, ShifterOperand(cls));
Anton Kirilov6f644202017-02-27 18:29:45 +00007888 __ b(final_label, EQ);
Roland Levillain3b359c72015-11-17 19:35:12 +00007889
Roland Levillain3b359c72015-11-17 19:35:12 +00007890 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007891 GenerateReferenceLoadOneRegister(instruction,
7892 temp_loc,
7893 super_offset,
7894 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007895 kWithoutReadBarrier);
Roland Levillain3b359c72015-11-17 19:35:12 +00007896
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007897 // If the class reference currently in `temp` is null, jump to the slow path to throw the
7898 // exception.
7899 __ CompareAndBranchIfZero(temp, type_check_slow_path->GetEntryLabel());
7900 // Otherwise, jump to the beginning of the loop.
7901 __ b(&loop);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007902 break;
7903 }
Roland Levillain3b359c72015-11-17 19:35:12 +00007904
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007905 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007906 // /* HeapReference<Class> */ temp = obj->klass_
7907 GenerateReferenceLoadTwoRegisters(instruction,
7908 temp_loc,
7909 obj_loc,
7910 class_offset,
7911 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007912 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007913
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01007914 // Do an exact check.
7915 __ cmp(temp, ShifterOperand(cls));
Anton Kirilov6f644202017-02-27 18:29:45 +00007916 __ b(final_label, EQ);
Roland Levillain3b359c72015-11-17 19:35:12 +00007917
7918 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain3b359c72015-11-17 19:35:12 +00007919 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007920 GenerateReferenceLoadOneRegister(instruction,
7921 temp_loc,
7922 component_offset,
7923 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007924 kWithoutReadBarrier);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007925 // If the component type is null, jump to the slow path to throw the exception.
7926 __ CompareAndBranchIfZero(temp, type_check_slow_path->GetEntryLabel());
7927      // Otherwise, the object is indeed an array. Further check that its component
7928      // type is not a primitive type (if it is, jump to the slow path below).
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007929 __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
Roland Levillain3b359c72015-11-17 19:35:12 +00007930 static_assert(Primitive::kPrimNot == 0, "Expected 0 for art::Primitive::kPrimNot");
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08007931 __ CompareAndBranchIfNonZero(temp, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007932 break;
7933 }
Roland Levillain3b359c72015-11-17 19:35:12 +00007934
Calin Juravle98893e12015-10-02 21:05:03 +01007935 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007936 // We always go into the type check slow path for the unresolved check case.
Roland Levillain3b359c72015-11-17 19:35:12 +00007937 // We cannot directly call the CheckCast runtime entry point
7938 // without resorting to a type checking slow path here (i.e. by
7939      // calling InvokeRuntime directly), as it would require
7940      // assigning fixed registers for the inputs of this HCheckCast
7941 // instruction (following the runtime calling convention), which
7942 // might be cluttered by the potential first read barrier
7943 // emission at the beginning of this method.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007944
Roland Levillain3b359c72015-11-17 19:35:12 +00007945 __ b(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007946 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007947
7948 case TypeCheckKind::kInterfaceCheck: {
7949      // Avoid read barriers to improve performance of the fast path. We cannot get false
7950 // positives by doing this.
7951 // /* HeapReference<Class> */ temp = obj->klass_
7952 GenerateReferenceLoadTwoRegisters(instruction,
7953 temp_loc,
7954 obj_loc,
7955 class_offset,
7956 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007957 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007958
7959 // /* HeapReference<Class> */ temp = temp->iftable_
7960 GenerateReferenceLoadTwoRegisters(instruction,
7961 temp_loc,
7962 temp_loc,
7963 iftable_offset,
7964 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007965 kWithoutReadBarrier);
Mathieu Chartier6beced42016-11-15 15:51:31 -08007966 // Iftable is never null.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007967 __ ldr(maybe_temp2_loc.AsRegister<Register>(), Address(temp, array_length_offset));
Mathieu Chartier6beced42016-11-15 15:51:31 -08007968 // Loop through the iftable and check if any class matches.
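      // The iftable is assumed to store (interface class, method array) pairs, which is why
      // the loop below steps `temp` by two heap references and decrements the remaining
      // length by 2 on every iteration.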
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007969 Label start_loop;
7970 __ Bind(&start_loop);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08007971 __ CompareAndBranchIfZero(maybe_temp2_loc.AsRegister<Register>(),
7972 type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007973 __ ldr(maybe_temp3_loc.AsRegister<Register>(), Address(temp, object_array_data_offset));
7974 __ MaybeUnpoisonHeapReference(maybe_temp3_loc.AsRegister<Register>());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007975 // Go to next interface.
7976 __ add(temp, temp, ShifterOperand(2 * kHeapReferenceSize));
7977 __ sub(maybe_temp2_loc.AsRegister<Register>(),
7978 maybe_temp2_loc.AsRegister<Register>(),
7979 ShifterOperand(2));
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08007980 // Compare the classes and continue the loop if they do not match.
7981 __ cmp(cls, ShifterOperand(maybe_temp3_loc.AsRegister<Register>()));
7982 __ b(&start_loop, NE);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07007983 break;
7984 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007985 }
Anton Kirilov6f644202017-02-27 18:29:45 +00007986
7987 if (done.IsLinked()) {
7988 __ Bind(&done);
7989 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00007990
Roland Levillain3b359c72015-11-17 19:35:12 +00007991 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00007992}
7993
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007994void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
7995 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01007996 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00007997 InvokeRuntimeCallingConvention calling_convention;
7998 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7999}
8000
8001void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01008002 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
8003 instruction,
8004 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00008005 if (instruction->IsEnter()) {
8006 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
8007 } else {
8008 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
8009 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00008010}
8011
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008012void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction, AND); }
8013void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction, ORR); }
8014void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction, EOR); }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008015
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008016void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction, Opcode opcode) {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008017 LocationSummary* locations =
8018 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
8019 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
8020 || instruction->GetResultType() == Primitive::kPrimLong);
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008021 // Note: GVN reorders commutative operations to have the constant on the right hand side.
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008022 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008023 locations->SetInAt(1, ArmEncodableConstantOrRegister(instruction->InputAt(1), opcode));
Nicolas Geoffray829280c2015-01-28 10:20:37 +00008024 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008025}
8026
8027void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
8028 HandleBitwiseOperation(instruction);
8029}
8030
8031void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
8032 HandleBitwiseOperation(instruction);
8033}
8034
8035void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
8036 HandleBitwiseOperation(instruction);
8037}
8038
Artem Serov7fc63502016-02-09 17:15:29 +00008039
8040void LocationsBuilderARM::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instruction) {
8041 LocationSummary* locations =
8042 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
8043 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
8044 || instruction->GetResultType() == Primitive::kPrimLong);
8045
8046 locations->SetInAt(0, Location::RequiresRegister());
8047 locations->SetInAt(1, Location::RequiresRegister());
8048 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8049}
8050
8051void InstructionCodeGeneratorARM::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instruction) {
8052 LocationSummary* locations = instruction->GetLocations();
8053 Location first = locations->InAt(0);
8054 Location second = locations->InAt(1);
8055 Location out = locations->Out();
8056
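  // BIC computes `first & ~second` and ORN computes `first | ~second`, matching the
  // negated-right-hand-side semantics of this instruction.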
8057 if (instruction->GetResultType() == Primitive::kPrimInt) {
8058 Register first_reg = first.AsRegister<Register>();
8059 ShifterOperand second_reg(second.AsRegister<Register>());
8060 Register out_reg = out.AsRegister<Register>();
8061
8062 switch (instruction->GetOpKind()) {
8063 case HInstruction::kAnd:
8064 __ bic(out_reg, first_reg, second_reg);
8065 break;
8066 case HInstruction::kOr:
8067 __ orn(out_reg, first_reg, second_reg);
8068 break;
8069 // There is no EON on arm.
8070 case HInstruction::kXor:
8071 default:
8072 LOG(FATAL) << "Unexpected instruction " << instruction->DebugName();
8073 UNREACHABLE();
8074 }
8075 return;
8076
8077 } else {
8078 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
8079 Register first_low = first.AsRegisterPairLow<Register>();
8080 Register first_high = first.AsRegisterPairHigh<Register>();
8081 ShifterOperand second_low(second.AsRegisterPairLow<Register>());
8082 ShifterOperand second_high(second.AsRegisterPairHigh<Register>());
8083 Register out_low = out.AsRegisterPairLow<Register>();
8084 Register out_high = out.AsRegisterPairHigh<Register>();
8085
8086 switch (instruction->GetOpKind()) {
8087 case HInstruction::kAnd:
8088 __ bic(out_low, first_low, second_low);
8089 __ bic(out_high, first_high, second_high);
8090 break;
8091 case HInstruction::kOr:
8092 __ orn(out_low, first_low, second_low);
8093 __ orn(out_high, first_high, second_high);
8094 break;
8095 // There is no EON on arm.
8096 case HInstruction::kXor:
8097 default:
8098 LOG(FATAL) << "Unexpected instruction " << instruction->DebugName();
8099 UNREACHABLE();
8100 }
8101 }
8102}
8103
Anton Kirilov74234da2017-01-13 14:42:47 +00008104void LocationsBuilderARM::VisitDataProcWithShifterOp(
8105 HDataProcWithShifterOp* instruction) {
8106 DCHECK(instruction->GetType() == Primitive::kPrimInt ||
8107 instruction->GetType() == Primitive::kPrimLong);
8108 LocationSummary* locations =
8109 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
8110 const bool overlap = instruction->GetType() == Primitive::kPrimLong &&
8111 HDataProcWithShifterOp::IsExtensionOp(instruction->GetOpKind());
8112
8113 locations->SetInAt(0, Location::RequiresRegister());
8114 locations->SetInAt(1, Location::RequiresRegister());
8115 locations->SetOut(Location::RequiresRegister(),
8116 overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap);
8117}
8118
8119void InstructionCodeGeneratorARM::VisitDataProcWithShifterOp(
8120 HDataProcWithShifterOp* instruction) {
8121 const LocationSummary* const locations = instruction->GetLocations();
8122 const HInstruction::InstructionKind kind = instruction->GetInstrKind();
8123 const HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
8124 const Location left = locations->InAt(0);
8125 const Location right = locations->InAt(1);
8126 const Location out = locations->Out();
8127
8128 if (instruction->GetType() == Primitive::kPrimInt) {
8129 DCHECK(!HDataProcWithShifterOp::IsExtensionOp(op_kind));
8130
8131 const Register second = instruction->InputAt(1)->GetType() == Primitive::kPrimLong
8132 ? right.AsRegisterPairLow<Register>()
8133 : right.AsRegister<Register>();
8134
8135 GenerateDataProcInstruction(kind,
8136 out.AsRegister<Register>(),
8137 left.AsRegister<Register>(),
8138 ShifterOperand(second,
8139 ShiftFromOpKind(op_kind),
8140 instruction->GetShiftAmount()),
8141 codegen_);
8142 } else {
8143 DCHECK_EQ(instruction->GetType(), Primitive::kPrimLong);
8144
8145 if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
8146 const Register second = right.AsRegister<Register>();
8147
8148 DCHECK_NE(out.AsRegisterPairLow<Register>(), second);
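      // Note on the operands built below: the high word of the right-hand side is
      // `second ASR #31`, i.e. bit 31 of `second` replicated across the whole word.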
8149 GenerateDataProc(kind,
8150 out,
8151 left,
8152 ShifterOperand(second),
8153 ShifterOperand(second, ASR, 31),
8154 codegen_);
8155 } else {
8156 GenerateLongDataProc(instruction, codegen_);
8157 }
8158 }
8159}
8160
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008161void InstructionCodeGeneratorARM::GenerateAndConst(Register out, Register first, uint32_t value) {
8162  // Optimize special cases for individual halves of `and-long` (`and` is simplified earlier).
8163 if (value == 0xffffffffu) {
8164 if (out != first) {
8165 __ mov(out, ShifterOperand(first));
8166 }
8167 return;
8168 }
8169 if (value == 0u) {
8170 __ mov(out, ShifterOperand(0));
8171 return;
8172 }
8173 ShifterOperand so;
8174 if (__ ShifterOperandCanHold(kNoRegister, kNoRegister, AND, value, &so)) {
8175 __ and_(out, first, so);
Anton Kiriloveffd5bf2017-02-28 16:59:15 +00008176 } else if (__ ShifterOperandCanHold(kNoRegister, kNoRegister, BIC, ~value, &so)) {
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008177 __ bic(out, first, ShifterOperand(~value));
Anton Kiriloveffd5bf2017-02-28 16:59:15 +00008178 } else {
8179 DCHECK(IsPowerOfTwo(value + 1));
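    // `value + 1` being a power of two means `value` is a contiguous mask of low bits, so
    // the AND reduces to a bitfield extract; e.g. value == 0x00ffffff gives a width of
    // WhichPowerOf2(value + 1) == 24, i.e. `ubfx out, first, #0, #24`.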
8180 __ ubfx(out, first, 0, WhichPowerOf2(value + 1));
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008181 }
8182}
8183
8184void InstructionCodeGeneratorARM::GenerateOrrConst(Register out, Register first, uint32_t value) {
8185  // Optimize special cases for individual halves of `or-long` (`or` is simplified earlier).
8186 if (value == 0u) {
8187 if (out != first) {
8188 __ mov(out, ShifterOperand(first));
8189 }
8190 return;
8191 }
8192 if (value == 0xffffffffu) {
8193 __ mvn(out, ShifterOperand(0));
8194 return;
8195 }
8196 ShifterOperand so;
8197 if (__ ShifterOperandCanHold(kNoRegister, kNoRegister, ORR, value, &so)) {
8198 __ orr(out, first, so);
8199 } else {
8200 DCHECK(__ ShifterOperandCanHold(kNoRegister, kNoRegister, ORN, ~value, &so));
8201 __ orn(out, first, ShifterOperand(~value));
8202 }
8203}
8204
8205void InstructionCodeGeneratorARM::GenerateEorConst(Register out, Register first, uint32_t value) {
8206  // Optimize special case for individual halves of `xor-long` (`xor` is simplified earlier).
8207 if (value == 0u) {
8208 if (out != first) {
8209 __ mov(out, ShifterOperand(first));
8210 }
8211 return;
8212 }
8213 __ eor(out, first, ShifterOperand(value));
8214}
8215
Vladimir Marko59751a72016-08-05 14:37:27 +01008216void InstructionCodeGeneratorARM::GenerateAddLongConst(Location out,
8217 Location first,
8218 uint64_t value) {
8219 Register out_low = out.AsRegisterPairLow<Register>();
8220 Register out_high = out.AsRegisterPairHigh<Register>();
8221 Register first_low = first.AsRegisterPairLow<Register>();
8222 Register first_high = first.AsRegisterPairHigh<Register>();
8223 uint32_t value_low = Low32Bits(value);
8224 uint32_t value_high = High32Bits(value);
8225 if (value_low == 0u) {
8226 if (out_low != first_low) {
8227 __ mov(out_low, ShifterOperand(first_low));
8228 }
8229 __ AddConstant(out_high, first_high, value_high);
8230 return;
8231 }
8232 __ AddConstantSetFlags(out_low, first_low, value_low);
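  // Fold the carry from the low-word addition into the high word: ADC adds `value_high`
  // plus the carry, and when that immediate cannot be encoded, SBC with `~value_high`
  // computes `first_high - ~value_high - (1 - carry)`, which yields the same 32-bit result.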
8233 ShifterOperand so;
8234 if (__ ShifterOperandCanHold(out_high, first_high, ADC, value_high, kCcDontCare, &so)) {
8235 __ adc(out_high, first_high, so);
8236 } else if (__ ShifterOperandCanHold(out_low, first_low, SBC, ~value_high, kCcDontCare, &so)) {
8237 __ sbc(out_high, first_high, so);
8238 } else {
8239 LOG(FATAL) << "Unexpected constant " << value_high;
8240 UNREACHABLE();
8241 }
8242}
8243
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008244void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
8245 LocationSummary* locations = instruction->GetLocations();
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008246 Location first = locations->InAt(0);
8247 Location second = locations->InAt(1);
8248 Location out = locations->Out();
8249
8250 if (second.IsConstant()) {
8251 uint64_t value = static_cast<uint64_t>(Int64FromConstant(second.GetConstant()));
8252 uint32_t value_low = Low32Bits(value);
8253 if (instruction->GetResultType() == Primitive::kPrimInt) {
8254 Register first_reg = first.AsRegister<Register>();
8255 Register out_reg = out.AsRegister<Register>();
8256 if (instruction->IsAnd()) {
8257 GenerateAndConst(out_reg, first_reg, value_low);
8258 } else if (instruction->IsOr()) {
8259 GenerateOrrConst(out_reg, first_reg, value_low);
8260 } else {
8261 DCHECK(instruction->IsXor());
8262 GenerateEorConst(out_reg, first_reg, value_low);
8263 }
8264 } else {
8265 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
8266 uint32_t value_high = High32Bits(value);
8267 Register first_low = first.AsRegisterPairLow<Register>();
8268 Register first_high = first.AsRegisterPairHigh<Register>();
8269 Register out_low = out.AsRegisterPairLow<Register>();
8270 Register out_high = out.AsRegisterPairHigh<Register>();
8271 if (instruction->IsAnd()) {
8272 GenerateAndConst(out_low, first_low, value_low);
8273 GenerateAndConst(out_high, first_high, value_high);
8274 } else if (instruction->IsOr()) {
8275 GenerateOrrConst(out_low, first_low, value_low);
8276 GenerateOrrConst(out_high, first_high, value_high);
8277 } else {
8278 DCHECK(instruction->IsXor());
8279 GenerateEorConst(out_low, first_low, value_low);
8280 GenerateEorConst(out_high, first_high, value_high);
8281 }
8282 }
8283 return;
8284 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008285
8286 if (instruction->GetResultType() == Primitive::kPrimInt) {
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008287 Register first_reg = first.AsRegister<Register>();
8288 ShifterOperand second_reg(second.AsRegister<Register>());
8289 Register out_reg = out.AsRegister<Register>();
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008290 if (instruction->IsAnd()) {
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008291 __ and_(out_reg, first_reg, second_reg);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008292 } else if (instruction->IsOr()) {
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008293 __ orr(out_reg, first_reg, second_reg);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008294 } else {
8295 DCHECK(instruction->IsXor());
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008296 __ eor(out_reg, first_reg, second_reg);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008297 }
8298 } else {
8299 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008300 Register first_low = first.AsRegisterPairLow<Register>();
8301 Register first_high = first.AsRegisterPairHigh<Register>();
8302 ShifterOperand second_low(second.AsRegisterPairLow<Register>());
8303 ShifterOperand second_high(second.AsRegisterPairHigh<Register>());
8304 Register out_low = out.AsRegisterPairLow<Register>();
8305 Register out_high = out.AsRegisterPairHigh<Register>();
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008306 if (instruction->IsAnd()) {
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008307 __ and_(out_low, first_low, second_low);
8308 __ and_(out_high, first_high, second_high);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008309 } else if (instruction->IsOr()) {
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008310 __ orr(out_low, first_low, second_low);
8311 __ orr(out_high, first_high, second_high);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008312 } else {
8313 DCHECK(instruction->IsXor());
Vladimir Markod2b4ca22015-09-14 15:13:26 +01008314 __ eor(out_low, first_low, second_low);
8315 __ eor(out_high, first_high, second_high);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00008316 }
8317 }
8318}
8319
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08008320void InstructionCodeGeneratorARM::GenerateReferenceLoadOneRegister(
8321 HInstruction* instruction,
8322 Location out,
8323 uint32_t offset,
8324 Location maybe_temp,
8325 ReadBarrierOption read_barrier_option) {
Roland Levillainc9285912015-12-18 10:38:42 +00008326 Register out_reg = out.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08008327 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08008328 CHECK(kEmitCompilerReadBarrier);
Roland Levillain95e7ffc2016-01-22 11:57:25 +00008329 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillainc9285912015-12-18 10:38:42 +00008330 if (kUseBakerReadBarrier) {
8331 // Load with fast path based Baker's read barrier.
8332 // /* HeapReference<Object> */ out = *(out + offset)
8333 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00008334 instruction, out, out_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillainc9285912015-12-18 10:38:42 +00008335 } else {
8336 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00008337 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillainc9285912015-12-18 10:38:42 +00008338 // in the following move operation, as we will need it for the
8339 // read barrier below.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00008340 __ Mov(maybe_temp.AsRegister<Register>(), out_reg);
Roland Levillainc9285912015-12-18 10:38:42 +00008341 // /* HeapReference<Object> */ out = *(out + offset)
8342 __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
Roland Levillain95e7ffc2016-01-22 11:57:25 +00008343 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillainc9285912015-12-18 10:38:42 +00008344 }
8345 } else {
8346 // Plain load with no read barrier.
8347 // /* HeapReference<Object> */ out = *(out + offset)
8348 __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
8349 __ MaybeUnpoisonHeapReference(out_reg);
8350 }
8351}
8352
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08008353void InstructionCodeGeneratorARM::GenerateReferenceLoadTwoRegisters(
8354 HInstruction* instruction,
8355 Location out,
8356 Location obj,
8357 uint32_t offset,
8358 Location maybe_temp,
8359 ReadBarrierOption read_barrier_option) {
Roland Levillainc9285912015-12-18 10:38:42 +00008360 Register out_reg = out.AsRegister<Register>();
8361 Register obj_reg = obj.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08008362 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08008363 CHECK(kEmitCompilerReadBarrier);
Roland Levillainc9285912015-12-18 10:38:42 +00008364 if (kUseBakerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00008365 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillainc9285912015-12-18 10:38:42 +00008366 // Load with fast path based Baker's read barrier.
8367 // /* HeapReference<Object> */ out = *(obj + offset)
8368 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00008369 instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillainc9285912015-12-18 10:38:42 +00008370 } else {
8371 // Load with slow path based read barrier.
8372 // /* HeapReference<Object> */ out = *(obj + offset)
8373 __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
8374 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
8375 }
8376 } else {
8377 // Plain load with no read barrier.
8378 // /* HeapReference<Object> */ out = *(obj + offset)
8379 __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
8380 __ MaybeUnpoisonHeapReference(out_reg);
8381 }
8382}
8383
8384void InstructionCodeGeneratorARM::GenerateGcRootFieldLoad(HInstruction* instruction,
8385 Location root,
8386 Register obj,
Mathieu Chartier31b12e32016-09-02 17:11:57 -07008387 uint32_t offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08008388 ReadBarrierOption read_barrier_option) {
Roland Levillainc9285912015-12-18 10:38:42 +00008389 Register root_reg = root.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08008390 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07008391 DCHECK(kEmitCompilerReadBarrier);
Roland Levillainc9285912015-12-18 10:38:42 +00008392 if (kUseBakerReadBarrier) {
8393 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
Roland Levillainba650a42017-03-06 13:52:32 +00008394      // Baker's read barriers are used.
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008395 if (kBakerReadBarrierLinkTimeThunksEnableForGcRoots &&
8396 !Runtime::Current()->UseJitCompilation()) {
8397 // Note that we do not actually check the value of `GetIsGcMarking()`
8398 // to decide whether to mark the loaded GC root or not. Instead, we
8399 // load into `temp` (actually kBakerCcEntrypointRegister) the read
8400 // barrier mark introspection entrypoint. If `temp` is null, it means
8401 // that `GetIsGcMarking()` is false, and vice versa.
8402 //
8403 // We use link-time generated thunks for the slow path. That thunk
8404 // checks the reference and jumps to the entrypoint if needed.
8405 //
8406 // temp = Thread::Current()->pReadBarrierMarkIntrospection
8407 // lr = &return_address;
8408 // GcRoot<mirror::Object> root = *(obj+offset); // Original reference load.
8409 // if (temp != nullptr) {
8410 // goto gc_root_thunk<root_reg>(lr)
8411 // }
8412 // return_address:
Roland Levillainc9285912015-12-18 10:38:42 +00008413
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008414 CheckLastTempIsBakerCcEntrypointRegister(instruction);
Vladimir Marko88abba22017-05-03 17:09:25 +01008415 bool narrow = CanEmitNarrowLdr(root_reg, obj, offset);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008416 uint32_t custom_data =
Vladimir Marko88abba22017-05-03 17:09:25 +01008417 linker::Thumb2RelativePatcher::EncodeBakerReadBarrierGcRootData(root_reg, narrow);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008418 Label* bne_label = codegen_->NewBakerReadBarrierPatch(custom_data);
Roland Levillainba650a42017-03-06 13:52:32 +00008419
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008420 // entrypoint_reg =
8421 // Thread::Current()->pReadBarrierMarkReg12, i.e. pReadBarrierMarkIntrospection.
8422 DCHECK_EQ(IP, 12);
8423 const int32_t entry_point_offset =
8424 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArmPointerSize>(IP);
8425 __ LoadFromOffset(kLoadWord, kBakerCcEntrypointRegister, TR, entry_point_offset);
Roland Levillainba650a42017-03-06 13:52:32 +00008426
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008427 Label return_address;
8428 __ AdrCode(LR, &return_address);
8429 __ CmpConstant(kBakerCcEntrypointRegister, 0);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008430 // Currently the offset is always within range. If that changes,
8431 // we shall have to split the load the same way as for fields.
8432 DCHECK_LT(offset, kReferenceLoadMinFarOffset);
Vladimir Marko88abba22017-05-03 17:09:25 +01008433 DCHECK(!down_cast<Thumb2Assembler*>(GetAssembler())->IsForced32Bit());
8434 ScopedForce32Bit maybe_force_32bit(down_cast<Thumb2Assembler*>(GetAssembler()), !narrow);
8435 int old_position = GetAssembler()->GetBuffer()->GetPosition();
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008436 __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
8437 EmitPlaceholderBne(codegen_, bne_label);
8438 __ Bind(&return_address);
Vladimir Marko88abba22017-05-03 17:09:25 +01008439 DCHECK_EQ(old_position - GetAssembler()->GetBuffer()->GetPosition(),
8440 narrow ? BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_NARROW_OFFSET
8441 : BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_WIDE_OFFSET);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008442 } else {
8443 // Note that we do not actually check the value of
8444 // `GetIsGcMarking()` to decide whether to mark the loaded GC
8445 // root or not. Instead, we load into `temp` the read barrier
8446 // mark entry point corresponding to register `root`. If `temp`
8447 // is null, it means that `GetIsGcMarking()` is false, and vice
8448 // versa.
8449 //
8450 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
8451 // GcRoot<mirror::Object> root = *(obj+offset); // Original reference load.
8452 // if (temp != nullptr) { // <=> Thread::Current()->GetIsGcMarking()
8453 // // Slow path.
8454 // root = temp(root); // root = ReadBarrier::Mark(root); // Runtime entry point call.
8455 // }
Roland Levillainc9285912015-12-18 10:38:42 +00008456
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008457 // Slow path marking the GC root `root`. The entrypoint will already be loaded in `temp`.
8458 Location temp = Location::RegisterLocation(LR);
8459 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM(
8460 instruction, root, /* entrypoint */ temp);
8461 codegen_->AddSlowPath(slow_path);
8462
8463 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
8464 const int32_t entry_point_offset =
8465 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArmPointerSize>(root.reg());
8466 // Loading the entrypoint does not require a load acquire since it is only changed when
8467 // threads are suspended or running a checkpoint.
8468 __ LoadFromOffset(kLoadWord, temp.AsRegister<Register>(), TR, entry_point_offset);
8469
8470 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
8471 __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
8472 static_assert(
8473 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
8474 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
8475 "have different sizes.");
8476 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
8477 "art::mirror::CompressedReference<mirror::Object> and int32_t "
8478 "have different sizes.");
8479
8480        // The entrypoint is null when the GC is not marking; this prevents one load compared to
8481 // checking GetIsGcMarking.
8482 __ CompareAndBranchIfNonZero(temp.AsRegister<Register>(), slow_path->GetEntryLabel());
8483 __ Bind(slow_path->GetExitLabel());
8484 }
Roland Levillainc9285912015-12-18 10:38:42 +00008485 } else {
8486 // GC root loaded through a slow path for read barriers other
8487 // than Baker's.
8488 // /* GcRoot<mirror::Object>* */ root = obj + offset
8489 __ AddConstant(root_reg, obj, offset);
8490 // /* mirror::Object* */ root = root->Read()
8491 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
8492 }
8493 } else {
8494 // Plain GC root load with no read barrier.
8495 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
8496 __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
8497 // Note that GC roots are not affected by heap poisoning, thus we
8498 // do not have to unpoison `root_reg` here.
8499 }
8500}
8501
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008502void CodeGeneratorARM::MaybeAddBakerCcEntrypointTempForFields(LocationSummary* locations) {
8503 DCHECK(kEmitCompilerReadBarrier);
8504 DCHECK(kUseBakerReadBarrier);
8505 if (kBakerReadBarrierLinkTimeThunksEnableForFields) {
8506 if (!Runtime::Current()->UseJitCompilation()) {
8507 locations->AddTemp(Location::RegisterLocation(kBakerCcEntrypointRegister));
8508 }
8509 }
8510}
8511
Roland Levillainc9285912015-12-18 10:38:42 +00008512void CodeGeneratorARM::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
8513 Location ref,
8514 Register obj,
8515 uint32_t offset,
8516 Location temp,
8517 bool needs_null_check) {
8518 DCHECK(kEmitCompilerReadBarrier);
8519 DCHECK(kUseBakerReadBarrier);
8520
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008521 if (kBakerReadBarrierLinkTimeThunksEnableForFields &&
8522 !Runtime::Current()->UseJitCompilation()) {
8523 // Note that we do not actually check the value of `GetIsGcMarking()`
8524 // to decide whether to mark the loaded reference or not. Instead, we
8525 // load into `temp` (actually kBakerCcEntrypointRegister) the read
8526 // barrier mark introspection entrypoint. If `temp` is null, it means
8527 // that `GetIsGcMarking()` is false, and vice versa.
8528 //
8529 // We use link-time generated thunks for the slow path. That thunk checks
8530 // the holder and jumps to the entrypoint if needed. If the holder is not
8531 // gray, it creates a fake dependency and returns to the LDR instruction.
8532 //
8533 // temp = Thread::Current()->pReadBarrierMarkIntrospection
8534 // lr = &gray_return_address;
8535 // if (temp != nullptr) {
8536 // goto field_thunk<holder_reg, base_reg>(lr)
8537 // }
8538 // not_gray_return_address:
8539 // // Original reference load. If the offset is too large to fit
8540 // // into LDR, we use an adjusted base register here.
Vladimir Marko88abba22017-05-03 17:09:25 +01008541 // HeapReference<mirror::Object> reference = *(obj+offset);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008542 // gray_return_address:
8543
8544 DCHECK_ALIGNED(offset, sizeof(mirror::HeapReference<mirror::Object>));
Vladimir Marko88abba22017-05-03 17:09:25 +01008545 Register ref_reg = ref.AsRegister<Register>();
8546 bool narrow = CanEmitNarrowLdr(ref_reg, obj, offset);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008547 Register base = obj;
8548 if (offset >= kReferenceLoadMinFarOffset) {
8549 base = temp.AsRegister<Register>();
8550 DCHECK_NE(base, kBakerCcEntrypointRegister);
8551 static_assert(IsPowerOfTwo(kReferenceLoadMinFarOffset), "Expecting a power of 2.");
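      // Split the offset into a kReferenceLoadMinFarOffset-aligned part folded into `base`
      // and a small remainder that still fits the immediate of the LDR emitted below.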
8552 __ AddConstant(base, obj, offset & ~(kReferenceLoadMinFarOffset - 1u));
8553 offset &= (kReferenceLoadMinFarOffset - 1u);
Vladimir Marko88abba22017-05-03 17:09:25 +01008554 // Use narrow LDR only for small offsets. Generating narrow encoding LDR for the large
8555 // offsets with `(offset & (kReferenceLoadMinFarOffset - 1u)) < 32u` would most likely
8556 // increase the overall code size when taking the generated thunks into account.
8557 DCHECK(!narrow);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008558 }
8559 CheckLastTempIsBakerCcEntrypointRegister(instruction);
8560 uint32_t custom_data =
Vladimir Marko88abba22017-05-03 17:09:25 +01008561 linker::Thumb2RelativePatcher::EncodeBakerReadBarrierFieldData(base, obj, narrow);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008562 Label* bne_label = NewBakerReadBarrierPatch(custom_data);
8563
8564 // entrypoint_reg =
8565 // Thread::Current()->pReadBarrierMarkReg12, i.e. pReadBarrierMarkIntrospection.
8566 DCHECK_EQ(IP, 12);
8567 const int32_t entry_point_offset =
8568 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArmPointerSize>(IP);
8569 __ LoadFromOffset(kLoadWord, kBakerCcEntrypointRegister, TR, entry_point_offset);
8570
8571 Label return_address;
8572 __ AdrCode(LR, &return_address);
8573 __ CmpConstant(kBakerCcEntrypointRegister, 0);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008574 EmitPlaceholderBne(this, bne_label);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008575 DCHECK_LT(offset, kReferenceLoadMinFarOffset);
Vladimir Marko88abba22017-05-03 17:09:25 +01008576 DCHECK(!down_cast<Thumb2Assembler*>(GetAssembler())->IsForced32Bit());
8577 ScopedForce32Bit maybe_force_32bit(down_cast<Thumb2Assembler*>(GetAssembler()), !narrow);
8578 int old_position = GetAssembler()->GetBuffer()->GetPosition();
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008579 __ LoadFromOffset(kLoadWord, ref_reg, base, offset);
8580 if (needs_null_check) {
8581 MaybeRecordImplicitNullCheck(instruction);
8582 }
8583 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
8584 __ Bind(&return_address);
Vladimir Marko88abba22017-05-03 17:09:25 +01008585 DCHECK_EQ(old_position - GetAssembler()->GetBuffer()->GetPosition(),
8586 narrow ? BAKER_MARK_INTROSPECTION_FIELD_LDR_NARROW_OFFSET
8587 : BAKER_MARK_INTROSPECTION_FIELD_LDR_WIDE_OFFSET);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008588 return;
8589 }
8590
Roland Levillainc9285912015-12-18 10:38:42 +00008591 // /* HeapReference<Object> */ ref = *(obj + offset)
8592 Location no_index = Location::NoLocation();
Roland Levillainbfea3352016-06-23 13:48:47 +01008593 ScaleFactor no_scale_factor = TIMES_1;
Roland Levillainc9285912015-12-18 10:38:42 +00008594 GenerateReferenceLoadWithBakerReadBarrier(
Roland Levillainbfea3352016-06-23 13:48:47 +01008595 instruction, ref, obj, offset, no_index, no_scale_factor, temp, needs_null_check);
Roland Levillainc9285912015-12-18 10:38:42 +00008596}
8597
8598void CodeGeneratorARM::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
8599 Location ref,
8600 Register obj,
8601 uint32_t data_offset,
8602 Location index,
8603 Location temp,
8604 bool needs_null_check) {
8605 DCHECK(kEmitCompilerReadBarrier);
8606 DCHECK(kUseBakerReadBarrier);
8607
Roland Levillainbfea3352016-06-23 13:48:47 +01008608 static_assert(
8609 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
8610 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008611 ScaleFactor scale_factor = TIMES_4;
8612
8613 if (kBakerReadBarrierLinkTimeThunksEnableForArrays &&
8614 !Runtime::Current()->UseJitCompilation()) {
8615 // Note that we do not actually check the value of `GetIsGcMarking()`
8616 // to decide whether to mark the loaded reference or not. Instead, we
8617 // load into `temp` (actually kBakerCcEntrypointRegister) the read
8618 // barrier mark introspection entrypoint. If `temp` is null, it means
8619 // that `GetIsGcMarking()` is false, and vice versa.
8620 //
8621 // We use link-time generated thunks for the slow path. That thunk checks
8622 // the holder and jumps to the entrypoint if needed. If the holder is not
8623 // gray, it creates a fake dependency and returns to the LDR instruction.
8624 //
8625 // temp = Thread::Current()->pReadBarrierMarkIntrospection
8626 // lr = &gray_return_address;
8627 // if (temp != nullptr) {
8628 // goto array_thunk&lt;base_reg&gt;(lr)
8629 // }
8630 // not_gray_return_address:
8631 // // Original reference load. If the offset is too large to fit
8632 // // into LDR, we use an adjusted base register here.
Vladimir Marko88abba22017-05-03 17:09:25 +01008633 // HeapReference<mirror::Object> reference = data[index];
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008634 // gray_return_address:
8635
8636 DCHECK(index.IsValid());
8637 Register index_reg = index.AsRegister<Register>();
8638 Register ref_reg = ref.AsRegister<Register>();
8639 Register data_reg = temp.AsRegister<Register>();
8640 DCHECK_NE(data_reg, kBakerCcEntrypointRegister);
8641
8642 CheckLastTempIsBakerCcEntrypointRegister(instruction);
8643 uint32_t custom_data =
8644 linker::Thumb2RelativePatcher::EncodeBakerReadBarrierArrayData(data_reg);
8645 Label* bne_label = NewBakerReadBarrierPatch(custom_data);
8646
8647 // entrypoint_reg =
8648 // Thread::Current()->pReadBarrierMarkReg12, i.e. pReadBarrierMarkIntrospection.
8649 DCHECK_EQ(IP, 12);
8650 const int32_t entry_point_offset =
8651 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArmPointerSize>(IP);
8652 __ LoadFromOffset(kLoadWord, kBakerCcEntrypointRegister, TR, entry_point_offset);
8653 __ AddConstant(data_reg, obj, data_offset);
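    // `data_reg` now points at the first array element; the link-time array thunk
    // identified by `custom_data` (which encodes `data_reg`) performs the gray-bit check
    // and, when needed, marks the reference loaded by the LDR below.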
8654
8655 Label return_address;
8656 __ AdrCode(LR, &return_address);
8657 __ CmpConstant(kBakerCcEntrypointRegister, 0);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008658 EmitPlaceholderBne(this, bne_label);
Vladimir Marko88abba22017-05-03 17:09:25 +01008659 ScopedForce32Bit maybe_force_32bit(down_cast<Thumb2Assembler*>(GetAssembler()));
8660 int old_position = GetAssembler()->GetBuffer()->GetPosition();
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008661 __ ldr(ref_reg, Address(data_reg, index_reg, LSL, scale_factor));
8662 DCHECK(!needs_null_check); // The thunk cannot handle the null check.
8663 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
8664 __ Bind(&return_address);
Vladimir Marko88abba22017-05-03 17:09:25 +01008665 DCHECK_EQ(old_position - GetAssembler()->GetBuffer()->GetPosition(),
8666 BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01008667 return;
8668 }
8669
Roland Levillainc9285912015-12-18 10:38:42 +00008670 // /* HeapReference<Object> */ ref =
8671 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
8672 GenerateReferenceLoadWithBakerReadBarrier(
Roland Levillainbfea3352016-06-23 13:48:47 +01008673 instruction, ref, obj, data_offset, index, scale_factor, temp, needs_null_check);
Roland Levillainc9285912015-12-18 10:38:42 +00008674}
8675
8676void CodeGeneratorARM::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
8677 Location ref,
8678 Register obj,
8679 uint32_t offset,
8680 Location index,
Roland Levillainbfea3352016-06-23 13:48:47 +01008681 ScaleFactor scale_factor,
Roland Levillainc9285912015-12-18 10:38:42 +00008682 Location temp,
Roland Levillainff487002017-03-07 16:50:01 +00008683 bool needs_null_check) {
Roland Levillainc9285912015-12-18 10:38:42 +00008684 DCHECK(kEmitCompilerReadBarrier);
8685 DCHECK(kUseBakerReadBarrier);
8686
Roland Levillain54f869e2017-03-06 13:54:11 +00008687 // Query `art::Thread::Current()->GetIsGcMarking()` to decide
8688 // whether we need to enter the slow path to mark the reference.
8689 // Then, in the slow path, check the gray bit in the lock word of
8690 // the reference's holder (`obj`) to decide whether to mark `ref` or
8691 // not.
Roland Levillainc9285912015-12-18 10:38:42 +00008692 //
Roland Levillainba650a42017-03-06 13:52:32 +00008693 // Note that we do not actually check the value of `GetIsGcMarking()`;
Roland Levillainff487002017-03-07 16:50:01 +00008694 // instead, we load into `temp2` the read barrier mark entry point
8695 // corresponding to register `ref`. If `temp2` is null, it means
8696 // that `GetIsGcMarking()` is false, and vice versa.
8697 //
8698 // temp2 = Thread::Current()->pReadBarrierMarkReg ## ref.reg()
8699 // if (temp2 != nullptr) { // <=> Thread::Current()->GetIsGcMarking()
8700 // // Slow path.
8701 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
8702 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
8703 // HeapReference<mirror::Object> ref = *src; // Original reference load.
8704 // bool is_gray = (rb_state == ReadBarrier::GrayState());
8705 // if (is_gray) {
8706 // ref = temp2(ref); // ref = ReadBarrier::Mark(ref); // Runtime entry point call.
8707 // }
8708 // } else {
8709 // HeapReference<mirror::Object> ref = *src; // Original reference load.
8710 // }
8711
8712 Register temp_reg = temp.AsRegister<Register>();
8713
8714 // Slow path marking the object `ref` when the GC is marking. The
8715 // entrypoint will already be loaded in `temp2`.
8716 Location temp2 = Location::RegisterLocation(LR);
8717 SlowPathCodeARM* slow_path =
8718 new (GetGraph()->GetArena()) LoadReferenceWithBakerReadBarrierSlowPathARM(
8719 instruction,
8720 ref,
8721 obj,
8722 offset,
8723 index,
8724 scale_factor,
8725 needs_null_check,
8726 temp_reg,
8727 /* entrypoint */ temp2);
8728 AddSlowPath(slow_path);
8729
8730 // temp2 = Thread::Current()->pReadBarrierMarkReg ## ref.reg()
8731 const int32_t entry_point_offset =
8732 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArmPointerSize>(ref.reg());
8733 // Loading the entrypoint does not require a load acquire since it is only changed when
8734 // threads are suspended or running a checkpoint.
8735 __ LoadFromOffset(kLoadWord, temp2.AsRegister<Register>(), TR, entry_point_offset);
8736 // The entrypoint is null when the GC is not marking; this prevents one load compared to
8737 // checking GetIsGcMarking().
8738 __ CompareAndBranchIfNonZero(temp2.AsRegister<Register>(), slow_path->GetEntryLabel());
8739 // Fast path: the GC is not marking: just load the reference.
8740 GenerateRawReferenceLoad(instruction, ref, obj, offset, index, scale_factor, needs_null_check);
8741 __ Bind(slow_path->GetExitLabel());
8742}
8743
8744void CodeGeneratorARM::UpdateReferenceFieldWithBakerReadBarrier(HInstruction* instruction,
8745 Location ref,
8746 Register obj,
8747 Location field_offset,
8748 Location temp,
8749 bool needs_null_check,
8750 Register temp2) {
8751 DCHECK(kEmitCompilerReadBarrier);
8752 DCHECK(kUseBakerReadBarrier);
8753
8754 // Query `art::Thread::Current()->GetIsGcMarking()` to decide
8755 // whether we need to enter the slow path to update the reference
8756 // field within `obj`. Then, in the slow path, check the gray bit
8757 // in the lock word of the reference's holder (`obj`) to decide
8758 // whether to mark `ref` and update the field or not.
8759 //
8760 // Note that we do not actually check the value of `GetIsGcMarking()`;
Roland Levillainba650a42017-03-06 13:52:32 +00008761 // instead, we load into `temp3` the read barrier mark entry point
8762 // corresponding to register `ref`. If `temp3` is null, it means
8763 // that `GetIsGcMarking()` is false, and vice versa.
8764 //
8765 // temp3 = Thread::Current()->pReadBarrierMarkReg ## ref.reg()
Roland Levillainba650a42017-03-06 13:52:32 +00008766 // if (temp3 != nullptr) { // <=> Thread::Current()->GetIsGcMarking()
8767 // // Slow path.
Roland Levillain54f869e2017-03-06 13:54:11 +00008768 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
8769 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
8770 // HeapReference<mirror::Object> ref = *src; // Original reference load.
8771 // bool is_gray = (rb_state == ReadBarrier::GrayState());
8772 // if (is_gray) {
Roland Levillainff487002017-03-07 16:50:01 +00008773 // old_ref = ref;
Roland Levillain54f869e2017-03-06 13:54:11 +00008774 // ref = temp3(ref); // ref = ReadBarrier::Mark(ref); // Runtime entry point call.
Roland Levillainff487002017-03-07 16:50:01 +00008775 // compareAndSwapObject(obj, field_offset, old_ref, ref);
Roland Levillain54f869e2017-03-06 13:54:11 +00008776 // }
Roland Levillainc9285912015-12-18 10:38:42 +00008777 // }
Roland Levillainc9285912015-12-18 10:38:42 +00008778
Roland Levillain35345a52017-02-27 14:32:08 +00008779 Register temp_reg = temp.AsRegister<Register>();
Roland Levillain1372c9f2017-01-13 11:47:39 +00008780
Roland Levillainff487002017-03-07 16:50:01 +00008781 // Slow path updating the object reference at address `obj +
8782 // field_offset` when the GC is marking. The entrypoint will already
8783 // be loaded in `temp3`.
Roland Levillainba650a42017-03-06 13:52:32 +00008784 Location temp3 = Location::RegisterLocation(LR);
Roland Levillainff487002017-03-07 16:50:01 +00008785 SlowPathCodeARM* slow_path =
8786 new (GetGraph()->GetArena()) LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM(
8787 instruction,
8788 ref,
8789 obj,
8790 /* offset */ 0u,
8791 /* index */ field_offset,
8792 /* scale_factor */ ScaleFactor::TIMES_1,
8793 needs_null_check,
8794 temp_reg,
8795 temp2,
8796 /* entrypoint */ temp3);
Roland Levillainba650a42017-03-06 13:52:32 +00008797 AddSlowPath(slow_path);
Roland Levillain35345a52017-02-27 14:32:08 +00008798
Roland Levillainba650a42017-03-06 13:52:32 +00008799 // temp3 = Thread::Current()->pReadBarrierMarkReg ## ref.reg()
8800 const int32_t entry_point_offset =
8801 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArmPointerSize>(ref.reg());
8802 // Loading the entrypoint does not require a load acquire since it is only changed when
8803 // threads are suspended or running a checkpoint.
8804 __ LoadFromOffset(kLoadWord, temp3.AsRegister<Register>(), TR, entry_point_offset);
Roland Levillainba650a42017-03-06 13:52:32 +00008805 // The entrypoint is null when the GC is not marking; this prevents one load compared to
8806 // checking GetIsGcMarking().
8807 __ CompareAndBranchIfNonZero(temp3.AsRegister<Register>(), slow_path->GetEntryLabel());
Roland Levillainff487002017-03-07 16:50:01 +00008808 // Fast path: the GC is not marking: nothing to do (the field is
8809 // up-to-date, and we don't need to load the reference).
Roland Levillainba650a42017-03-06 13:52:32 +00008810 __ Bind(slow_path->GetExitLabel());
8811}
Roland Levillain35345a52017-02-27 14:32:08 +00008812
Roland Levillainba650a42017-03-06 13:52:32 +00008813void CodeGeneratorARM::GenerateRawReferenceLoad(HInstruction* instruction,
8814 Location ref,
8815 Register obj,
8816 uint32_t offset,
8817 Location index,
8818 ScaleFactor scale_factor,
8819 bool needs_null_check) {
8820 Register ref_reg = ref.AsRegister<Register>();
8821
Roland Levillainc9285912015-12-18 10:38:42 +00008822 if (index.IsValid()) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01008823 // Load types involving an "index": ArrayGet,
8824 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
8825 // intrinsics.
Roland Levillainba650a42017-03-06 13:52:32 +00008826 // /* HeapReference<mirror::Object> */ ref = *(obj + offset + (index << scale_factor))
Roland Levillainc9285912015-12-18 10:38:42 +00008827 if (index.IsConstant()) {
8828 size_t computed_offset =
Roland Levillainbfea3352016-06-23 13:48:47 +01008829 (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
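      // For example, an ArrayGet of references at constant index 3 with `offset` 12 and
      // TIMES_4 scaling yields computed_offset = (3 << 2) + 12 = 24.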
Roland Levillainc9285912015-12-18 10:38:42 +00008830 __ LoadFromOffset(kLoadWord, ref_reg, obj, computed_offset);
8831 } else {
Roland Levillainbfea3352016-06-23 13:48:47 +01008832 // Handle the special case of the
Roland Levillaina1aa3b12016-10-26 13:03:38 +01008833 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
8834 // intrinsics, which use a register pair as index ("long
8835 // offset"), of which only the low part contains data.
Roland Levillainbfea3352016-06-23 13:48:47 +01008836 Register index_reg = index.IsRegisterPair()
8837 ? index.AsRegisterPairLow<Register>()
8838 : index.AsRegister<Register>();
8839 __ add(IP, obj, ShifterOperand(index_reg, LSL, scale_factor));
Roland Levillainc9285912015-12-18 10:38:42 +00008840 __ LoadFromOffset(kLoadWord, ref_reg, IP, offset);
8841 }
8842 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00008843 // /* HeapReference<mirror::Object> */ ref = *(obj + offset)
Roland Levillainc9285912015-12-18 10:38:42 +00008844 __ LoadFromOffset(kLoadWord, ref_reg, obj, offset);
8845 }
8846
Roland Levillainba650a42017-03-06 13:52:32 +00008847 if (needs_null_check) {
8848 MaybeRecordImplicitNullCheck(instruction);
8849 }
8850
Roland Levillainc9285912015-12-18 10:38:42 +00008851 // Object* ref = ref_addr->AsMirrorPtr()
8852 __ MaybeUnpoisonHeapReference(ref_reg);
Roland Levillainc9285912015-12-18 10:38:42 +00008853}
8854
8855void CodeGeneratorARM::GenerateReadBarrierSlow(HInstruction* instruction,
8856 Location out,
8857 Location ref,
8858 Location obj,
8859 uint32_t offset,
8860 Location index) {
Roland Levillain3b359c72015-11-17 19:35:12 +00008861 DCHECK(kEmitCompilerReadBarrier);
8862
Roland Levillainc9285912015-12-18 10:38:42 +00008863 // Insert a slow path based read barrier *after* the reference load.
8864 //
Roland Levillain3b359c72015-11-17 19:35:12 +00008865 // If heap poisoning is enabled, the unpoisoning of the loaded
8866 // reference will be carried out by the runtime within the slow
8867 // path.
8868 //
8869 // Note that `ref` currently does not get unpoisoned (when heap
8870 // poisoning is enabled), which is alright as the `ref` argument is
8871 // not used by the artReadBarrierSlow entry point.
8872 //
8873 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Artem Serovf4d6aee2016-07-11 10:41:45 +01008874 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena())
Roland Levillain3b359c72015-11-17 19:35:12 +00008875 ReadBarrierForHeapReferenceSlowPathARM(instruction, out, ref, obj, offset, index);
8876 AddSlowPath(slow_path);
8877
Roland Levillain3b359c72015-11-17 19:35:12 +00008878 __ b(slow_path->GetEntryLabel());
8879 __ Bind(slow_path->GetExitLabel());
8880}
8881
Roland Levillainc9285912015-12-18 10:38:42 +00008882void CodeGeneratorARM::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
8883 Location out,
8884 Location ref,
8885 Location obj,
8886 uint32_t offset,
8887 Location index) {
Roland Levillain3b359c72015-11-17 19:35:12 +00008888 if (kEmitCompilerReadBarrier) {
Roland Levillainc9285912015-12-18 10:38:42 +00008889 // Baker's read barriers shall be handled by the fast path
8890 // (CodeGeneratorARM::GenerateReferenceLoadWithBakerReadBarrier).
8891 DCHECK(!kUseBakerReadBarrier);
Roland Levillain3b359c72015-11-17 19:35:12 +00008892 // If heap poisoning is enabled, unpoisoning will be taken care of
8893 // by the runtime within the slow path.
Roland Levillainc9285912015-12-18 10:38:42 +00008894 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain3b359c72015-11-17 19:35:12 +00008895 } else if (kPoisonHeapReferences) {
8896 __ UnpoisonHeapReference(out.AsRegister<Register>());
8897 }
8898}
8899
Roland Levillainc9285912015-12-18 10:38:42 +00008900void CodeGeneratorARM::GenerateReadBarrierForRootSlow(HInstruction* instruction,
8901 Location out,
8902 Location root) {
Roland Levillain3b359c72015-11-17 19:35:12 +00008903 DCHECK(kEmitCompilerReadBarrier);
8904
Roland Levillainc9285912015-12-18 10:38:42 +00008905 // Insert a slow path based read barrier *after* the GC root load.
8906 //
Roland Levillain3b359c72015-11-17 19:35:12 +00008907 // Note that GC roots are not affected by heap poisoning, so we do
8908 // not need to do anything special for this here.
Artem Serovf4d6aee2016-07-11 10:41:45 +01008909 SlowPathCodeARM* slow_path =
Roland Levillain3b359c72015-11-17 19:35:12 +00008910 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM(instruction, out, root);
8911 AddSlowPath(slow_path);
8912
Roland Levillain3b359c72015-11-17 19:35:12 +00008913 __ b(slow_path->GetEntryLabel());
8914 __ Bind(slow_path->GetExitLabel());
8915}
8916
Vladimir Markodc151b22015-10-15 18:02:30 +01008917HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM::GetSupportedInvokeStaticOrDirectDispatch(
8918 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffrayc1a42cf2016-12-18 15:52:36 +00008919 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Nicolas Geoffraye807ff72017-01-23 09:03:12 +00008920 return desired_dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01008921}
8922
Vladimir Markob4536b72015-11-24 13:45:23 +00008923Register CodeGeneratorARM::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
8924 Register temp) {
8925 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
8926 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
8927 if (!invoke->GetLocations()->Intrinsified()) {
8928 return location.AsRegister<Register>();
8929 }
8930 // For intrinsics we allow any location, so it may be on the stack.
8931 if (!location.IsRegister()) {
8932 __ LoadFromOffset(kLoadWord, temp, SP, location.GetStackIndex());
8933 return temp;
8934 }
8935 // For register locations, check if the register was saved. If so, get it from the stack.
8936 // Note: There is a chance that the register was saved but not overwritten, so we could
8937 // save one load. However, since this is just an intrinsic slow path we prefer this
8938 // simple and more robust approach rather than trying to determine if that's the case.
8939 SlowPathCode* slow_path = GetCurrentSlowPath();
Vladimir Markod254f5c2017-06-02 15:18:36 +00008940 DCHECK(slow_path != nullptr); // For intrinsified invokes the call is emitted on the slow path.
8941 if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
Vladimir Markob4536b72015-11-24 13:45:23 +00008942 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
8943 __ LoadFromOffset(kLoadWord, temp, SP, stack_offset);
8944 return temp;
8945 }
8946 return location.AsRegister<Register>();
8947}
8948
Vladimir Markoe7197bf2017-06-02 17:00:23 +01008949void CodeGeneratorARM::GenerateStaticOrDirectCall(
8950 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Vladimir Marko58155012015-08-19 12:49:41 +00008951 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
8952 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01008953 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
8954 uint32_t offset =
8955 GetThreadOffset<kArmPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Vladimir Marko58155012015-08-19 12:49:41 +00008956 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01008957 __ LoadFromOffset(kLoadWord, temp.AsRegister<Register>(), TR, offset);
Vladimir Marko58155012015-08-19 12:49:41 +00008958 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01008959 }
Vladimir Marko58155012015-08-19 12:49:41 +00008960 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00008961 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00008962 break;
Vladimir Marko65979462017-05-19 17:25:12 +01008963 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
8964 DCHECK(GetCompilerOptions().IsBootImage());
8965 Register temp_reg = temp.AsRegister<Register>();
8966 PcRelativePatchInfo* labels = NewPcRelativeMethodPatch(invoke->GetTargetMethod());
8967 __ BindTrackedLabel(&labels->movw_label);
8968 __ movw(temp_reg, /* placeholder */ 0u);
8969 __ BindTrackedLabel(&labels->movt_label);
8970 __ movt(temp_reg, /* placeholder */ 0u);
8971 __ BindTrackedLabel(&labels->add_pc_label);
8972 __ add(temp_reg, temp_reg, ShifterOperand(PC));
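      // At link time the relative patcher fills the movw/movt placeholders with the
      // halves of the displacement from this ADD's PC-relative anchor to the target
      // method, so `temp_reg` ends up holding the method's address in the boot image.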
8973 break;
8974 }
Vladimir Marko58155012015-08-19 12:49:41 +00008975 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
8976 __ LoadImmediate(temp.AsRegister<Register>(), invoke->GetMethodAddress());
8977 break;
Vladimir Marko0eb882b2017-05-15 13:39:18 +01008978 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
8979 Register temp_reg = temp.AsRegister<Register>();
8980 PcRelativePatchInfo* labels = NewMethodBssEntryPatch(
8981 MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
8982 __ BindTrackedLabel(&labels->movw_label);
8983 __ movw(temp_reg, /* placeholder */ 0u);
8984 __ BindTrackedLabel(&labels->movt_label);
8985 __ movt(temp_reg, /* placeholder */ 0u);
8986 __ BindTrackedLabel(&labels->add_pc_label);
8987 __ add(temp_reg, temp_reg, ShifterOperand(PC));
8988 __ LoadFromOffset(kLoadWord, temp_reg, temp_reg, /* offset */ 0);
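      // Same movw/movt/add-PC pattern as above, but pointing at the method's .bss slot;
      // the ArtMethod* is then loaded from that slot, which the runtime fills in once the
      // method has been resolved.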
Vladimir Markob4536b72015-11-24 13:45:23 +00008989 break;
8990 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01008991 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
8992 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
8993 return; // No code pointer retrieval; the runtime performs the call directly.
Nicolas Geoffrayae71a052015-06-09 14:12:28 +01008994 }
Vladimir Marko58155012015-08-19 12:49:41 +00008995 }
8996
8997 switch (invoke->GetCodePtrLocation()) {
8998 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
8999 __ bl(GetFrameEntryLabel());
9000 break;
Vladimir Marko58155012015-08-19 12:49:41 +00009001 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
9002 // LR = callee_method->entry_point_from_quick_compiled_code_
9003 __ LoadFromOffset(
9004 kLoadWord, LR, callee_method.AsRegister<Register>(),
Andreas Gampe542451c2016-07-26 09:02:02 -07009005 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize).Int32Value());
Vladimir Marko58155012015-08-19 12:49:41 +00009006 // LR()
9007 __ blx(LR);
9008 break;
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08009009 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01009010 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08009011
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08009012 DCHECK(!IsLeafMethod());
9013}
9014
Vladimir Markoe7197bf2017-06-02 17:00:23 +01009015void CodeGeneratorARM::GenerateVirtualCall(
9016 HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
Andreas Gampebfb5ba92015-09-01 15:45:02 +00009017 Register temp = temp_location.AsRegister<Register>();
9018 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
9019 invoke->GetVTableIndex(), kArmPointerSize).Uint32Value();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00009020
9021 // Use the calling convention instead of the location of the receiver, as
9022 // intrinsics may have put the receiver in a different register. In the intrinsics
9023 // slow path, the arguments have been moved to the right place, so here we are
9024 // guaranteed that the receiver is the first register of the calling convention.
9025 InvokeDexCallingConvention calling_convention;
9026 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00009027 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Roland Levillain3b359c72015-11-17 19:35:12 +00009028 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00009029 __ LoadFromOffset(kLoadWord, temp, receiver, class_offset);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00009030 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain3b359c72015-11-17 19:35:12 +00009031 // Instead of simply (possibly) unpoisoning `temp` here, we should
9032 // emit a read barrier for the previous class reference load.
9033 // However this is not required in practice, as this is an
9034 // intermediate/temporary reference and because the current
9035 // concurrent copying collector keeps the from-space memory
9036 // intact/accessible until the end of the marking phase (the
9037 // concurrent copying collector may not do so in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00009038 __ MaybeUnpoisonHeapReference(temp);
9039 // temp = temp->GetMethodAt(method_offset);
9040 uint32_t entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07009041 kArmPointerSize).Int32Value();
Andreas Gampebfb5ba92015-09-01 15:45:02 +00009042 __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
9043 // LR = temp->GetEntryPoint();
9044 __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
9045 // LR();
9046 __ blx(LR);
Vladimir Markoe7197bf2017-06-02 17:00:23 +01009047 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00009048}
9049
Vladimir Marko65979462017-05-19 17:25:12 +01009050CodeGeneratorARM::PcRelativePatchInfo* CodeGeneratorARM::NewPcRelativeMethodPatch(
9051 MethodReference target_method) {
9052 return NewPcRelativePatch(*target_method.dex_file,
9053 target_method.dex_method_index,
9054 &pc_relative_method_patches_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00009055}
9056
Vladimir Marko0eb882b2017-05-15 13:39:18 +01009057CodeGeneratorARM::PcRelativePatchInfo* CodeGeneratorARM::NewMethodBssEntryPatch(
9058 MethodReference target_method) {
9059 return NewPcRelativePatch(*target_method.dex_file,
9060 target_method.dex_method_index,
9061 &method_bss_entry_patches_);
9062}
9063
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01009064CodeGeneratorARM::PcRelativePatchInfo* CodeGeneratorARM::NewPcRelativeTypePatch(
Andreas Gampea5b09a62016-11-17 15:21:22 -08009065 const DexFile& dex_file, dex::TypeIndex type_index) {
9066 return NewPcRelativePatch(dex_file, type_index.index_, &pc_relative_type_patches_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01009067}
9068
Vladimir Marko1998cd02017-01-13 13:02:58 +00009069CodeGeneratorARM::PcRelativePatchInfo* CodeGeneratorARM::NewTypeBssEntryPatch(
9070 const DexFile& dex_file, dex::TypeIndex type_index) {
9071 return NewPcRelativePatch(dex_file, type_index.index_, &type_bss_entry_patches_);
9072}
9073
Vladimir Marko65979462017-05-19 17:25:12 +01009074CodeGeneratorARM::PcRelativePatchInfo* CodeGeneratorARM::NewPcRelativeStringPatch(
9075 const DexFile& dex_file, dex::StringIndex string_index) {
9076 return NewPcRelativePatch(dex_file, string_index.index_, &pc_relative_string_patches_);
9077}
9078
Vladimir Markocac5a7e2016-02-22 10:39:50 +00009079CodeGeneratorARM::PcRelativePatchInfo* CodeGeneratorARM::NewPcRelativePatch(
9080 const DexFile& dex_file, uint32_t offset_or_index, ArenaDeque<PcRelativePatchInfo>* patches) {
9081 patches->emplace_back(dex_file, offset_or_index);
9082 return &patches->back();
9083}
9084
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01009085Label* CodeGeneratorARM::NewBakerReadBarrierPatch(uint32_t custom_data) {
9086 baker_read_barrier_patches_.emplace_back(custom_data);
9087 return &baker_read_barrier_patches_.back().label;
9088}
9089
Vladimir Markocac5a7e2016-02-22 10:39:50 +00009090Literal* CodeGeneratorARM::DeduplicateBootImageAddressLiteral(uint32_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00009091 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00009092}
9093
Nicolas Geoffray132d8362016-11-16 09:19:42 +00009094Literal* CodeGeneratorARM::DeduplicateJitStringLiteral(const DexFile& dex_file,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00009095 dex::StringIndex string_index,
9096 Handle<mirror::String> handle) {
9097 jit_string_roots_.Overwrite(StringReference(&dex_file, string_index),
9098 reinterpret_cast64<uint64_t>(handle.GetReference()));
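  // The literal emitted below is only a 32-bit placeholder; EmitJitRootPatches() later
  // overwrites it with the address of this string's slot in the JIT root table, using
  // the index recorded in `jit_string_roots_`.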
Nicolas Geoffray132d8362016-11-16 09:19:42 +00009099 return jit_string_patches_.GetOrCreate(
9100 StringReference(&dex_file, string_index),
9101 [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
9102}
9103
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00009104Literal* CodeGeneratorARM::DeduplicateJitClassLiteral(const DexFile& dex_file,
9105 dex::TypeIndex type_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00009106 Handle<mirror::Class> handle) {
9107 jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index),
9108 reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00009109 return jit_class_patches_.GetOrCreate(
9110 TypeReference(&dex_file, type_index),
9111 [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
9112}
9113
Vladimir Markoaad75c62016-10-03 08:46:48 +00009114template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
9115inline void CodeGeneratorARM::EmitPcRelativeLinkerPatches(
9116 const ArenaDeque<PcRelativePatchInfo>& infos,
9117 ArenaVector<LinkerPatch>* linker_patches) {
9118 for (const PcRelativePatchInfo& info : infos) {
9119 const DexFile& dex_file = info.target_dex_file;
9120 size_t offset_or_index = info.offset_or_index;
9121 DCHECK(info.add_pc_label.IsBound());
9122 uint32_t add_pc_offset = dchecked_integral_cast<uint32_t>(info.add_pc_label.Position());
9123 // Add MOVW patch.
9124 DCHECK(info.movw_label.IsBound());
9125 uint32_t movw_offset = dchecked_integral_cast<uint32_t>(info.movw_label.Position());
9126 linker_patches->push_back(Factory(movw_offset, &dex_file, add_pc_offset, offset_or_index));
9127 // Add MOVT patch.
9128 DCHECK(info.movt_label.IsBound());
9129 uint32_t movt_offset = dchecked_integral_cast<uint32_t>(info.movt_label.Position());
9130 linker_patches->push_back(Factory(movt_offset, &dex_file, add_pc_offset, offset_or_index));
9131 }
9132}
9133
Vladimir Marko58155012015-08-19 12:49:41 +00009134void CodeGeneratorARM::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
9135 DCHECK(linker_patches->empty());
Vladimir Markob4536b72015-11-24 13:45:23 +00009136 size_t size =
Vladimir Marko65979462017-05-19 17:25:12 +01009137 /* MOVW+MOVT for each entry */ 2u * pc_relative_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01009138 /* MOVW+MOVT for each entry */ 2u * method_bss_entry_patches_.size() +
Vladimir Markoaad75c62016-10-03 08:46:48 +00009139 /* MOVW+MOVT for each entry */ 2u * pc_relative_type_patches_.size() +
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01009140 /* MOVW+MOVT for each entry */ 2u * type_bss_entry_patches_.size() +
Vladimir Marko65979462017-05-19 17:25:12 +01009141 /* MOVW+MOVT for each entry */ 2u * pc_relative_string_patches_.size() +
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01009142 baker_read_barrier_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00009143 linker_patches->reserve(size);
Vladimir Marko65979462017-05-19 17:25:12 +01009144 if (GetCompilerOptions().IsBootImage()) {
9145 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
Vladimir Markoaad75c62016-10-03 08:46:48 +00009146 linker_patches);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00009147 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
9148 linker_patches);
Vladimir Markoaad75c62016-10-03 08:46:48 +00009149 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
9150 linker_patches);
Vladimir Marko65979462017-05-19 17:25:12 +01009151 } else {
9152 DCHECK(pc_relative_method_patches_.empty());
9153 DCHECK(pc_relative_type_patches_.empty());
9154 EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
9155 linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00009156 }
Vladimir Marko0eb882b2017-05-15 13:39:18 +01009157 EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
9158 linker_patches);
Vladimir Marko1998cd02017-01-13 13:02:58 +00009159 EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
9160 linker_patches);
Vladimir Markoeee1c0e2017-04-21 17:58:41 +01009161 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
9162 linker_patches->push_back(LinkerPatch::BakerReadBarrierBranchPatch(info.label.Position(),
9163 info.custom_data));
9164 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00009165 DCHECK_EQ(size, linker_patches->size());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00009166}
9167
9168Literal* CodeGeneratorARM::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
9169 return map->GetOrCreate(
9170 value,
9171 [this, value]() { return __ NewLiteral<uint32_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00009172}
9173
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03009174void LocationsBuilderARM::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
9175 LocationSummary* locations =
9176 new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
9177 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
9178 Location::RequiresRegister());
9179 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
9180 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
9181 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
9182}
9183
9184void InstructionCodeGeneratorARM::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
9185 LocationSummary* locations = instr->GetLocations();
9186 Register res = locations->Out().AsRegister<Register>();
9187 Register accumulator =
9188 locations->InAt(HMultiplyAccumulate::kInputAccumulatorIndex).AsRegister<Register>();
9189 Register mul_left =
9190 locations->InAt(HMultiplyAccumulate::kInputMulLeftIndex).AsRegister<Register>();
9191 Register mul_right =
9192 locations->InAt(HMultiplyAccumulate::kInputMulRightIndex).AsRegister<Register>();
9193
9194 if (instr->GetOpKind() == HInstruction::kAdd) {
9195 __ mla(res, mul_left, mul_right, accumulator);
9196 } else {
9197 __ mls(res, mul_left, mul_right, accumulator);
9198 }
9199}
9200
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01009201void LocationsBuilderARM::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00009202 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00009203 LOG(FATAL) << "Unreachable";
9204}
9205
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01009206void InstructionCodeGeneratorARM::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00009207 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00009208 LOG(FATAL) << "Unreachable";
9209}
9210
Mark Mendellfe57faa2015-09-18 09:26:15 -04009211// Simple implementation of packed switch - generate cascaded compare/jumps.
9212void LocationsBuilderARM::VisitPackedSwitch(HPackedSwitch* switch_instr) {
9213 LocationSummary* locations =
9214 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
9215 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Markof3e0ee22015-12-17 15:23:13 +00009216 if (switch_instr->GetNumEntries() > kPackedSwitchCompareJumpThreshold &&
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07009217 codegen_->GetAssembler()->IsThumb()) {
9218 locations->AddTemp(Location::RequiresRegister()); // We need a temp for the table base.
9219 if (switch_instr->GetStartValue() != 0) {
9220 locations->AddTemp(Location::RequiresRegister()); // We need a temp for the bias.
9221 }
9222 }
Mark Mendellfe57faa2015-09-18 09:26:15 -04009223}
9224
9225void InstructionCodeGeneratorARM::VisitPackedSwitch(HPackedSwitch* switch_instr) {
9226 int32_t lower_bound = switch_instr->GetStartValue();
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07009227 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04009228 LocationSummary* locations = switch_instr->GetLocations();
9229 Register value_reg = locations->InAt(0).AsRegister<Register>();
9230 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
9231
Vladimir Markof3e0ee22015-12-17 15:23:13 +00009232 if (num_entries <= kPackedSwitchCompareJumpThreshold || !codegen_->GetAssembler()->IsThumb()) {
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07009233 // Create a series of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00009234 Register temp_reg = IP;
9235 // Note: It is fine for the AddConstantSetFlags() below to use the IP register to temporarily
9236 // store the immediate, because IP is used as the destination register. For the other
9237 // AddConstantSetFlags() and GenerateCompareWithImmediate(), the immediate values are constant,
9238 // and they can be encoded in the instruction without making use of the IP register.
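    // For example, with lower_bound == 10 and num_entries == 5 the chain below behaves as:
    //   temp = value - 10;                 // sets flags
    //   if (temp == 0) goto case 10;
    //   temp -= 2; if (temp < 0) goto case 11; if (temp == 0) goto case 12;
    //   temp -= 2; if (temp < 0) goto case 13; if (temp == 0) goto case 14;
    //   goto default;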
9239 __ AddConstantSetFlags(temp_reg, value_reg, -lower_bound);
9240
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07009241 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00009242 // Jump to successors[0] if value == lower_bound.
9243 __ b(codegen_->GetLabelOf(successors[0]), EQ);
9244 int32_t last_index = 0;
9245 for (; num_entries - last_index > 2; last_index += 2) {
9246 __ AddConstantSetFlags(temp_reg, temp_reg, -2);
9247 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
9248 __ b(codegen_->GetLabelOf(successors[last_index + 1]), LO);
9249 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
9250 __ b(codegen_->GetLabelOf(successors[last_index + 2]), EQ);
9251 }
9252 if (num_entries - last_index == 2) {
9253 // The last missing case_value.
Vladimir Markoac6ac102015-12-17 12:14:00 +00009254 __ CmpConstant(temp_reg, 1);
Vladimir Markof3e0ee22015-12-17 15:23:13 +00009255 __ b(codegen_->GetLabelOf(successors[last_index + 1]), EQ);
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07009256 }
Mark Mendellfe57faa2015-09-18 09:26:15 -04009257
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07009258 // And the default for any other value.
9259 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
9260 __ b(codegen_->GetLabelOf(default_block));
9261 }
9262 } else {
9263 // Create a table lookup.
9264 Register temp_reg = locations->GetTemp(0).AsRegister<Register>();
9265
9266 // Materialize a pointer to the switch table
9267 std::vector<Label*> labels(num_entries);
9268 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
9269 for (uint32_t i = 0; i < num_entries; i++) {
9270 labels[i] = codegen_->GetLabelOf(successors[i]);
9271 }
9272 JumpTable* table = __ CreateJumpTable(std::move(labels), temp_reg);
9273
9274 // Remove the bias.
9275 Register key_reg;
9276 if (lower_bound != 0) {
9277 key_reg = locations->GetTemp(1).AsRegister<Register>();
9278 __ AddConstant(key_reg, value_reg, -lower_bound);
9279 } else {
9280 key_reg = value_reg;
9281 }
9282
9283 // Check whether the value is in the table, jump to default block if not.
9284 __ CmpConstant(key_reg, num_entries - 1);
9285 __ b(codegen_->GetLabelOf(default_block), Condition::HI);
9286
9287 // Load the displacement from the table.
9288 __ ldr(temp_reg, Address(temp_reg, key_reg, Shift::LSL, 2));
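    // Each 32-bit table entry holds the (signed) displacement of its case block from the
    // dispatch sequence set up by CreateJumpTable()/EmitJumpTableDispatch(); the direct
    // add to the PC below then branches there.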
9289
9290 // Dispatch is a direct add to the PC (for Thumb2).
9291 __ EmitJumpTableDispatch(table, temp_reg);
Mark Mendellfe57faa2015-09-18 09:26:15 -04009292 }
9293}
9294
Andreas Gampe85b62f22015-09-09 13:15:38 -07009295void CodeGeneratorARM::MoveFromReturnRegister(Location trg, Primitive::Type type) {
9296 if (!trg.IsValid()) {
Roland Levillainc9285912015-12-18 10:38:42 +00009297 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07009298 return;
9299 }
9300
9301 DCHECK_NE(type, Primitive::kPrimVoid);
9302
9303 Location return_loc = InvokeDexCallingConventionVisitorARM().GetReturnLocation(type);
9304 if (return_loc.Equals(trg)) {
9305 return;
9306 }
9307
9308 // TODO: Consider pairs in the parallel move resolver, then this could be nicely merged
9309 // with the last branch.
9310 if (type == Primitive::kPrimLong) {
9311 HParallelMove parallel_move(GetGraph()->GetArena());
9312 parallel_move.AddMove(return_loc.ToLow(), trg.ToLow(), Primitive::kPrimInt, nullptr);
9313 parallel_move.AddMove(return_loc.ToHigh(), trg.ToHigh(), Primitive::kPrimInt, nullptr);
9314 GetMoveResolver()->EmitNativeCode(&parallel_move);
9315 } else if (type == Primitive::kPrimDouble) {
9316 HParallelMove parallel_move(GetGraph()->GetArena());
9317 parallel_move.AddMove(return_loc.ToLow(), trg.ToLow(), Primitive::kPrimFloat, nullptr);
9318 parallel_move.AddMove(return_loc.ToHigh(), trg.ToHigh(), Primitive::kPrimFloat, nullptr);
9319 GetMoveResolver()->EmitNativeCode(&parallel_move);
9320 } else {
9321 // Let the parallel move resolver take care of all of this.
9322 HParallelMove parallel_move(GetGraph()->GetArena());
9323 parallel_move.AddMove(return_loc, trg, type, nullptr);
9324 GetMoveResolver()->EmitNativeCode(&parallel_move);
9325 }
9326}
9327
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00009328void LocationsBuilderARM::VisitClassTableGet(HClassTableGet* instruction) {
9329 LocationSummary* locations =
9330 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
9331 locations->SetInAt(0, Location::RequiresRegister());
9332 locations->SetOut(Location::RequiresRegister());
9333}
9334
9335void InstructionCodeGeneratorARM::VisitClassTableGet(HClassTableGet* instruction) {
9336 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00009337 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
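    // vtable case: a single load from the class at the embedded vtable entry's offset.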
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01009338 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00009339 instruction->GetIndex(), kArmPointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01009340 __ LoadFromOffset(kLoadWord,
9341 locations->Out().AsRegister<Register>(),
9342 locations->InAt(0).AsRegister<Register>(),
9343 method_offset);
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00009344 } else {
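    // IMT case: load the ImTable pointer from the class, then the method at the
    // requested index from that table.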
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01009345 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00009346 instruction->GetIndex(), kArmPointerSize));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01009347 __ LoadFromOffset(kLoadWord,
9348 locations->Out().AsRegister<Register>(),
9349 locations->InAt(0).AsRegister<Register>(),
9350 mirror::Class::ImtPtrOffset(kArmPointerSize).Uint32Value());
9351 __ LoadFromOffset(kLoadWord,
9352 locations->Out().AsRegister<Register>(),
9353 locations->Out().AsRegister<Register>(),
9354 method_offset);
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00009355 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00009356}
9357
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00009358static void PatchJitRootUse(uint8_t* code,
9359 const uint8_t* roots_data,
9360 Literal* literal,
9361 uint64_t index_in_table) {
9362 DCHECK(literal->GetLabel()->IsBound());
9363 uint32_t literal_offset = literal->GetLabel()->Position();
9364 uintptr_t address =
9365 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
9366 uint8_t* data = code + literal_offset;
9367 reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
9368}
9369
Nicolas Geoffray132d8362016-11-16 09:19:42 +00009370void CodeGeneratorARM::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
9371 for (const auto& entry : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01009372 const StringReference& string_reference = entry.first;
9373 Literal* table_entry_literal = entry.second;
9374 const auto it = jit_string_roots_.find(string_reference);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00009375 DCHECK(it != jit_string_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01009376 uint64_t index_in_table = it->second;
9377 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00009378 }
9379 for (const auto& entry : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01009380 const TypeReference& type_reference = entry.first;
9381 Literal* table_entry_literal = entry.second;
9382 const auto it = jit_class_roots_.find(type_reference);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00009383 DCHECK(it != jit_class_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01009384 uint64_t index_in_table = it->second;
9385 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00009386 }
9387}
9388
Roland Levillain4d027112015-07-01 15:41:14 +01009389#undef __
9390#undef QUICK_ENTRY_POINT
9391
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00009392} // namespace arm
9393} // namespace art