/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_mips.h"

#include "arch/mips/instruction_set_features_mips.h"
#include "art_method.h"
#include "code_generator_mips.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string.h"
#include "scoped_thread_state_change-inl.h"
#include "thread.h"
#include "utils/mips/assembler_mips.h"
#include "utils/mips/constants_mips.h"

namespace art {

namespace mips {

IntrinsicLocationsBuilderMIPS::IntrinsicLocationsBuilderMIPS(CodeGeneratorMIPS* codegen)
    : codegen_(codegen), arena_(codegen->GetGraph()->GetArena()) {
}

MipsAssembler* IntrinsicCodeGeneratorMIPS::GetAssembler() {
  return reinterpret_cast<MipsAssembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorMIPS::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

inline bool IntrinsicCodeGeneratorMIPS::IsR2OrNewer() const {
  return codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();
}

inline bool IntrinsicCodeGeneratorMIPS::IsR6() const {
  return codegen_->GetInstructionSetFeatures().IsR6();
}

inline bool IntrinsicCodeGeneratorMIPS::Is32BitFPU() const {
  return codegen_->GetInstructionSetFeatures().Is32BitFloatingPoint();
}

#define __ codegen->GetAssembler()->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorMIPS* codegen) {
  if (!trg.IsValid()) {
    DCHECK_EQ(type, Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = trg.AsRegister<Register>();
    if (trg_reg != V0) {
      __ Move(V0, trg_reg);
    }
  } else {
    FRegister trg_reg = trg.AsFpuRegister<FRegister>();
    if (trg_reg != F0) {
      if (type == Primitive::kPrimFloat) {
        __ MovS(F0, trg_reg);
      } else {
        __ MovD(F0, trg_reg);
      }
    }
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
  InvokeDexCallingConventionVisitorMIPS calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the
// intrinsic) in an intrinsified call. This will copy the arguments
// into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations
//       given by the invoke's location summary. If an intrinsic
//       modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit IntrinsicSlowPathMIPS(HInvoke* invoke) : SlowPathCodeMIPS(invoke), invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS* codegen = down_cast<CodeGeneratorMIPS*>(codegen_in);

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(
          invoke_->AsInvokeStaticOrDirect(), Location::RegisterLocation(A0), this);
    } else {
      codegen->GenerateVirtualCall(
          invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0), this);
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS);
};

#undef __

bool IntrinsicLocationsBuilderMIPS::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
  FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();

  if (is64bit) {
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    __ Mfc1(out_lo, in);
    __ MoveFromFpuHigh(out_hi, in);
  } else {
    Register out = locations->Out().AsRegister<Register>();

    __ Mfc1(out, in);
  }
}

// long java.lang.Double.doubleToRawLongBits(double)
void IntrinsicLocationsBuilderMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// int java.lang.Float.floatToRawIntBits(float)
void IntrinsicLocationsBuilderMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
  FRegister out = locations->Out().AsFpuRegister<FRegister>();

  if (is64bit) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();

    __ Mtc1(in_lo, out);
    __ MoveToFpuHigh(in_hi, out);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();

    __ Mtc1(in, out);
  }
}

// double java.lang.Double.longBitsToDouble(long)
void IntrinsicLocationsBuilderMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// float java.lang.Float.intBitsToFloat(int)
void IntrinsicLocationsBuilderMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena,
                                    HInvoke* invoke,
                                    Location::OutputOverlap overlaps = Location::kNoOutputOverlap) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), overlaps);
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       bool isR2OrNewer,
                       bool isR6,
                       bool reverseBits,
                       MipsAssembler* assembler) {
  DCHECK(type == Primitive::kPrimShort ||
         type == Primitive::kPrimInt ||
         type == Primitive::kPrimLong);
  DCHECK(type != Primitive::kPrimShort || !reverseBits);

  if (type == Primitive::kPrimShort) {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    if (isR2OrNewer) {
      __ Wsbh(out, in);
      __ Seh(out, out);
    } else {
      __ Sll(TMP, in, 24);
      __ Sra(TMP, TMP, 16);
      __ Sll(out, in, 16);
      __ Srl(out, out, 24);
      __ Or(out, out, TMP);
    }
  } else if (type == Primitive::kPrimInt) {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    if (isR2OrNewer) {
      __ Rotr(out, in, 16);
      __ Wsbh(out, out);
    } else {
      // MIPS32r1
      // __ Rotr(out, in, 16);
      __ Sll(TMP, in, 16);
      __ Srl(out, in, 16);
      __ Or(out, out, TMP);
      // __ Wsbh(out, out);
      __ LoadConst32(AT, 0x00FF00FF);
      __ And(TMP, out, AT);
      __ Sll(TMP, TMP, 8);
      __ Srl(out, out, 8);
      __ And(out, out, AT);
      __ Or(out, out, TMP);
    }
    if (reverseBits) {
      if (isR6) {
        __ Bitswap(out, out);
      } else {
        __ LoadConst32(AT, 0x0F0F0F0F);
        __ And(TMP, out, AT);
        __ Sll(TMP, TMP, 4);
        __ Srl(out, out, 4);
        __ And(out, out, AT);
        __ Or(out, TMP, out);
        __ LoadConst32(AT, 0x33333333);
        __ And(TMP, out, AT);
        __ Sll(TMP, TMP, 2);
        __ Srl(out, out, 2);
        __ And(out, out, AT);
        __ Or(out, TMP, out);
        __ LoadConst32(AT, 0x55555555);
        __ And(TMP, out, AT);
        __ Sll(TMP, TMP, 1);
        __ Srl(out, out, 1);
        __ And(out, out, AT);
        __ Or(out, TMP, out);
      }
    }
  } else if (type == Primitive::kPrimLong) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    if (isR2OrNewer) {
      __ Rotr(AT, in_hi, 16);
      __ Rotr(TMP, in_lo, 16);
      __ Wsbh(out_lo, AT);
      __ Wsbh(out_hi, TMP);
    } else {
      // When calling CreateIntToIntLocations() we promised that the
      // use of the out_lo/out_hi wouldn't overlap with the use of
      // in_lo/in_hi. Be very careful not to write to out_lo/out_hi
      // until we're completely done reading from in_lo/in_hi.
      // __ Rotr(TMP, in_lo, 16);
      __ Sll(TMP, in_lo, 16);
      __ Srl(AT, in_lo, 16);
      __ Or(TMP, TMP, AT);        // Hold in TMP until it's safe
                                  // to write to out_hi.
      // __ Rotr(out_lo, in_hi, 16);
      __ Sll(AT, in_hi, 16);
      __ Srl(out_lo, in_hi, 16);  // Here we are finally done reading
                                  // from in_lo/in_hi so it's okay to
                                  // write to out_lo/out_hi.
      __ Or(out_lo, out_lo, AT);
      // __ Wsbh(out_hi, out_hi);
      __ LoadConst32(AT, 0x00FF00FF);
      __ And(out_hi, TMP, AT);
      __ Sll(out_hi, out_hi, 8);
      __ Srl(TMP, TMP, 8);
      __ And(TMP, TMP, AT);
      __ Or(out_hi, out_hi, TMP);
      // __ Wsbh(out_lo, out_lo);
      __ And(TMP, out_lo, AT);  // AT already holds the correct mask value.
      __ Sll(TMP, TMP, 8);
      __ Srl(out_lo, out_lo, 8);
      __ And(out_lo, out_lo, AT);
      __ Or(out_lo, out_lo, TMP);
    }
    if (reverseBits) {
      if (isR6) {
        __ Bitswap(out_hi, out_hi);
        __ Bitswap(out_lo, out_lo);
      } else {
        __ LoadConst32(AT, 0x0F0F0F0F);
        __ And(TMP, out_hi, AT);
        __ Sll(TMP, TMP, 4);
        __ Srl(out_hi, out_hi, 4);
        __ And(out_hi, out_hi, AT);
        __ Or(out_hi, TMP, out_hi);
        __ And(TMP, out_lo, AT);
        __ Sll(TMP, TMP, 4);
        __ Srl(out_lo, out_lo, 4);
        __ And(out_lo, out_lo, AT);
        __ Or(out_lo, TMP, out_lo);
        __ LoadConst32(AT, 0x33333333);
        __ And(TMP, out_hi, AT);
        __ Sll(TMP, TMP, 2);
        __ Srl(out_hi, out_hi, 2);
        __ And(out_hi, out_hi, AT);
        __ Or(out_hi, TMP, out_hi);
        __ And(TMP, out_lo, AT);
        __ Sll(TMP, TMP, 2);
        __ Srl(out_lo, out_lo, 2);
        __ And(out_lo, out_lo, AT);
        __ Or(out_lo, TMP, out_lo);
        __ LoadConst32(AT, 0x55555555);
        __ And(TMP, out_hi, AT);
        __ Sll(TMP, TMP, 1);
        __ Srl(out_hi, out_hi, 1);
        __ And(out_hi, out_hi, AT);
        __ Or(out_hi, TMP, out_hi);
        __ And(TMP, out_lo, AT);
        __ Sll(TMP, TMP, 1);
        __ Srl(out_lo, out_lo, 1);
        __ And(out_lo, out_lo, AT);
        __ Or(out_lo, TMP, out_lo);
      }
    }
  }
}
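
// For reference, a minimal C sketch of the mask-and-shift sequence emitted
// above for pre-R6 cores (hypothetical helper, for illustration only, not
// part of this file):
//
//   uint32_t ReverseBits32(uint32_t v) {
//     v = (v << 16) | (v >> 16);                               // Rotr(v, 16)
//     v = ((v & 0x00FF00FF) << 8) | ((v >> 8) & 0x00FF00FF);   // Wsbh
//     v = ((v & 0x0F0F0F0F) << 4) | ((v >> 4) & 0x0F0F0F0F);
//     v = ((v & 0x33333333) << 2) | ((v >> 2) & 0x33333333);
//     v = ((v & 0x55555555) << 1) | ((v >> 1) & 0x55555555);
//     return v;
//   }
//
// The first two lines alone implement reverseBytes(); the remaining three
// mask-and-shift steps reverse the bits within each byte.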

// int java.lang.Integer.reverseBytes(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimInt,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ false,
             GetAssembler());
}

// long java.lang.Long.reverseBytes(long)
void IntrinsicLocationsBuilderMIPS::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimLong,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ false,
             GetAssembler());
}

// short java.lang.Short.reverseBytes(short)
void IntrinsicLocationsBuilderMIPS::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimShort,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ false,
             GetAssembler());
}

static void GenNumberOfLeadingZeroes(LocationSummary* locations,
                                     bool is64bit,
                                     bool isR6,
                                     MipsAssembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();
  if (is64bit) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();

    if (isR6) {
      __ ClzR6(AT, in_hi);
      __ ClzR6(TMP, in_lo);
      __ Seleqz(TMP, TMP, in_hi);
    } else {
      __ ClzR2(AT, in_hi);
      __ ClzR2(TMP, in_lo);
      __ Movn(TMP, ZERO, in_hi);
    }
    __ Addu(out, AT, TMP);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();

    if (isR6) {
      __ ClzR6(out, in);
    } else {
      __ ClzR2(out, in);
    }
  }
}
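
// As a sketch (illustration only), the 64-bit path above computes:
//   out = clz32(in_hi) + ((in_hi == 0) ? clz32(in_lo) : 0);
// When the high word is zero its clz is 32, so the count simply continues
// into the low word.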

// int java.lang.Integer.numberOfLeadingZeros(int i)
void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, IsR6(), GetAssembler());
}

// int java.lang.Long.numberOfLeadingZeros(long i)
void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, IsR6(), GetAssembler());
}

static void GenNumberOfTrailingZeroes(LocationSummary* locations,
                                      bool is64bit,
                                      bool isR6,
                                      MipsAssembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();
  Register in_lo;
  Register in;

  if (is64bit) {
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();

    in_lo = locations->InAt(0).AsRegisterPairLow<Register>();

    // If in_lo is zero then count the number of trailing zeroes in in_hi;
    // otherwise count the number of trailing zeroes in in_lo.
    // out = in_lo ? in_lo : in_hi;
    if (isR6) {
      __ Seleqz(out, in_hi, in_lo);
      __ Selnez(TMP, in_lo, in_lo);
      __ Or(out, out, TMP);
    } else {
      __ Movz(out, in_hi, in_lo);
      __ Movn(out, in_lo, in_lo);
    }

    in = out;
  } else {
    in = locations->InAt(0).AsRegister<Register>();
    // Give in_lo a dummy value to keep the compiler from complaining.
    // Since we only get here in the 32-bit case, this value will never
    // be used.
    in_lo = in;
  }

  if (isR6) {
    // We don't have an instruction to count the number of trailing zeroes.
    // Start by flipping the bits end-for-end so we can count the number of
    // leading zeroes instead.
    __ Rotr(out, in, 16);
    __ Wsbh(out, out);
    __ Bitswap(out, out);
    __ ClzR6(out, out);
  } else {
    // Convert trailing zeroes to trailing ones, and bits to their left
    // to zeroes.
    __ Addiu(TMP, in, -1);
    __ Xor(out, TMP, in);
    __ And(out, out, TMP);
    // Count number of leading zeroes.
    __ ClzR2(out, out);
    // Subtract number of leading zeroes from 32 to get number of trailing ones.
    // Remember that the trailing ones were formerly trailing zeroes.
    __ LoadConst32(TMP, 32);
    __ Subu(out, TMP, out);
  }

  if (is64bit) {
    // If in_lo is zero, then we counted the number of trailing zeroes in in_hi so we must add the
    // number of trailing zeroes in in_lo (32) to get the correct final count.
    __ LoadConst32(TMP, 32);
    if (isR6) {
      __ Seleqz(TMP, TMP, in_lo);
    } else {
      __ Movn(TMP, ZERO, in_lo);
    }
    __ Addu(out, out, TMP);
  }
}
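
// Worked example of the pre-R6 trick above (illustration only), for
// in = 0b...10100 (two trailing zeroes):
//   TMP = in - 1    = 0b...10011   // trailing zeroes become ones
//   out = in ^ TMP  = 0b...00111
//   out = out & TMP = 0b...00011   // only the former trailing zeroes remain
// clz(out) is then 30, and 32 - 30 = 2, the trailing-zero count.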

// int java.lang.Integer.numberOfTrailingZeros(int i)
void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, IsR6(), GetAssembler());
}

// int java.lang.Long.numberOfTrailingZeros(long i)
void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ true, IsR6(), GetAssembler());
}

// int java.lang.Integer.reverse(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimInt,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ true,
             GetAssembler());
}

// long java.lang.Long.reverse(long)
void IntrinsicLocationsBuilderMIPS::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimLong,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ true,
             GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void GenBitCount(LocationSummary* locations,
                        Primitive::Type type,
                        bool isR6,
                        MipsAssembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();

  // https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel
  //
  // A generalization of the best bit counting method to integers of
  // bit-widths up to 128 (parameterized by type T) is this:
  //
  // v = v - ((v >> 1) & (T)~(T)0/3);                           // temp
  // v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3);      // temp
  // v = (v + (v >> 4)) & (T)~(T)0/255*15;                      // temp
  // c = (T)(v * ((T)~(T)0/255)) >> (sizeof(T) - 1) * BITS_PER_BYTE;  // count
  //
  // For comparison, for 32-bit quantities, this algorithm can be executed
  // using 20 MIPS instructions (the calls to LoadConst32() generate two
  // machine instructions each for the values being used in this algorithm).
  // An unrolled loop-based algorithm requires 25 instructions.
  //
  // For 64-bit quantities, this algorithm gets executed twice (once
  // for in_lo, and again for in_hi), but saves a few instructions
  // because the mask values only have to be loaded once. Using this
  // algorithm the count for a 64-bit operand can be performed in 29
  // instructions, compared to a loop-based algorithm which requires 47
  // instructions.

  if (type == Primitive::kPrimInt) {
    Register in = locations->InAt(0).AsRegister<Register>();

    __ Srl(TMP, in, 1);
    __ LoadConst32(AT, 0x55555555);
    __ And(TMP, TMP, AT);
    __ Subu(TMP, in, TMP);
    __ LoadConst32(AT, 0x33333333);
    __ And(out, TMP, AT);
    __ Srl(TMP, TMP, 2);
    __ And(TMP, TMP, AT);
    __ Addu(TMP, out, TMP);
    __ Srl(out, TMP, 4);
    __ Addu(out, out, TMP);
    __ LoadConst32(AT, 0x0F0F0F0F);
    __ And(out, out, AT);
    __ LoadConst32(TMP, 0x01010101);
    if (isR6) {
      __ MulR6(out, out, TMP);
    } else {
      __ MulR2(out, out, TMP);
    }
    __ Srl(out, out, 24);
  } else {
    DCHECK_EQ(type, Primitive::kPrimLong);
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register tmp_hi = locations->GetTemp(0).AsRegister<Register>();
    Register out_hi = locations->GetTemp(1).AsRegister<Register>();
    Register tmp_lo = TMP;
    Register out_lo = out;

    __ Srl(tmp_lo, in_lo, 1);
    __ Srl(tmp_hi, in_hi, 1);

    __ LoadConst32(AT, 0x55555555);

    __ And(tmp_lo, tmp_lo, AT);
    __ Subu(tmp_lo, in_lo, tmp_lo);

    __ And(tmp_hi, tmp_hi, AT);
    __ Subu(tmp_hi, in_hi, tmp_hi);

    __ LoadConst32(AT, 0x33333333);

    __ And(out_lo, tmp_lo, AT);
    __ Srl(tmp_lo, tmp_lo, 2);
    __ And(tmp_lo, tmp_lo, AT);
    __ Addu(tmp_lo, out_lo, tmp_lo);

    __ And(out_hi, tmp_hi, AT);
    __ Srl(tmp_hi, tmp_hi, 2);
    __ And(tmp_hi, tmp_hi, AT);
    __ Addu(tmp_hi, out_hi, tmp_hi);

    // Here we deviate from the original algorithm a bit. We've reached
    // the stage where the bitfields holding the subtotals are large
    // enough to hold the combined subtotals for both the low word, and
    // the high word. This means that we can add the subtotals for the
    // high, and low words into a single word, and compute the final
    // result for both the high, and low words using fewer instructions.
    __ LoadConst32(AT, 0x0F0F0F0F);

    __ Addu(TMP, tmp_hi, tmp_lo);

    __ Srl(out, TMP, 4);
    __ And(out, out, AT);
    __ And(TMP, TMP, AT);
    __ Addu(out, out, TMP);

    __ LoadConst32(AT, 0x01010101);

    if (isR6) {
      __ MulR6(out, out, AT);
    } else {
      __ MulR2(out, out, AT);
    }

    __ Srl(out, out, 24);
  }
}
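
// Concrete 32-bit instantiation of the bithacks algorithm referenced above
// (T = uint32_t), matching the instruction sequence emitted for ints
// (illustration only):
//   v = v - ((v >> 1) & 0x55555555);                 // 2-bit subtotals
//   v = (v & 0x33333333) + ((v >> 2) & 0x33333333);  // 4-bit subtotals
//   v = (v + (v >> 4)) & 0x0F0F0F0F;                 // 8-bit subtotals
//   c = (v * 0x01010101) >> 24;                      // sum the four bytes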

// int java.lang.Integer.bitCount(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), Primitive::kPrimInt, IsR6(), GetAssembler());
}

// int java.lang.Long.bitCount(long)
void IntrinsicLocationsBuilderMIPS::VisitLongBitCount(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), Primitive::kPrimLong, IsR6(), GetAssembler());
}

static void MathAbsFP(LocationSummary* locations,
                      bool is64bit,
                      bool isR2OrNewer,
                      bool isR6,
                      MipsAssembler* assembler) {
  FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister out = locations->Out().AsFpuRegister<FRegister>();

  // Note, as a "quality of implementation", rather than pure "spec compliance", we require that
  // Math.abs() clears the sign bit (but changes nothing else) for all numbers, including NaN
  // (signaling NaN may become quiet though).
  //
  // The ABS.fmt instructions (abs.s and abs.d) do exactly that when NAN2008=1 (R6). For this case,
  // both regular floating point numbers and NaN values are treated alike, only the sign bit is
  // affected by this instruction.
  // But when NAN2008=0 (R2 and before), the ABS.fmt instructions can't be used. For this case, any
  // NaN operand signals invalid operation. This means that other bits (not just the sign bit)
  // might be changed when doing abs(NaN). Because of that, we clear the sign bit in a different
  // way.
  if (isR6) {
    if (is64bit) {
      __ AbsD(out, in);
    } else {
      __ AbsS(out, in);
    }
  } else {
    if (is64bit) {
      if (in != out) {
        __ MovD(out, in);
      }
      __ MoveFromFpuHigh(TMP, in);
      // The ins instruction is not available for R1.
      if (isR2OrNewer) {
        __ Ins(TMP, ZERO, 31, 1);
      } else {
        __ Sll(TMP, TMP, 1);
        __ Srl(TMP, TMP, 1);
      }
      __ MoveToFpuHigh(TMP, out);
    } else {
      __ Mfc1(TMP, in);
      // The ins instruction is not available for R1.
      if (isR2OrNewer) {
        __ Ins(TMP, ZERO, 31, 1);
      } else {
        __ Sll(TMP, TMP, 1);
        __ Srl(TMP, TMP, 1);
      }
      __ Mtc1(TMP, out);
    }
  }
}
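
// The pre-R6 path above is equivalent to clearing bit 31 of the raw
// representation (sketch, using a hypothetical bit_cast helper, for
// illustration only):
//   uint32_t bits = bit_cast<uint32_t>(f);
//   bits &= 0x7FFFFFFFu;           // Ins(TMP, ZERO, 31, 1), or Sll/Srl on R1
//   return bit_cast<float>(bits);
// For doubles, only the high word (which holds the sign bit) is touched.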

// double java.lang.Math.abs(double)
void IntrinsicLocationsBuilderMIPS::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, IsR2OrNewer(), IsR6(), GetAssembler());
}

// float java.lang.Math.abs(float)
void IntrinsicLocationsBuilderMIPS::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, IsR2OrNewer(), IsR6(), GetAssembler());
}

static void GenAbsInteger(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
  if (is64bit) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    // The comments in this section show the analogous operations which would
    // be performed if we had 64-bit registers "in", and "out".
    // __ Dsra32(AT, in, 31);
    __ Sra(AT, in_hi, 31);
    // __ Xor(out, in, AT);
    __ Xor(TMP, in_lo, AT);
    __ Xor(out_hi, in_hi, AT);
    // __ Dsubu(out, out, AT);
    __ Subu(out_lo, TMP, AT);
    __ Sltu(TMP, out_lo, TMP);
    __ Addu(out_hi, out_hi, TMP);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    __ Sra(AT, in, 31);
    __ Xor(out, in, AT);
    __ Subu(out, out, AT);
  }
}
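
// Branch-free abs pattern used above, as a sketch (32-bit case,
// illustration only):
//   int32_t mask = in >> 31;            // arithmetic shift: 0 or -1
//   int32_t out  = (in ^ mask) - mask;  // negates in iff it was negative
// The 64-bit variant applies the same idea to the register pair, using
// Sltu to propagate the carry from the low word into the high word.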

// int java.lang.Math.abs(int)
void IntrinsicLocationsBuilderMIPS::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// long java.lang.Math.abs(long)
void IntrinsicLocationsBuilderMIPS::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        Primitive::Type type,
                        bool is_R6,
                        MipsAssembler* assembler) {
  FRegister out = locations->Out().AsFpuRegister<FRegister>();
  FRegister a = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister b = locations->InAt(1).AsFpuRegister<FRegister>();

  if (is_R6) {
    MipsLabel noNaNs;
    MipsLabel done;
    FRegister ftmp = ((out != a) && (out != b)) ? out : FTMP;

    // When Java computes min/max it prefers a NaN to a number; the
    // behavior of MIPSR6 is to prefer numbers to NaNs, i.e., if one of
    // the inputs is a NaN and the other is a valid number, the MIPS
    // instruction will return the number; Java wants the NaN value
    // returned. This is why there is extra logic preceding the use of
    // the MIPS min.fmt/max.fmt instructions. If either a, or b holds a
    // NaN, return the NaN, otherwise return the min/max.
    if (type == Primitive::kPrimDouble) {
      __ CmpUnD(FTMP, a, b);
      __ Bc1eqz(FTMP, &noNaNs);

      // One of the inputs is a NaN
      __ CmpEqD(ftmp, a, a);
      // If a == a then b is the NaN, otherwise a is the NaN.
      __ SelD(ftmp, a, b);

      if (ftmp != out) {
        __ MovD(out, ftmp);
      }

      __ B(&done);

      __ Bind(&noNaNs);

      if (is_min) {
        __ MinD(out, a, b);
      } else {
        __ MaxD(out, a, b);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimFloat);
      __ CmpUnS(FTMP, a, b);
      __ Bc1eqz(FTMP, &noNaNs);

      // One of the inputs is a NaN
      __ CmpEqS(ftmp, a, a);
      // If a == a then b is the NaN, otherwise a is the NaN.
      __ SelS(ftmp, a, b);

      if (ftmp != out) {
        __ MovS(out, ftmp);
      }

      __ B(&done);

      __ Bind(&noNaNs);

      if (is_min) {
        __ MinS(out, a, b);
      } else {
        __ MaxS(out, a, b);
      }
    }

    __ Bind(&done);
  } else {
    MipsLabel ordered;
    MipsLabel compare;
    MipsLabel select;
    MipsLabel done;

    if (type == Primitive::kPrimDouble) {
      __ CunD(a, b);
    } else {
      DCHECK_EQ(type, Primitive::kPrimFloat);
      __ CunS(a, b);
    }
    __ Bc1f(&ordered);

    // a or b (or both) is a NaN. Return one, which is a NaN.
    if (type == Primitive::kPrimDouble) {
      __ CeqD(b, b);
    } else {
      __ CeqS(b, b);
    }
    __ B(&select);

    __ Bind(&ordered);

    // Neither is a NaN.
    // a == b? (-0.0 compares equal with +0.0)
    // If equal, handle zeroes, else compare further.
    if (type == Primitive::kPrimDouble) {
      __ CeqD(a, b);
    } else {
      __ CeqS(a, b);
    }
    __ Bc1f(&compare);

    // a == b either bit for bit or one is -0.0 and the other is +0.0.
    if (type == Primitive::kPrimDouble) {
      __ MoveFromFpuHigh(TMP, a);
      __ MoveFromFpuHigh(AT, b);
    } else {
      __ Mfc1(TMP, a);
      __ Mfc1(AT, b);
    }

    if (is_min) {
      // -0.0 prevails over +0.0.
      __ Or(TMP, TMP, AT);
    } else {
      // +0.0 prevails over -0.0.
      __ And(TMP, TMP, AT);
    }

    if (type == Primitive::kPrimDouble) {
      __ Mfc1(AT, a);
      __ Mtc1(AT, out);
      __ MoveToFpuHigh(TMP, out);
    } else {
      __ Mtc1(TMP, out);
    }
    __ B(&done);

    __ Bind(&compare);

    if (type == Primitive::kPrimDouble) {
      if (is_min) {
        // return (a <= b) ? a : b;
        __ ColeD(a, b);
      } else {
        // return (a >= b) ? a : b;
        __ ColeD(b, a);  // b <= a
      }
    } else {
      if (is_min) {
        // return (a <= b) ? a : b;
        __ ColeS(a, b);
      } else {
        // return (a >= b) ? a : b;
        __ ColeS(b, a);  // b <= a
      }
    }

    __ Bind(&select);

    if (type == Primitive::kPrimDouble) {
      __ MovtD(out, a);
      __ MovfD(out, b);
    } else {
      __ MovtS(out, a);
      __ MovfS(out, b);
    }

    __ Bind(&done);
  }
}
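
// Java min/max semantics implemented above, as a sketch (min case; max is
// symmetric; illustration only):
//   if (isNaN(a) || isNaN(b)) return NaN;   // NaN wins, unlike min.fmt
//   if (a == b) return signbit(a) ? a : b;  // -0.0 prevails over +0.0
//   return (a <= b) ? a : b;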

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kOutputOverlap);
}

// double java.lang.Math.min(double, double)
void IntrinsicLocationsBuilderMIPS::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ true,
              Primitive::kPrimDouble,
              IsR6(),
              GetAssembler());
}

// float java.lang.Math.min(float, float)
void IntrinsicLocationsBuilderMIPS::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ true,
              Primitive::kPrimFloat,
              IsR6(),
              GetAssembler());
}

// double java.lang.Math.max(double, double)
void IntrinsicLocationsBuilderMIPS::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ false,
              Primitive::kPrimDouble,
              IsR6(),
              GetAssembler());
}

// float java.lang.Math.max(float, float)
void IntrinsicLocationsBuilderMIPS::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ false,
              Primitive::kPrimFloat,
              IsR6(),
              GetAssembler());
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      Primitive::Type type,
                      bool is_R6,
                      MipsAssembler* assembler) {
  if (is_R6) {
    // Some architectures, such as ARM and MIPS (prior to r6), have a
    // conditional move instruction which only changes the target
    // (output) register if the condition is true (MIPS prior to r6 had
    // MOVF, MOVT, MOVN, and MOVZ). The SELEQZ and SELNEZ instructions
    // always change the target (output) register. If the condition is
    // true the output register gets the contents of the "rs" register;
    // otherwise, the output register is set to zero. One consequence
    // of this is that to implement something like "rd = c==0 ? rs : rt"
    // MIPS32r6 needs to use a pair of SELEQZ/SELNEZ instructions.
    // After executing this pair of instructions one of the output
    // registers from the pair will necessarily contain zero. Then the
    // code ORs the output registers from the SELEQZ/SELNEZ instructions
    // to get the final result.
    //
    // The initial test to see if the output register is same as the
    // first input register is needed to make sure that the value in the
    // first input register isn't clobbered before we've finished
    // computing the output value. The logic in the corresponding else
    // clause performs the same task but makes sure the second input
    // register isn't clobbered in the event that it's the same register
    // as the output register; the else clause also handles the case
    // where the output register is distinct from both the first, and the
    // second input registers.
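    //
    // For example, on R6 "out = (a < b) ? a : b" (32-bit min) is emitted
    // as the following sequence (illustration only):
    //   slt    AT,  b, a     // AT  = (b < a) ? 1 : 0
    //   seleqz TMP, a, AT    // TMP = (b < a) ? 0 : a
    //   selnez AT,  b, AT    // AT  = (b < a) ? b : 0
    //   or     out, TMP, AT  // one of TMP/AT is zero, so OR selects the min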
    if (type == Primitive::kPrimLong) {
      Register a_lo = locations->InAt(0).AsRegisterPairLow<Register>();
      Register a_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register b_lo = locations->InAt(1).AsRegisterPairLow<Register>();
      Register b_hi = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register out_lo = locations->Out().AsRegisterPairLow<Register>();
      Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

      MipsLabel compare_done;

      if (a_lo == b_lo) {
        if (out_lo != a_lo) {
          __ Move(out_lo, a_lo);
          __ Move(out_hi, a_hi);
        }
      } else {
        __ Slt(TMP, b_hi, a_hi);
        __ Bne(b_hi, a_hi, &compare_done);

        __ Sltu(TMP, b_lo, a_lo);

        __ Bind(&compare_done);

        if (is_min) {
          __ Seleqz(AT, a_lo, TMP);
          __ Selnez(out_lo, b_lo, TMP);  // Safe even if out_lo == a_lo/b_lo
                                         // because at this point we're
                                         // done using a_lo/b_lo.
        } else {
          __ Selnez(AT, a_lo, TMP);
          __ Seleqz(out_lo, b_lo, TMP);  // ditto
        }
        __ Or(out_lo, out_lo, AT);
        if (is_min) {
          __ Seleqz(AT, a_hi, TMP);
          __ Selnez(out_hi, b_hi, TMP);  // ditto but for out_hi & a_hi/b_hi
        } else {
          __ Selnez(AT, a_hi, TMP);
          __ Seleqz(out_hi, b_hi, TMP);  // ditto but for out_hi & a_hi/b_hi
        }
        __ Or(out_hi, out_hi, AT);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimInt);
      Register a = locations->InAt(0).AsRegister<Register>();
      Register b = locations->InAt(1).AsRegister<Register>();
      Register out = locations->Out().AsRegister<Register>();

      if (a == b) {
        if (out != a) {
          __ Move(out, a);
        }
      } else {
        __ Slt(AT, b, a);
        if (is_min) {
          __ Seleqz(TMP, a, AT);
          __ Selnez(AT, b, AT);
        } else {
          __ Selnez(TMP, a, AT);
          __ Seleqz(AT, b, AT);
        }
        __ Or(out, TMP, AT);
      }
    }
  } else {
    if (type == Primitive::kPrimLong) {
      Register a_lo = locations->InAt(0).AsRegisterPairLow<Register>();
      Register a_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register b_lo = locations->InAt(1).AsRegisterPairLow<Register>();
      Register b_hi = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register out_lo = locations->Out().AsRegisterPairLow<Register>();
      Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

      MipsLabel compare_done;

      if (a_lo == b_lo) {
        if (out_lo != a_lo) {
          __ Move(out_lo, a_lo);
          __ Move(out_hi, a_hi);
        }
      } else {
        __ Slt(TMP, a_hi, b_hi);
        __ Bne(a_hi, b_hi, &compare_done);

        __ Sltu(TMP, a_lo, b_lo);

        __ Bind(&compare_done);

        if (is_min) {
          if (out_lo != a_lo) {
            __ Movn(out_hi, a_hi, TMP);
            __ Movn(out_lo, a_lo, TMP);
          }
          if (out_lo != b_lo) {
            __ Movz(out_hi, b_hi, TMP);
            __ Movz(out_lo, b_lo, TMP);
          }
        } else {
          if (out_lo != a_lo) {
            __ Movz(out_hi, a_hi, TMP);
            __ Movz(out_lo, a_lo, TMP);
          }
          if (out_lo != b_lo) {
            __ Movn(out_hi, b_hi, TMP);
            __ Movn(out_lo, b_lo, TMP);
          }
        }
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimInt);
      Register a = locations->InAt(0).AsRegister<Register>();
      Register b = locations->InAt(1).AsRegister<Register>();
      Register out = locations->Out().AsRegister<Register>();

      if (a == b) {
        if (out != a) {
          __ Move(out, a);
        }
      } else {
        __ Slt(AT, a, b);
        if (is_min) {
          if (out != a) {
            __ Movn(out, a, AT);
          }
          if (out != b) {
            __ Movz(out, b, AT);
          }
        } else {
          if (out != a) {
            __ Movz(out, a, AT);
          }
          if (out != b) {
            __ Movn(out, b, AT);
          }
        }
      }
    }
  }
}

// int java.lang.Math.min(int, int)
void IntrinsicLocationsBuilderMIPS::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(),
            /* is_min */ true,
            Primitive::kPrimInt,
            IsR6(),
            GetAssembler());
}

// long java.lang.Math.min(long, long)
void IntrinsicLocationsBuilderMIPS::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(),
            /* is_min */ true,
            Primitive::kPrimLong,
            IsR6(),
            GetAssembler());
}

// int java.lang.Math.max(int, int)
void IntrinsicLocationsBuilderMIPS::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(),
            /* is_min */ false,
            Primitive::kPrimInt,
            IsR6(),
            GetAssembler());
}

// long java.lang.Math.max(long, long)
void IntrinsicLocationsBuilderMIPS::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(),
            /* is_min */ false,
            Primitive::kPrimLong,
            IsR6(),
            GetAssembler());
}

// double java.lang.Math.sqrt(double)
void IntrinsicLocationsBuilderMIPS::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  MipsAssembler* assembler = GetAssembler();
  FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister out = locations->Out().AsFpuRegister<FRegister>();

  __ SqrtD(out, in);
}

// byte libcore.io.Memory.peekByte(long address)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekByte(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();

  __ Lb(out, adr, 0);
}

// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekShortNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();

  if (IsR6()) {
    __ Lh(out, adr, 0);
  } else if (IsR2OrNewer()) {
    // Unlike for words, there are no lhl/lhr instructions to load
    // unaligned halfwords so the code loads individual bytes, in case
    // the address isn't halfword-aligned, and assembles them into a
    // signed halfword.
    __ Lb(AT, adr, 1);   // This byte must be sign-extended.
    __ Lb(out, adr, 0);  // This byte can be either sign-extended, or
                         // zero-extended because the following
                         // instruction overwrites the sign bits.
    __ Ins(out, AT, 8, 24);
  } else {
    __ Lbu(AT, adr, 0);  // This byte must be zero-extended. If it's not,
                         // the "or" instruction below will destroy the upper
                         // 24 bits of the final result.
    __ Lb(out, adr, 1);  // This byte must be sign-extended.
    __ Sll(out, out, 8);
    __ Or(out, out, AT);
  }
}
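
// Example of the R2+ sequence above, assuming a little-endian core and the
// halfword 0xBEEF stored at an odd (unaligned) address (illustration only):
//   Lb  AT,  adr, 1     // AT  = 0xFFFFFFBE (sign-extended high byte)
//   Lb  out, adr, 0     // out = 0xFFFFFFEF (low byte; extension irrelevant)
//   Ins out, AT, 8, 24  // out = 0xFFFFBEEF, the sign-extended short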

// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekIntNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();

  if (IsR6()) {
    __ Lw(out, adr, 0);
  } else {
    __ Lwr(out, adr, 0);
    __ Lwl(out, adr, 3);
  }
}
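
// On pre-R6 cores the Lwr/Lwl pair assembles a word from a possibly
// unaligned address (little-endian sketch, illustration only):
//   Lwr out, adr, 0  // fills the low-order bytes starting at adr
//   Lwl out, adr, 3  // fills the remaining high-order bytes through adr+3
// R6 removes Lwr/Lwl; plain Lw/Sw are specified to handle unaligned
// addresses instead.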
1395
1396// long libcore.io.Memory.peekLong(long address)
1397void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekLongNative(HInvoke* invoke) {
1398 CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
1399}
1400
1401void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekLongNative(HInvoke* invoke) {
1402 MipsAssembler* assembler = GetAssembler();
1403 Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
1404 Register out_lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
1405 Register out_hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
1406
1407 if (IsR6()) {
1408 __ Lw(out_lo, adr, 0);
1409 __ Lw(out_hi, adr, 4);
1410 } else {
1411 __ Lwr(out_lo, adr, 0);
1412 __ Lwl(out_lo, adr, 3);
1413 __ Lwr(out_hi, adr, 4);
1414 __ Lwl(out_hi, adr, 7);
1415 }
1416}
1417
1418static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
1419 LocationSummary* locations = new (arena) LocationSummary(invoke,
1420 LocationSummary::kNoCall,
1421 kIntrinsified);
1422 locations->SetInAt(0, Location::RequiresRegister());
1423 locations->SetInAt(1, Location::RequiresRegister());
1424}
1425
1426// void libcore.io.Memory.pokeByte(long address, byte value)
1427void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeByte(HInvoke* invoke) {
1428 CreateIntIntToVoidLocations(arena_, invoke);
1429}
1430
1431void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeByte(HInvoke* invoke) {
1432 MipsAssembler* assembler = GetAssembler();
1433 Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
1434 Register val = invoke->GetLocations()->InAt(1).AsRegister<Register>();
1435
1436 __ Sb(val, adr, 0);
1437}
1438
1439// void libcore.io.Memory.pokeShort(long address, short value)
1440void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeShortNative(HInvoke* invoke) {
1441 CreateIntIntToVoidLocations(arena_, invoke);
1442}
1443
1444void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeShortNative(HInvoke* invoke) {
1445 MipsAssembler* assembler = GetAssembler();
1446 Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
1447 Register val = invoke->GetLocations()->InAt(1).AsRegister<Register>();
1448
1449 if (IsR6()) {
1450 __ Sh(val, adr, 0);
1451 } else {
1452 // Unlike for words, there are no shl/shr instructions to store
1453 // unaligned halfwords so the code stores individual bytes, in case
1454 // the address isn't halfword-aligned.
1455 __ Sb(val, adr, 0);
1456 __ Srl(AT, val, 8);
1457 __ Sb(AT, adr, 1);
1458 }
1459}
1460
1461// void libcore.io.Memory.pokeInt(long address, int value)
1462void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeIntNative(HInvoke* invoke) {
1463 CreateIntIntToVoidLocations(arena_, invoke);
1464}
1465
1466void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeIntNative(HInvoke* invoke) {
1467 MipsAssembler* assembler = GetAssembler();
1468 Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
1469 Register val = invoke->GetLocations()->InAt(1).AsRegister<Register>();
1470
1471 if (IsR6()) {
1472 __ Sw(val, adr, 0);
1473 } else {
1474 __ Swr(val, adr, 0);
1475 __ Swl(val, adr, 3);
1476 }
1477}
1478
1479// void libcore.io.Memory.pokeLong(long address, long value)
1480void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeLongNative(HInvoke* invoke) {
1481 CreateIntIntToVoidLocations(arena_, invoke);
1482}
1483
1484void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeLongNative(HInvoke* invoke) {
1485 MipsAssembler* assembler = GetAssembler();
1486 Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
1487 Register val_lo = invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>();
1488 Register val_hi = invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>();
1489
1490 if (IsR6()) {
1491 __ Sw(val_lo, adr, 0);
1492 __ Sw(val_hi, adr, 4);
1493 } else {
1494 __ Swr(val_lo, adr, 0);
1495 __ Swl(val_lo, adr, 3);
1496 __ Swr(val_hi, adr, 4);
1497 __ Swl(val_hi, adr, 7);
1498 }
1499}
1500
Chris Larsencf283da2016-01-19 16:45:35 -08001501// Thread java.lang.Thread.currentThread()
1502void IntrinsicLocationsBuilderMIPS::VisitThreadCurrentThread(HInvoke* invoke) {
1503 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1504 LocationSummary::kNoCall,
1505 kIntrinsified);
1506 locations->SetOut(Location::RequiresRegister());
1507}
1508
1509void IntrinsicCodeGeneratorMIPS::VisitThreadCurrentThread(HInvoke* invoke) {
1510 MipsAssembler* assembler = GetAssembler();
1511 Register out = invoke->GetLocations()->Out().AsRegister<Register>();
1512
1513 __ LoadFromOffset(kLoadWord,
1514 out,
1515 TR,
1516 Thread::PeerOffset<kMipsPointerSize>().Int32Value());
1517}
1518
Alexey Frunze15958152017-02-09 19:08:30 -08001519static void CreateIntIntIntToIntLocations(ArenaAllocator* arena,
1520 HInvoke* invoke,
1521 Primitive::Type type) {
1522 bool can_call = kEmitCompilerReadBarrier &&
1523 (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
1524 invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
Chris Larsen4fdc6d92015-12-14 13:26:14 -08001525 LocationSummary* locations = new (arena) LocationSummary(invoke,
Alexey Frunze15958152017-02-09 19:08:30 -08001526 (can_call
1527 ? LocationSummary::kCallOnSlowPath
1528 : LocationSummary::kNoCall),
Chris Larsen4fdc6d92015-12-14 13:26:14 -08001529 kIntrinsified);
Alexey Frunzec61c0762017-04-10 13:54:23 -07001530 if (can_call && kUseBakerReadBarrier) {
1531 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
1532 }
Chris Larsen4fdc6d92015-12-14 13:26:14 -08001533 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1534 locations->SetInAt(1, Location::RequiresRegister());
1535 locations->SetInAt(2, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08001536 locations->SetOut(Location::RequiresRegister(),
1537 (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
1538 if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1539 // We need a temporary register for the read barrier marking slow
1540 // path in InstructionCodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier.
1541 locations->AddTemp(Location::RequiresRegister());
1542 }
Chris Larsen4fdc6d92015-12-14 13:26:14 -08001543}

// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_R6,
                         CodeGeneratorMIPS* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot)) << type;
  MipsAssembler* assembler = codegen->GetAssembler();
  // Target register.
  Location trg_loc = locations->Out();
  // Object pointer.
  Location base_loc = locations->InAt(1);
  Register base = base_loc.AsRegister<Register>();
  // The "offset" argument is passed as a "long". Since this code is for
  // a 32-bit processor, we can only use 32-bit addresses, so we only
  // need the low 32-bits of offset.
  Location offset_loc = locations->InAt(2);
  Register offset_lo = offset_loc.AsRegisterPairLow<Register>();

  if (!(kEmitCompilerReadBarrier && kUseBakerReadBarrier && (type == Primitive::kPrimNot))) {
    __ Addu(TMP, base, offset_lo);
  }

  switch (type) {
    case Primitive::kPrimLong: {
      Register trg_lo = trg_loc.AsRegisterPairLow<Register>();
      Register trg_hi = trg_loc.AsRegisterPairHigh<Register>();
      CHECK(!is_volatile);  // TODO: support atomic 8-byte volatile loads.
      if (is_R6) {
        __ Lw(trg_lo, TMP, 0);
        __ Lw(trg_hi, TMP, 4);
      } else {
        __ Lwr(trg_lo, TMP, 0);
        __ Lwl(trg_lo, TMP, 3);
        __ Lwr(trg_hi, TMP, 4);
        __ Lwl(trg_hi, TMP, 7);
      }
      break;
    }

    case Primitive::kPrimInt: {
      Register trg = trg_loc.AsRegister<Register>();
      if (is_R6) {
        __ Lw(trg, TMP, 0);
      } else {
        __ Lwr(trg, TMP, 0);
        __ Lwl(trg, TMP, 3);
      }
      if (is_volatile) {
        __ Sync(0);
      }
      break;
    }

    case Primitive::kPrimNot: {
      Register trg = trg_loc.AsRegister<Register>();
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          Location temp = locations->GetTemp(0);
          codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke,
                                                             trg_loc,
                                                             base,
                                                             /* offset */ 0U,
                                                             /* index */ offset_loc,
                                                             TIMES_1,
                                                             temp,
                                                             /* needs_null_check */ false);
          if (is_volatile) {
            __ Sync(0);
          }
        } else {
          if (is_R6) {
            __ Lw(trg, TMP, 0);
          } else {
            __ Lwr(trg, TMP, 0);
            __ Lwl(trg, TMP, 3);
          }
          if (is_volatile) {
            __ Sync(0);
          }
          codegen->GenerateReadBarrierSlow(invoke,
                                           trg_loc,
                                           trg_loc,
                                           base_loc,
                                           /* offset */ 0U,
                                           /* index */ offset_loc);
        }
      } else {
        if (is_R6) {
          __ Lw(trg, TMP, 0);
        } else {
          __ Lwr(trg, TMP, 0);
          __ Lwl(trg, TMP, 3);
        }
        if (is_volatile) {
          __ Sync(0);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type " << type;
      UNREACHABLE();
  }
}
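
// For a volatile int get the sequence generated above is effectively:
//
//   addu TMP, base, offset_lo   # form the field address
//   lw   trg, 0(TMP)            # load the value
//   sync                        # acquire: order the load before later accesses
//
// i.e. the trailing sync(0) provides the acquire half of the volatile
// semantics; the release half is the sync emitted before the store in
// GenUnsafePut below.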

// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, IsR6(), codegen_);
}

// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, IsR6(), codegen_);
}

// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, IsR6(), codegen_);
}

// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, IsR6(), codegen_);
}

// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, IsR6(), codegen_);
}

static void CreateIntIntIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         bool is_R6,
                         CodeGeneratorMIPS* codegen) {
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot)) << type;
  MipsAssembler* assembler = codegen->GetAssembler();
  // Object pointer.
  Register base = locations->InAt(1).AsRegister<Register>();
  // The "offset" argument is passed as a "long", i.e., it's 64-bits in
  // size. Since this code is for a 32-bit processor, we can only use
  // 32-bit addresses, so we only need the low 32-bits of offset.
  Register offset_lo = locations->InAt(2).AsRegisterPairLow<Register>();

  __ Addu(TMP, base, offset_lo);
  if (is_volatile || is_ordered) {
    __ Sync(0);
  }
  if ((type == Primitive::kPrimInt) || (type == Primitive::kPrimNot)) {
    Register value = locations->InAt(3).AsRegister<Register>();

    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      __ PoisonHeapReference(AT, value);
      value = AT;
    }

    if (is_R6) {
      __ Sw(value, TMP, 0);
    } else {
      __ Swr(value, TMP, 0);
      __ Swl(value, TMP, 3);
    }
  } else {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();
    CHECK(!is_volatile);  // TODO: support atomic 8-byte volatile stores.
    if (is_R6) {
      __ Sw(value_lo, TMP, 0);
      __ Sw(value_hi, TMP, 4);
    } else {
      __ Swr(value_lo, TMP, 0);
      __ Swl(value_lo, TMP, 3);
      __ Swr(value_hi, TMP, 4);
      __ Swl(value_hi, TMP, 7);
    }
  }

  if (is_volatile) {
    __ Sync(0);
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, locations->InAt(3).AsRegister<Register>(), value_can_be_null);
  }
}
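
// The barrier placement above follows the usual MIPS mapping for Java
// volatile/ordered stores: an ordered put ("lazySet") emits sync + store,
// making it a release store, while a volatile put emits sync + store + sync,
// the trailing full barrier keeping the store ordered before any subsequent
// volatile accesses.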

// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               IsR6(),
               codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      kUseBakerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           (can_call
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall),
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());

  // Temporary register used in CAS by (Baker) read barrier.
  if (can_call) {
    locations->AddTemp(Location::RequiresRegister());
  }
}

// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
static void GenCas(HInvoke* invoke, Primitive::Type type, CodeGeneratorMIPS* codegen) {
  MipsAssembler* assembler = codegen->GetAssembler();
  LocationSummary* locations = invoke->GetLocations();
  bool isR6 = codegen->GetInstructionSetFeatures().IsR6();
  Register base = locations->InAt(1).AsRegister<Register>();
  Location offset_loc = locations->InAt(2);
  Register offset_lo = offset_loc.AsRegisterPairLow<Register>();
  Register expected = locations->InAt(3).AsRegister<Register>();
  Register value = locations->InAt(4).AsRegister<Register>();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();

  DCHECK_NE(base, out);
  DCHECK_NE(offset_lo, out);
  DCHECK_NE(expected, out);

  if (type == Primitive::kPrimNot) {
    // The only read barrier implementation supporting the
    // UnsafeCASObject intrinsic is the Baker-style read barriers.
    DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);

    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      Location temp = locations->GetTemp(0);
      // Need to make sure the reference stored in the field is a to-space
      // one before attempting the CAS or the CAS could fail incorrectly.
      codegen->GenerateReferenceLoadWithBakerReadBarrier(
          invoke,
          out_loc,  // Unused, used only as a "temporary" within the read barrier.
          base,
          /* offset */ 0u,
          /* index */ offset_loc,
          ScaleFactor::TIMES_1,
          temp,
          /* needs_null_check */ false,
          /* always_update_field */ true);
    }
  }

  MipsLabel loop_head, exit_loop;
  __ Addu(TMP, base, offset_lo);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    __ PoisonHeapReference(expected);
    // Do not poison `value`, if it is the same register as
    // `expected`, which has just been poisoned.
    if (value != expected) {
      __ PoisonHeapReference(value);
    }
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = (tmp_value == 0);

  __ Sync(0);
  __ Bind(&loop_head);
  if ((type == Primitive::kPrimInt) || (type == Primitive::kPrimNot)) {
    if (isR6) {
      __ LlR6(out, TMP);
    } else {
      __ LlR2(out, TMP);
    }
  } else {
    LOG(FATAL) << "Unsupported op size " << type;
    UNREACHABLE();
  }
  __ Subu(out, out, expected);  // If we didn't get the 'expected'
  __ Sltiu(out, out, 1);        // value, set 'out' to false, and
  __ Beqz(out, &exit_loop);     // return.
  __ Move(out, value);  // Use 'out' for the 'store conditional' instruction.
                        // If we used 'value' directly, we would lose 'value'
                        // when the store fails. Whether the store succeeds
                        // or fails, the 'store conditional' instruction leaves
                        // the correct Boolean value in the 'out' register.
  // This test isn't really necessary. We only support Primitive::kPrimInt,
  // Primitive::kPrimNot, and we already verified that we're working on one
  // of those two types. It's left here in case the code needs to support
  // other types in the future.
  if ((type == Primitive::kPrimInt) || (type == Primitive::kPrimNot)) {
    if (isR6) {
      __ ScR6(out, TMP);
    } else {
      __ ScR2(out, TMP);
    }
  }
  __ Beqz(out, &loop_head);  // If we couldn't do the read-modify-write
                             // cycle atomically then retry.
  __ Bind(&exit_loop);
  __ Sync(0);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    __ UnpoisonHeapReference(expected);
    // Do not unpoison `value`, if it is the same register as
    // `expected`, which has just been unpoisoned.
    if (value != expected) {
      __ UnpoisonHeapReference(value);
    }
  }
}
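
// The atomicity above comes from the ll/sc pair: ll starts an atomic
// read-modify-write on the address in TMP, and sc only performs its store
// (and writes 1 into 'out') if nothing else touched that location in
// between. For example, if another thread writes the field between our Ll
// and Sc, the Sc stores nothing, writes 0 into 'out', and the Beqz sends us
// back to loop_head to reload and recompare.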

// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke, Primitive::kPrimInt, codegen_);
}

// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barriers.
  if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
    return;
  }

  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barriers.
  DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

  GenCas(invoke, Primitive::kPrimNot, codegen_);
}

// int java.lang.String.compareTo(String anotherString)
void IntrinsicLocationsBuilderMIPS::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
}

void IntrinsicCodeGeneratorMIPS::VisitStringCompareTo(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  SlowPathCodeMIPS* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqz(argument, slow_path->GetEntryLabel());
  codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
  __ Bind(slow_path->GetExitLabel());
}

// boolean java.lang.String.equals(Object anObject)
void IntrinsicLocationsBuilderMIPS::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());

  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS::VisitStringEquals(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp1 = locations->GetTemp(0).AsRegister<Register>();
  Register temp2 = locations->GetTemp(1).AsRegister<Register>();
  Register temp3 = locations->GetTemp(2).AsRegister<Register>();

  MipsLabel loop;
  MipsLabel end;
  MipsLabel return_true;
  MipsLabel return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this" and the register
  // containing the pointer to "anObject" are the same register, then
  // "this" and "anObject" are the same object and we can short-circuit
  // the logic to a true result.
  if (str == arg) {
    __ LoadConst32(out, 1);
    return;
  }
  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ Beqz(arg, &return_false);
  }

  // Reference equality check, return true if same reference.
  __ Beq(str, arg, &return_true);

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    __ Lw(temp1, str, class_offset);
    __ Lw(temp2, arg, class_offset);
    __ Bne(temp1, temp2, &return_false);
  }

  // Load `count` fields of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if `count` fields are equal; return false if they're not.
  // This also compares the compression style: if it differs, return false.
  __ Bne(temp1, temp2, &return_false);
  // Return true if both strings are empty. Even with string compression `count == 0` means empty.
  static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                "Expecting 0=compressed, 1=uncompressed");
  __ Beqz(temp1, &return_true);

  // Don't overwrite the input registers.
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 4 bytes at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  // For string compression, calculate the number of bytes to compare (not chars).
  if (mirror::kUseStringCompression) {
    // Extract the compression flag.
    if (IsR2OrNewer()) {
      __ Ext(temp2, temp1, 0, 1);
    } else {
      __ Sll(temp2, temp1, 31);
      __ Srl(temp2, temp2, 31);
    }
    __ Srl(temp1, temp1, 1);       // Extract the length.
    __ Sllv(temp1, temp1, temp2);  // Double the byte count if uncompressed.
  }
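
  // For example, with compression enabled, "abc" stored compressed has
  // count = (3 << 1) | 0 = 6: the flag (temp2) is 0 and temp1 becomes
  // 6 >> 1 = 3 bytes to compare. Stored uncompressed it has
  // count = (3 << 1) | 1 = 7: the flag is 1 and temp1 becomes
  // (7 >> 1) << 1 = 6 bytes, i.e. three 16-bit chars.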

  // Loop to compare strings 4 bytes at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to kObjectAlignment.
  __ Bind(&loop);
  __ Lw(out, TMP, value_offset);
  __ Lw(temp2, temp3, value_offset);
  __ Bne(out, temp2, &return_false);
  __ Addiu(TMP, TMP, 4);
  __ Addiu(temp3, temp3, 4);
  // With string compression, we have compared 4 bytes, otherwise 2 chars.
  __ Addiu(temp1, temp1, mirror::kUseStringCompression ? -4 : -2);
  __ Bgtz(temp1, &loop);

  // Return true and exit the function.
  // If the loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst32(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst32(out, 0);
  __ Bind(&end);
}

static void GenerateStringIndexOf(HInvoke* invoke,
                                  bool start_at_zero,
                                  MipsAssembler* assembler,
                                  CodeGeneratorMIPS* codegen,
                                  ArenaAllocator* allocator) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<Register>() : TMP;

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch for a large constant, or omit the slow-path for a small constant or
  // a char.
  SlowPathCodeMIPS* slow_path = nullptr;
  HInstruction* code_point = invoke->InputAt(1);
  if (code_point->IsIntConstant()) {
    if (!IsUint<16>(code_point->AsIntConstant()->GetValue())) {
      // Always needs the slow-path. We could directly dispatch to it,
      // but this case should be rare, so for simplicity just put the
      // full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathMIPS(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else if (code_point->GetType() != Primitive::kPrimChar) {
    Register char_reg = locations->InAt(1).AsRegister<Register>();
    // The "bltu" conditional branch tests to see if the character value
    // fits in a valid 16-bit (MIPS halfword) value. If it doesn't then
    // the character being searched for, if it exists in the string, is
    // encoded using UTF-16 and stored in the string as two (16-bit)
    // halfwords. Currently the assembly code used to implement this
    // intrinsic doesn't support searching for a character stored as
    // two halfwords, so we fall back to using the generic implementation
    // of indexOf().
    __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
    slow_path = new (allocator) IntrinsicSlowPathMIPS(invoke);
    codegen->AddSlowPath(slow_path);
    __ Bltu(tmp_reg, char_reg, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, A2);
    // Start-index = 0.
    __ Clear(tmp_reg);
  }

  codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

// int java.lang.String.indexOf(int ch)
void IntrinsicLocationsBuilderMIPS::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime
  // calling convention. So it's best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));

  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorMIPS::VisitStringIndexOf(HInvoke* invoke) {
  GenerateStringIndexOf(invoke,
                        /* start_at_zero */ true,
                        GetAssembler(),
                        codegen_,
                        GetAllocator());
}

// int java.lang.String.indexOf(int ch, int fromIndex)
void IntrinsicLocationsBuilderMIPS::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime
  // calling convention. So it's best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateStringIndexOf(invoke,
                        /* start_at_zero */ false,
                        GetAssembler(),
                        codegen_,
                        GetAllocator());
}

// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
void IntrinsicLocationsBuilderMIPS::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
}

void IntrinsicCodeGeneratorMIPS::VisitStringNewStringFromBytes(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  SlowPathCodeMIPS* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqz(byte_array, slow_path->GetEntryLabel());
  codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
  __ Bind(slow_path->GetExitLabel());
}

// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
void IntrinsicLocationsBuilderMIPS::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainOnly,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
}

void IntrinsicCodeGeneratorMIPS::VisitStringNewStringFromChars(HInvoke* invoke) {
  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
}

// java.lang.StringFactory.newStringFromString(String toCopy)
void IntrinsicLocationsBuilderMIPS::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
}

void IntrinsicCodeGeneratorMIPS::VisitStringNewStringFromString(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  SlowPathCodeMIPS* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqz(string_to_copy, slow_path->GetEntryLabel());
  codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

static void GenIsInfinite(LocationSummary* locations,
                          const Primitive::Type type,
                          const bool isR6,
                          MipsAssembler* assembler) {
  FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
  Register out = locations->Out().AsRegister<Register>();

  DCHECK(type == Primitive::kPrimFloat || type == Primitive::kPrimDouble);

  if (isR6) {
    if (type == Primitive::kPrimDouble) {
      __ ClassD(FTMP, in);
    } else {
      __ ClassS(FTMP, in);
    }
    __ Mfc1(out, FTMP);
    __ Andi(out, out, kPositiveInfinity | kNegativeInfinity);
    __ Sltu(out, ZERO, out);
  } else {
    // If one, or more, of the exponent bits is zero, then the number can't be infinite.
    if (type == Primitive::kPrimDouble) {
      __ MoveFromFpuHigh(TMP, in);
      __ LoadConst32(AT, High32Bits(kPositiveInfinityDouble));
    } else {
      __ Mfc1(TMP, in);
      __ LoadConst32(AT, kPositiveInfinityFloat);
    }
    __ Xor(TMP, TMP, AT);

    __ Sll(TMP, TMP, 1);

    if (type == Primitive::kPrimDouble) {
      __ Mfc1(AT, in);
      __ Or(TMP, TMP, AT);
    }
    // If any of the significand bits are one, then the number is not infinite.
    __ Sltiu(out, TMP, 1);
  }
}
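
// A worked example of the pre-R6 path for floats: +Inf is 0x7F800000 and
// -Inf is 0xFF800000. XORing with kPositiveInfinityFloat (0x7F800000) maps
// these to 0x00000000 and 0x80000000, and the one-bit left shift discards
// the remaining sign bit, so TMP is zero exactly for the two infinities.
// Any finite value or NaN leaves a nonzero bit behind, and Sltiu then
// produces 0.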

// boolean java.lang.Float.isInfinite(float)
void IntrinsicLocationsBuilderMIPS::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), Primitive::kPrimFloat, IsR6(), GetAssembler());
}

// boolean java.lang.Double.isInfinite(double)
void IntrinsicLocationsBuilderMIPS::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), Primitive::kPrimDouble, IsR6(), GetAssembler());
}

static void GenHighestOneBit(LocationSummary* locations,
                             const Primitive::Type type,
                             bool isR6,
                             MipsAssembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  if (type == Primitive::kPrimLong) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    if (isR6) {
      __ ClzR6(TMP, in_hi);
    } else {
      __ ClzR2(TMP, in_hi);
    }
    __ LoadConst32(AT, 0x80000000);
    __ Srlv(out_hi, AT, TMP);
    __ And(out_hi, out_hi, in_hi);
    if (isR6) {
      __ ClzR6(TMP, in_lo);
    } else {
      __ ClzR2(TMP, in_lo);
    }
    __ Srlv(out_lo, AT, TMP);
    __ And(out_lo, out_lo, in_lo);
    if (isR6) {
      __ Seleqz(out_lo, out_lo, out_hi);
    } else {
      __ Movn(out_lo, ZERO, out_hi);
    }
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    if (isR6) {
      __ ClzR6(TMP, in);
    } else {
      __ ClzR2(TMP, in);
    }
    __ LoadConst32(AT, 0x80000000);
    __ Srlv(AT, AT, TMP);  // Srlv shifts in the range of [0;31] bits (lower 5 bits of arg).
    __ And(out, AT, in);   // So this is required for 0 (=shift by 32).
  }
}
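
// For example, with in = 0x0000F0F0: clz gives 16, 0x80000000 >> 16 is
// 0x00008000, and the final And leaves 0x00008000 (the highest set bit).
// With in = 0, clz gives 32, but Srlv only uses the low 5 bits of the shift
// amount (a shift by 32 acts as a shift by 0), so AT stays 0x80000000; it is
// the And with in = 0 that correctly produces 0.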

// int java.lang.Integer.highestOneBit(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke->GetLocations(), Primitive::kPrimInt, IsR6(), GetAssembler());
}

// long java.lang.Long.highestOneBit(long)
void IntrinsicLocationsBuilderMIPS::VisitLongHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitLongHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke->GetLocations(), Primitive::kPrimLong, IsR6(), GetAssembler());
}

static void GenLowestOneBit(LocationSummary* locations,
                            const Primitive::Type type,
                            bool isR6,
                            MipsAssembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  if (type == Primitive::kPrimLong) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    __ Subu(TMP, ZERO, in_lo);
    __ And(out_lo, TMP, in_lo);
    __ Subu(TMP, ZERO, in_hi);
    __ And(out_hi, TMP, in_hi);
    if (isR6) {
      __ Seleqz(out_hi, out_hi, out_lo);
    } else {
      __ Movn(out_hi, ZERO, out_lo);
    }
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    __ Subu(TMP, ZERO, in);
    __ And(out, TMP, in);
  }
}
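
// This is the classic two's-complement identity lowestOneBit(x) = x & -x.
// For example, in = 0b01101000: -in is ...10011000 (all higher bits set), and
// the And keeps only 0b00001000. For in = 0, -in is also 0 and the result is
// correctly 0.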

// int java.lang.Integer.lowestOneBit(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke->GetLocations(), Primitive::kPrimInt, IsR6(), GetAssembler());
}

// long java.lang.Long.lowestOneBit(long)
void IntrinsicLocationsBuilderMIPS::VisitLongLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke->GetLocations(), Primitive::kPrimLong, IsR6(), GetAssembler());
}

// int java.lang.Math.round(float)
void IntrinsicLocationsBuilderMIPS::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->AddTemp(Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  MipsAssembler* assembler = GetAssembler();
  FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister half = locations->GetTemp(0).AsFpuRegister<FRegister>();
  Register out = locations->Out().AsRegister<Register>();

  MipsLabel done;

  if (IsR6()) {
    // out = floor(in);
    //
    // if (out != MAX_VALUE && out != MIN_VALUE) {
    //   TMP = ((in - out) >= 0.5) ? 1 : 0;
    //   return out += TMP;
    // }
    // return out;

    // out = floor(in);
    __ FloorWS(FTMP, in);
    __ Mfc1(out, FTMP);

    // if (out != MAX_VALUE && out != MIN_VALUE)
    __ Addiu(TMP, out, 1);
    __ Aui(TMP, TMP, 0x8000);  // TMP = out + 0x8000 0001
                               // or out - 0x7FFF FFFF.
                               // IOW, TMP = 1 if out = Int.MIN_VALUE
                               // or TMP = 0 if out = Int.MAX_VALUE.
    __ Srl(TMP, TMP, 1);       // TMP = 0 if out = Int.MIN_VALUE
                               //         or out = Int.MAX_VALUE.
    __ Beqz(TMP, &done);
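
    // Concretely: for out = Int.MAX_VALUE (0x7FFFFFFF), TMP is
    // 0x7FFFFFFF + 0x80000001 = 0 (mod 2^32); for out = Int.MIN_VALUE
    // (0x80000000), TMP is 0x80000000 + 0x80000001 = 1, which the Srl also
    // folds to 0. Any other value of out leaves TMP nonzero, so the Beqz
    // above skips the rounding adjustment exactly for the two saturated
    // results of floor.w.s.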

    // TMP = (0.5f <= (in - out)) ? -1 : 0;
    __ Cvtsw(FTMP, FTMP);  // Convert output of floor.w.s back to "float".
    __ LoadConst32(AT, bit_cast<int32_t, float>(0.5f));
    __ SubS(FTMP, in, FTMP);
    __ Mtc1(AT, half);

    __ CmpLeS(FTMP, half, FTMP);
    __ Mfc1(TMP, FTMP);

    // Return out -= TMP.
    __ Subu(out, out, TMP);
  } else {
    // if (in.isNaN) {
    //   return 0;
    // }
    //
    // out = floor.w.s(in);
    //
    // /*
    //  * This "if" statement is only needed for the pre-R6 version of floor.w.s
    //  * which outputs Integer.MAX_VALUE for negative numbers with magnitudes
    //  * too large to fit in a 32-bit integer.
    //  */
    // if (out == Integer.MAX_VALUE) {
    //   TMP = (in < 0.0f) ? 1 : 0;
    //   /*
    //    * If TMP is 1, then adding it to out will wrap its value from
    //    * Integer.MAX_VALUE to Integer.MIN_VALUE.
    //    */
    //   return out += TMP;
    // }
    //
    // /*
    //  * For negative values not handled by the previous "if" statement the
    //  * test here will correctly set the value of TMP.
    //  */
    // TMP = ((in - out) >= 0.5f) ? 1 : 0;
    // return out += TMP;

    MipsLabel finite;
    MipsLabel add;

    // Test for NaN.
    __ CunS(in, in);

    // Return zero for NaN.
    __ Move(out, ZERO);
    __ Bc1t(&done);

    // out = floor(in);
    __ FloorWS(FTMP, in);
    __ Mfc1(out, FTMP);

    __ LoadConst32(TMP, -1);

    // TMP = (out == java.lang.Integer.MAX_VALUE) ? -1 : 0;
    __ LoadConst32(AT, std::numeric_limits<int32_t>::max());
    __ Bne(AT, out, &finite);

    __ Mtc1(ZERO, FTMP);
    __ ColtS(in, FTMP);

    __ B(&add);

    __ Bind(&finite);

    // TMP = (0.5f <= (in - out)) ? -1 : 0;
    __ Cvtsw(FTMP, FTMP);  // Convert output of floor.w.s back to "float".
    __ LoadConst32(AT, bit_cast<int32_t, float>(0.5f));
    __ SubS(FTMP, in, FTMP);
    __ Mtc1(AT, half);
    __ ColeS(half, FTMP);

    __ Bind(&add);

    __ Movf(TMP, ZERO);

    // Return out -= TMP.
    __ Subu(out, out, TMP);
  }
  __ Bind(&done);
}

// void java.lang.String.getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin)
void IntrinsicLocationsBuilderMIPS::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);
  const size_t char_shift = Primitive::ComponentSizeShift(Primitive::kPrimChar);

  Register srcObj = locations->InAt(0).AsRegister<Register>();
  Register srcBegin = locations->InAt(1).AsRegister<Register>();
  Register srcEnd = locations->InAt(2).AsRegister<Register>();
  Register dstObj = locations->InAt(3).AsRegister<Register>();
  Register dstBegin = locations->InAt(4).AsRegister<Register>();

  Register dstPtr = locations->GetTemp(0).AsRegister<Register>();
  Register srcPtr = locations->GetTemp(1).AsRegister<Register>();
  Register numChrs = locations->GetTemp(2).AsRegister<Register>();

  MipsLabel done;
  MipsLabel loop;

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Get offset of value field within a string object.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  __ Beq(srcEnd, srcBegin, &done);  // No characters to move.

  // Calculate number of characters to be copied.
  __ Subu(numChrs, srcEnd, srcBegin);

  // Calculate destination address.
  __ Addiu(dstPtr, dstObj, data_offset);
  __ ShiftAndAdd(dstPtr, dstBegin, dstPtr, char_shift);

  if (mirror::kUseStringCompression) {
    MipsLabel uncompressed_copy, compressed_loop;
    const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
    // Load count field and extract compression flag.
    __ LoadFromOffset(kLoadWord, TMP, srcObj, count_offset);
    __ Sll(TMP, TMP, 31);

    // If string is uncompressed, use uncompressed path.
    __ Bnez(TMP, &uncompressed_copy);

    // Copy loop for compressed src, copying 1 character (8-bit) to (16-bit) at a time.
    __ Addu(srcPtr, srcObj, srcBegin);
    __ Bind(&compressed_loop);
    __ LoadFromOffset(kLoadUnsignedByte, TMP, srcPtr, value_offset);
    __ StoreToOffset(kStoreHalfword, TMP, dstPtr, 0);
    __ Addiu(numChrs, numChrs, -1);
    __ Addiu(srcPtr, srcPtr, 1);
    __ Addiu(dstPtr, dstPtr, 2);
    __ Bnez(numChrs, &compressed_loop);

    __ B(&done);
    __ Bind(&uncompressed_copy);
  }
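
  // Note that the compressed path widens as it copies: each 8-bit char is
  // loaded zero-extended (kLoadUnsignedByte) and stored as a 16-bit
  // halfword, which is exactly the decompression the char[] destination
  // needs.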

  // Calculate source address.
  __ Addiu(srcPtr, srcObj, value_offset);
  __ ShiftAndAdd(srcPtr, srcBegin, srcPtr, char_shift);

  __ Bind(&loop);
  __ Lh(AT, srcPtr, 0);
  __ Addiu(numChrs, numChrs, -1);
  __ Addiu(srcPtr, srcPtr, char_size);
  __ Sh(AT, dstPtr, 0);
  __ Addiu(dstPtr, dstPtr, char_size);
  __ Bnez(numChrs, &loop);

  __ Bind(&done);
}

static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCallOnMainOnly,
                                                           kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimDouble));
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCallOnMainOnly,
                                                           kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimDouble));
}

static void GenFPToFPCall(HInvoke* invoke, CodeGeneratorMIPS* codegen, QuickEntrypointEnum entry) {
  LocationSummary* locations = invoke->GetLocations();
  FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
  DCHECK_EQ(in, F12);
  FRegister out = locations->Out().AsFpuRegister<FRegister>();
  DCHECK_EQ(out, F0);

  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
}

static void GenFPFPToFPCall(HInvoke* invoke,
                            CodeGeneratorMIPS* codegen,
                            QuickEntrypointEnum entry) {
  LocationSummary* locations = invoke->GetLocations();
  FRegister in0 = locations->InAt(0).AsFpuRegister<FRegister>();
  DCHECK_EQ(in0, F12);
  FRegister in1 = locations->InAt(1).AsFpuRegister<FRegister>();
  DCHECK_EQ(in1, F14);
  FRegister out = locations->Out().AsFpuRegister<FRegister>();
  DCHECK_EQ(out, F0);

  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
}
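
// The DCHECKs above document the floating-point calling convention these
// helpers rely on (the O32 convention used here): the first two FP arguments
// arrive in F12 and F14, and the FP result is returned in F0, so the inputs
// and output are already in the registers the runtime entrypoint expects and
// no moves are needed around the call.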
2803
// static double java.lang.Math.cos(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}

// static double java.lang.Math.sin(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}

// static double java.lang.Math.acos(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}

// static double java.lang.Math.asin(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}

// static double java.lang.Math.atan(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}

// static double java.lang.Math.atan2(double y, double x)
void IntrinsicLocationsBuilderMIPS::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickAtan2);
}

// static double java.lang.Math.cbrt(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

// static double java.lang.Math.cosh(double x)
void IntrinsicLocationsBuilderMIPS::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

// static double java.lang.Math.exp(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

// static double java.lang.Math.expm1(double x)
void IntrinsicLocationsBuilderMIPS::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

// static double java.lang.Math.hypot(double x, double y)
void IntrinsicLocationsBuilderMIPS::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickHypot);
}

// static double java.lang.Math.log(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

// static double java.lang.Math.log10(double x)
void IntrinsicLocationsBuilderMIPS::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

// static double java.lang.Math.nextAfter(double start, double direction)
void IntrinsicLocationsBuilderMIPS::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

// static double java.lang.Math.sinh(double x)
void IntrinsicLocationsBuilderMIPS::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

// static double java.lang.Math.tan(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

// static double java.lang.Math.tanh(double x)
void IntrinsicLocationsBuilderMIPS::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}

// static void java.lang.System.arraycopy(Object src, int srcPos,
//                                        Object dest, int destPos,
//                                        int length)
void IntrinsicLocationsBuilderMIPS::VisitSystemArrayCopyChar(HInvoke* invoke) {
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  // As long as we are checking, we might as well check to see if the src and dest
  // positions are >= 0.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyway.
    return;
  }

  // And since we are already checking, check the length too.
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0) {
      // Just call as normal.
      return;
    }
  }

  // Okay, it is safe to generate inline code.
  LocationSummary* locations =
      new (arena_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

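// Illustrative note on the builder above: a call site such as
//   System.arraycopy(src, -1, dst, 0, n);
// with a constant negative position (or a constant negative length) is known
// to throw at runtime, so the builder declines to create intrinsic locations
// and the call is compiled as a regular invoke instead.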
// Utility routine to verify that "length(input) - pos >= length".
static void EnoughItems(MipsAssembler* assembler,
                        Register length_input_minus_pos,
                        Location length,
                        SlowPathCodeMIPS* slow_path) {
  if (length.IsConstant()) {
    int32_t length_constant = length.GetConstant()->AsIntConstant()->GetValue();

    if (IsInt<16>(length_constant)) {
      __ Slti(TMP, length_input_minus_pos, length_constant);
      __ Bnez(TMP, slow_path->GetEntryLabel());
    } else {
      __ LoadConst32(TMP, length_constant);
      __ Blt(length_input_minus_pos, TMP, slow_path->GetEntryLabel());
    }
  } else {
    __ Blt(length_input_minus_pos, length.AsRegister<Register>(), slow_path->GetEntryLabel());
  }
}

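// Note on the constant case in EnoughItems: Slti only takes a 16-bit signed
// immediate, so "remaining < constant" is tested directly with Slti/Bnez when
// the constant fits, and via LoadConst32/Blt otherwise.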
static void CheckPosition(MipsAssembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCodeMIPS* slow_path,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  // Calculate length(input) - pos.
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, AT, input, length_offset);
        EnoughItems(assembler, AT, length, slow_path);
      }
    } else {
      // Check that (length(input) - pos) >= zero.
      __ LoadFromOffset(kLoadWord, AT, input, length_offset);
      DCHECK_GT(pos_const, 0);
      __ Addiu32(AT, AT, -pos_const, TMP);
      __ Bltz(AT, slow_path->GetEntryLabel());

      // Verify that (length(input) - pos) >= length.
      EnoughItems(assembler, AT, length, slow_path);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ Bnez(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Verify that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ Bltz(pos_reg, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= zero.
    __ LoadFromOffset(kLoadWord, AT, input, length_offset);
    __ Subu(AT, AT, pos_reg);
    __ Bltz(AT, slow_path->GetEntryLabel());

    // Verify that (length(input) - pos) >= length.
    EnoughItems(assembler, AT, length, slow_path);
  }
}

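// Worked example of the checks above: for arraycopy(src, 2, dst, 0, 5),
// CheckPosition must establish src.length - 2 >= 5 and dst.length >= 5.
// Any failing check branches to the shared slow path, which simply performs
// the original call with its full exception semantics.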
void IntrinsicCodeGeneratorMIPS::VisitSystemArrayCopyChar(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location src_pos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  MipsLabel loop;

  Register dest_base = locations->GetTemp(0).AsRegister<Register>();
  Register src_base = locations->GetTemp(1).AsRegister<Register>();
  Register count = locations->GetTemp(2).AsRegister<Register>();

  SlowPathCodeMIPS* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ Beq(src, dest, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ Beqz(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Beqz(dest, slow_path->GetEntryLabel());

  // Load length into register for count.
  if (length.IsConstant()) {
    __ LoadConst32(count, length.GetConstant()->AsIntConstant()->GetValue());
  } else {
    // If the length is negative, bail out.
    // We have already checked in the LocationsBuilder for the constant case.
    __ Bltz(length.AsRegister<Register>(), slow_path->GetEntryLabel());

    __ Move(count, length.AsRegister<Register>());
  }

  // Validity checks: source.
  CheckPosition(assembler, src_pos, src, Location::RegisterLocation(count), slow_path);

  // Validity checks: dest.
  CheckPosition(assembler, dest_pos, dest, Location::RegisterLocation(count), slow_path);

  // If count is zero, we're done.
  __ Beqz(count, slow_path->GetExitLabel());

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  const size_t char_shift = Primitive::ComponentSizeShift(Primitive::kPrimChar);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Calculate source and destination addresses.
  if (src_pos.IsConstant()) {
    int32_t src_pos_const = src_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Addiu32(src_base, src, data_offset + char_size * src_pos_const, TMP);
  } else {
    __ Addiu32(src_base, src, data_offset, TMP);
    __ ShiftAndAdd(src_base, src_pos.AsRegister<Register>(), src_base, char_shift);
  }
  if (dest_pos.IsConstant()) {
    int32_t dest_pos_const = dest_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Addiu32(dest_base, dest, data_offset + char_size * dest_pos_const, TMP);
  } else {
    __ Addiu32(dest_base, dest, data_offset, TMP);
    __ ShiftAndAdd(dest_base, dest_pos.AsRegister<Register>(), dest_base, char_shift);
  }

  __ Bind(&loop);
  __ Lh(TMP, src_base, 0);
  __ Addiu(src_base, src_base, char_size);
  __ Addiu(count, count, -1);
  __ Sh(TMP, dest_base, 0);
  __ Addiu(dest_base, dest_base, char_size);
  __ Bnez(count, &loop);

  __ Bind(slow_path->GetExitLabel());
}

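// Integer.valueOf is required to return cached instances for values in at
// least [-128, 127]. For inputs inside the cached range, the boxed object is
// loaded straight out of the boot-image cache array; anything else allocates
// and initializes a fresh j.l.Integer.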
// static java.lang.Integer java.lang.Integer.valueOf(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConvention calling_convention;
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      calling_convention.GetReturnLocation(Primitive::kPrimNot),
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerValueOf(HInvoke* invoke) {
  IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo();
  LocationSummary* locations = invoke->GetLocations();
  MipsAssembler* assembler = GetAssembler();
  InstructionCodeGeneratorMIPS* icodegen =
      down_cast<InstructionCodeGeneratorMIPS*>(codegen_->GetInstructionVisitor());

  Register out = locations->Out().AsRegister<Register>();
  InvokeRuntimeCallingConvention calling_convention;
  if (invoke->InputAt(0)->IsConstant()) {
    int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
    if (value >= info.low && value <= info.high) {
      // Just embed the j.l.Integer in the code.
      ScopedObjectAccess soa(Thread::Current());
      mirror::Object* boxed = info.cache->Get(value + (-info.low));
      DCHECK(boxed != nullptr && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boxed));
      uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(boxed));
      __ LoadConst32(out, address);
    } else {
      // Allocate and initialize a new j.l.Integer.
      // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
      // JIT object table.
      uint32_t address =
          dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
      __ LoadConst32(calling_convention.GetRegisterAt(0), address);
      codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
      CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
      __ StoreConstToOffset(kStoreWord, value, out, info.value_offset, TMP);
      // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
      // one.
      icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    }
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    MipsLabel allocate, done;
    int32_t count = static_cast<uint32_t>(info.high) - info.low + 1;

    // Is (info.low <= in) && (in <= info.high)?
    __ Addiu32(out, in, -info.low);
    // Treating the values as unsigned, is out < (info.high - info.low + 1)?
    if (IsInt<16>(count)) {
      __ Sltiu(AT, out, count);
    } else {
      __ LoadConst32(AT, count);
      __ Sltu(AT, out, AT);
    }
    // Branch if out >= (info.high - info.low + 1).
    // This means that "in" is outside of the range [info.low, info.high].
    __ Beqz(AT, &allocate);
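    // Subtracting info.low first lets a single unsigned comparison stand in for
    // the signed pair (info.low <= in && in <= info.high). For example, with
    // low = -128 and count = 256, in = -200 yields out = 0xFFFFFFB8, which is
    // >= 256 unsigned, so the check correctly falls through to &allocate.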

    // If the value is within the bounds, load the j.l.Integer directly from the array.
    uint32_t data_offset = mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
    uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.cache));
    __ LoadConst32(TMP, data_offset + address);
    __ ShiftAndAdd(out, out, TMP, TIMES_4);
    __ Lw(out, out, 0);
    __ MaybeUnpoisonHeapReference(out);
    __ B(&done);

    __ Bind(&allocate);
    // Otherwise allocate and initialize a new j.l.Integer.
    address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
    __ LoadConst32(calling_convention.GetRegisterAt(0), address);
    codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
    __ StoreToOffset(kStoreWord, in, out, info.value_offset);
    // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
    // one.
    icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    __ Bind(&done);
  }
}

// Unimplemented intrinsics.

UNIMPLEMENTED_INTRINSIC(MIPS, MathCeil)
UNIMPLEMENTED_INTRINSIC(MIPS, MathFloor)
UNIMPLEMENTED_INTRINSIC(MIPS, MathRint)
UNIMPLEMENTED_INTRINSIC(MIPS, MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetLongVolatile)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafePutLongVolatile)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeCASLong)

UNIMPLEMENTED_INTRINSIC(MIPS, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(MIPS, SystemArrayCopy)

UNIMPLEMENTED_INTRINSIC(MIPS, StringStringIndexOf)
UNIMPLEMENTED_INTRINSIC(MIPS, StringStringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBufferAppend)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBufferLength)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBufferToString)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderAppend)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderLength)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderToString)

// 1.8.
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndSetObject)

UNIMPLEMENTED_INTRINSIC(MIPS, ThreadInterrupted)

UNREACHABLE_INTRINSICS(MIPS)

#undef __

}  // namespace mips
}  // namespace art