blob: 80448f1389b56901d34cac5df4c86f91fa0d00b4 [file] [log] [blame]
Chris Larsen3039e382015-08-26 07:54:08 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "intrinsics_mips64.h"
18
19#include "arch/mips64/instruction_set_features_mips64.h"
20#include "art_method.h"
21#include "code_generator_mips64.h"
22#include "entrypoints/quick/quick_entrypoints.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070023#include "heap_poisoning.h"
Chris Larsen3039e382015-08-26 07:54:08 -070024#include "intrinsics.h"
25#include "mirror/array-inl.h"
Andreas Gampe895f9222017-07-05 09:53:32 -070026#include "mirror/object_array-inl.h"
Chris Larsen3039e382015-08-26 07:54:08 -070027#include "mirror/string.h"
Andreas Gampe508fdf32017-06-05 16:42:13 -070028#include "scoped_thread_state_change-inl.h"
Chris Larsen3039e382015-08-26 07:54:08 -070029#include "thread.h"
30#include "utils/mips64/assembler_mips64.h"
31#include "utils/mips64/constants_mips64.h"
32
33namespace art {
34
35namespace mips64 {
36
// Caches the code generator and the graph's arena allocator; location
// summaries built for intrinsified invokes are allocated from that arena.
IntrinsicLocationsBuilderMIPS64::IntrinsicLocationsBuilderMIPS64(CodeGeneratorMIPS64* codegen)
    : codegen_(codegen), arena_(codegen->GetGraph()->GetArena()) {
}
40
// Returns the MIPS64-specific assembler owned by the code generator.
// The cast is safe because this intrinsic code generator is only ever
// paired with a CodeGeneratorMIPS64.
Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() {
  return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler());
}
44
// Returns the arena allocator of the graph being compiled; used for
// allocations that must live as long as the compilation.
ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}
48
Chris Larsen9701c2e2015-09-04 17:22:47 -070049#define __ codegen->GetAssembler()->
50
51static void MoveFromReturnRegister(Location trg,
52 Primitive::Type type,
53 CodeGeneratorMIPS64* codegen) {
54 if (!trg.IsValid()) {
55 DCHECK_EQ(type, Primitive::kPrimVoid);
56 return;
57 }
58
59 DCHECK_NE(type, Primitive::kPrimVoid);
60
61 if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
62 GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
63 if (trg_reg != V0) {
64 __ Move(V0, trg_reg);
65 }
66 } else {
67 FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
68 if (trg_reg != F0) {
69 if (type == Primitive::kPrimFloat) {
70 __ MovS(F0, trg_reg);
71 } else {
72 __ MovD(F0, trg_reg);
73 }
74 }
75 }
76}
77
78static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
79 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
80 IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
81}
82
83// Slow-path for fallback (calling the managed code to handle the
84// intrinsic) in an intrinsified call. This will copy the arguments
85// into the positions for a regular call.
86//
87// Note: The actual parameters are required to be in the locations
88// given by the invoke's location summary. If an intrinsic
89// modifies those locations before a slowpath call, they must be
90// restored!
class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit IntrinsicSlowPathMIPS64(HInvoke* invoke)
      : SlowPathCodeMIPS64(invoke), invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);

    __ Bind(GetEntryLabel());

    // Preserve all live registers across the managed call.
    SaveLiveRegisters(codegen, invoke_->GetLocations());

    // Move the intrinsic's operands into calling-convention positions.
    MoveArguments(invoke_, codegen);

    // Dispatch the original (non-intrinsified) call; A0 carries the method.
    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(
          invoke_->AsInvokeStaticOrDirect(), Location::RegisterLocation(A0), this);
    } else {
      codegen->GenerateVirtualCall(
          invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0), this);
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    // Compact branch back to the fast-path continuation.
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
};
133
134#undef __
135
Chris Larsen3039e382015-08-26 07:54:08 -0700136bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
137 Dispatch(invoke);
138 LocationSummary* res = invoke->GetLocations();
139 return res != nullptr && res->Intrinsified();
140}
141
142#define __ assembler->
143
144static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
145 LocationSummary* locations = new (arena) LocationSummary(invoke,
146 LocationSummary::kNoCall,
147 kIntrinsified);
148 locations->SetInAt(0, Location::RequiresFpuRegister());
149 locations->SetOut(Location::RequiresRegister());
150}
151
152static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
153 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
154 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
155
156 if (is64bit) {
157 __ Dmfc1(out, in);
158 } else {
159 __ Mfc1(out, in);
160 }
161}
162
163// long java.lang.Double.doubleToRawLongBits(double)
164void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
165 CreateFPToIntLocations(arena_, invoke);
166}
167
168void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000169 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700170}
171
172// int java.lang.Float.floatToRawIntBits(float)
173void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
174 CreateFPToIntLocations(arena_, invoke);
175}
176
177void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000178 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700179}
180
181static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
182 LocationSummary* locations = new (arena) LocationSummary(invoke,
183 LocationSummary::kNoCall,
184 kIntrinsified);
185 locations->SetInAt(0, Location::RequiresRegister());
186 locations->SetOut(Location::RequiresFpuRegister());
187}
188
189static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
190 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
191 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
192
193 if (is64bit) {
194 __ Dmtc1(in, out);
195 } else {
196 __ Mtc1(in, out);
197 }
198}
199
200// double java.lang.Double.longBitsToDouble(long)
201void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
202 CreateIntToFPLocations(arena_, invoke);
203}
204
205void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000206 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700207}
208
209// float java.lang.Float.intBitsToFloat(int)
210void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
211 CreateIntToFPLocations(arena_, invoke);
212}
213
214void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000215 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700216}
217
218static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
219 LocationSummary* locations = new (arena) LocationSummary(invoke,
220 LocationSummary::kNoCall,
221 kIntrinsified);
222 locations->SetInAt(0, Location::RequiresRegister());
223 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
224}
225
226static void GenReverseBytes(LocationSummary* locations,
227 Primitive::Type type,
228 Mips64Assembler* assembler) {
229 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
230 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
231
232 switch (type) {
233 case Primitive::kPrimShort:
234 __ Dsbh(out, in);
235 __ Seh(out, out);
236 break;
237 case Primitive::kPrimInt:
238 __ Rotr(out, in, 16);
239 __ Wsbh(out, out);
240 break;
241 case Primitive::kPrimLong:
242 __ Dsbh(out, in);
243 __ Dshd(out, out);
244 break;
245 default:
246 LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
247 UNREACHABLE();
248 }
249}
250
251// int java.lang.Integer.reverseBytes(int)
252void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
253 CreateIntToIntLocations(arena_, invoke);
254}
255
256void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
257 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
258}
259
260// long java.lang.Long.reverseBytes(long)
261void IntrinsicLocationsBuilderMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
262 CreateIntToIntLocations(arena_, invoke);
263}
264
265void IntrinsicCodeGeneratorMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
266 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
267}
268
269// short java.lang.Short.reverseBytes(short)
270void IntrinsicLocationsBuilderMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
271 CreateIntToIntLocations(arena_, invoke);
272}
273
274void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
275 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
276}
277
Chris Larsen81284372015-10-21 15:28:53 -0700278static void GenNumberOfLeadingZeroes(LocationSummary* locations,
279 bool is64bit,
280 Mips64Assembler* assembler) {
Chris Larsen3039e382015-08-26 07:54:08 -0700281 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
282 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
283
284 if (is64bit) {
285 __ Dclz(out, in);
286 } else {
287 __ Clz(out, in);
288 }
289}
290
291// int java.lang.Integer.numberOfLeadingZeros(int i)
292void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
293 CreateIntToIntLocations(arena_, invoke);
294}
295
296void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000297 GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700298}
299
300// int java.lang.Long.numberOfLeadingZeros(long i)
301void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
302 CreateIntToIntLocations(arena_, invoke);
303}
304
305void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000306 GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen0646da72015-09-22 16:02:40 -0700307}
308
Chris Larsen81284372015-10-21 15:28:53 -0700309static void GenNumberOfTrailingZeroes(LocationSummary* locations,
310 bool is64bit,
311 Mips64Assembler* assembler) {
Chris Larsen0646da72015-09-22 16:02:40 -0700312 Location in = locations->InAt(0);
313 Location out = locations->Out();
314
315 if (is64bit) {
316 __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
317 __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
318 __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
319 __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
320 } else {
321 __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
322 __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
323 __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
324 __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
325 }
326}
327
328// int java.lang.Integer.numberOfTrailingZeros(int i)
329void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
330 CreateIntToIntLocations(arena_, invoke);
331}
332
333void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000334 GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen0646da72015-09-22 16:02:40 -0700335}
336
337// int java.lang.Long.numberOfTrailingZeros(long i)
338void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
339 CreateIntToIntLocations(arena_, invoke);
340}
341
342void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000343 GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700344}
345
346static void GenReverse(LocationSummary* locations,
347 Primitive::Type type,
348 Mips64Assembler* assembler) {
349 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
350
351 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
352 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
353
354 if (type == Primitive::kPrimInt) {
355 __ Rotr(out, in, 16);
356 __ Wsbh(out, out);
357 __ Bitswap(out, out);
358 } else {
359 __ Dsbh(out, in);
360 __ Dshd(out, out);
361 __ Dbitswap(out, out);
362 }
363}
364
365// int java.lang.Integer.reverse(int)
366void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverse(HInvoke* invoke) {
367 CreateIntToIntLocations(arena_, invoke);
368}
369
370void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverse(HInvoke* invoke) {
371 GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
372}
373
374// long java.lang.Long.reverse(long)
375void IntrinsicLocationsBuilderMIPS64::VisitLongReverse(HInvoke* invoke) {
376 CreateIntToIntLocations(arena_, invoke);
377}
378
379void IntrinsicCodeGeneratorMIPS64::VisitLongReverse(HInvoke* invoke) {
380 GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
381}
382
Chris Larsen0b7ac982015-09-04 12:54:28 -0700383static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
384 LocationSummary* locations = new (arena) LocationSummary(invoke,
385 LocationSummary::kNoCall,
386 kIntrinsified);
387 locations->SetInAt(0, Location::RequiresFpuRegister());
388 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
389}
390
// Emits a branch-free population count (SWAR) for int/long operands.
// Uses AT and TMP as scratch registers; the result is left in `out`.
static void GenBitCount(LocationSummary* locations,
                        const Primitive::Type type,
                        Mips64Assembler* assembler) {
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();

  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  // https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel
  //
  // A generalization of the best bit counting method to integers of
  // bit-widths up to 128 (parameterized by type T) is this:
  //
  // v = v - ((v >> 1) & (T)~(T)0/3);                           // temp
  // v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3);      // temp
  // v = (v + (v >> 4)) & (T)~(T)0/255*15;                      // temp
  // c = (T)(v * ((T)~(T)0/255)) >> (sizeof(T) - 1) * BITS_PER_BYTE; // count
  //
  // For comparison, for 32-bit quantities, this algorithm can be executed
  // using 20 MIPS instructions (the calls to LoadConst32() generate two
  // machine instructions each for the values being used in this algorithm).
  // A(n unrolled) loop-based algorithm requires 25 instructions.
  //
  // For a 64-bit operand this can be performed in 24 instructions compared
  // to a(n unrolled) loop based algorithm which requires 38 instructions.
  //
  // There are algorithms which are faster in the cases where very few
  // bits are set but the algorithm here attempts to minimize the total
  // number of instructions executed even when a large number of bits
  // are set.

  if (type == Primitive::kPrimInt) {
    __ Srl(TMP, in, 1);
    __ LoadConst32(AT, 0x55555555);
    __ And(TMP, TMP, AT);
    __ Subu(TMP, in, TMP);            // TMP = in - ((in >> 1) & 0x55555555): 2-bit sums.
    __ LoadConst32(AT, 0x33333333);
    __ And(out, TMP, AT);
    __ Srl(TMP, TMP, 2);
    __ And(TMP, TMP, AT);
    __ Addu(TMP, out, TMP);           // 4-bit sums.
    __ Srl(out, TMP, 4);
    __ Addu(out, out, TMP);
    __ LoadConst32(AT, 0x0F0F0F0F);
    __ And(out, out, AT);             // Per-byte counts.
    __ LoadConst32(TMP, 0x01010101);
    __ MulR6(out, out, TMP);          // Accumulate all byte counts into the top byte.
    __ Srl(out, out, 24);             // Extract the total.
  } else if (type == Primitive::kPrimLong) {
    __ Dsrl(TMP, in, 1);
    __ LoadConst64(AT, 0x5555555555555555L);
    __ And(TMP, TMP, AT);
    __ Dsubu(TMP, in, TMP);           // 2-bit sums.
    __ LoadConst64(AT, 0x3333333333333333L);
    __ And(out, TMP, AT);
    __ Dsrl(TMP, TMP, 2);
    __ And(TMP, TMP, AT);
    __ Daddu(TMP, out, TMP);          // 4-bit sums.
    __ Dsrl(out, TMP, 4);
    __ Daddu(out, out, TMP);
    __ LoadConst64(AT, 0x0F0F0F0F0F0F0F0FL);
    __ And(out, out, AT);             // Per-byte counts.
    __ LoadConst64(TMP, 0x0101010101010101L);
    __ Dmul(out, out, TMP);           // Accumulate all byte counts into the top byte.
    __ Dsrl32(out, out, 24);          // Shift right by 56 (32 + 24) to extract the total.
  }
}
458
459// int java.lang.Integer.bitCount(int)
460void IntrinsicLocationsBuilderMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
461 CreateIntToIntLocations(arena_, invoke);
462}
463
464void IntrinsicCodeGeneratorMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
465 GenBitCount(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
466}
467
468// int java.lang.Long.bitCount(long)
469void IntrinsicLocationsBuilderMIPS64::VisitLongBitCount(HInvoke* invoke) {
470 CreateIntToIntLocations(arena_, invoke);
471}
472
473void IntrinsicCodeGeneratorMIPS64::VisitLongBitCount(HInvoke* invoke) {
474 GenBitCount(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
475}
476
Chris Larsen0b7ac982015-09-04 12:54:28 -0700477static void MathAbsFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
478 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
479 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
480
481 if (is64bit) {
482 __ AbsD(out, in);
483 } else {
484 __ AbsS(out, in);
485 }
486}
487
488// double java.lang.Math.abs(double)
489void IntrinsicLocationsBuilderMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
490 CreateFPToFPLocations(arena_, invoke);
491}
492
493void IntrinsicCodeGeneratorMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000494 MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700495}
496
497// float java.lang.Math.abs(float)
498void IntrinsicLocationsBuilderMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
499 CreateFPToFPLocations(arena_, invoke);
500}
501
502void IntrinsicCodeGeneratorMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000503 MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700504}
505
506static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
507 LocationSummary* locations = new (arena) LocationSummary(invoke,
508 LocationSummary::kNoCall,
509 kIntrinsified);
510 locations->SetInAt(0, Location::RequiresRegister());
511 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
512}
513
514static void GenAbsInteger(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
515 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
516 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
517
518 if (is64bit) {
519 __ Dsra32(AT, in, 31);
520 __ Xor(out, in, AT);
521 __ Dsubu(out, out, AT);
522 } else {
523 __ Sra(AT, in, 31);
524 __ Xor(out, in, AT);
525 __ Subu(out, out, AT);
526 }
527}
528
529// int java.lang.Math.abs(int)
530void IntrinsicLocationsBuilderMIPS64::VisitMathAbsInt(HInvoke* invoke) {
531 CreateIntToInt(arena_, invoke);
532}
533
534void IntrinsicCodeGeneratorMIPS64::VisitMathAbsInt(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000535 GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700536}
537
538// long java.lang.Math.abs(long)
539void IntrinsicLocationsBuilderMIPS64::VisitMathAbsLong(HInvoke* invoke) {
540 CreateIntToInt(arena_, invoke);
541}
542
543void IntrinsicCodeGeneratorMIPS64::VisitMathAbsLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000544 GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700545}
546
// Emits FP min/max with Java NaN semantics. `ftmp` must not alias either
// input while the NaN is being selected, hence the FTMP fallback when the
// output overlaps an input.
static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        Primitive::Type type,
                        Mips64Assembler* assembler) {
  FpuRegister a = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister b = locations->InAt(1).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  Mips64Label noNaNs;
  Mips64Label done;
  // Scratch register for the NaN-select; only safe to use `out` directly
  // when it aliases neither input.
  FpuRegister ftmp = ((out != a) && (out != b)) ? out : FTMP;

  // When Java computes min/max it prefers a NaN to a number; the
  // behavior of MIPSR6 is to prefer numbers to NaNs, i.e., if one of
  // the inputs is a NaN and the other is a valid number, the MIPS
  // instruction will return the number; Java wants the NaN value
  // returned. This is why there is extra logic preceding the use of
  // the MIPS min.fmt/max.fmt instructions. If either a, or b holds a
  // NaN, return the NaN, otherwise return the min/max.
  if (type == Primitive::kPrimDouble) {
    // CMP.UN is true iff at least one operand is a NaN.
    __ CmpUnD(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqD(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelD(ftmp, a, b);

    if (ftmp != out) {
      __ MovD(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinD(out, a, b);
    } else {
      __ MaxD(out, a, b);
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimFloat);
    // Same structure as the double case, using the single-precision forms.
    __ CmpUnS(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqS(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelS(ftmp, a, b);

    if (ftmp != out) {
      __ MovS(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinS(out, a, b);
    } else {
      __ MaxS(out, a, b);
    }
  }

  __ Bind(&done);
}
615
616static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
617 LocationSummary* locations = new (arena) LocationSummary(invoke,
618 LocationSummary::kNoCall,
619 kIntrinsified);
620 locations->SetInAt(0, Location::RequiresFpuRegister());
621 locations->SetInAt(1, Location::RequiresFpuRegister());
622 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
623}
624
625// double java.lang.Math.min(double, double)
626void IntrinsicLocationsBuilderMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
627 CreateFPFPToFPLocations(arena_, invoke);
628}
629
630void IntrinsicCodeGeneratorMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
Chris Larsenb74353a2015-11-20 09:07:09 -0800631 GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, Primitive::kPrimDouble, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700632}
633
634// float java.lang.Math.min(float, float)
635void IntrinsicLocationsBuilderMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
636 CreateFPFPToFPLocations(arena_, invoke);
637}
638
639void IntrinsicCodeGeneratorMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
Chris Larsenb74353a2015-11-20 09:07:09 -0800640 GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, Primitive::kPrimFloat, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700641}
642
643// double java.lang.Math.max(double, double)
644void IntrinsicLocationsBuilderMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
645 CreateFPFPToFPLocations(arena_, invoke);
646}
647
648void IntrinsicCodeGeneratorMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
Chris Larsenb74353a2015-11-20 09:07:09 -0800649 GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, Primitive::kPrimDouble, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700650}
651
652// float java.lang.Math.max(float, float)
653void IntrinsicLocationsBuilderMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
654 CreateFPFPToFPLocations(arena_, invoke);
655}
656
657void IntrinsicCodeGeneratorMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
Chris Larsenb74353a2015-11-20 09:07:09 -0800658 GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, Primitive::kPrimFloat, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700659}
660
// Emits branch-free integer min/max using the R6 SELEQZ/SELNEZ pair.
// Clobbers AT; works for both 32- and 64-bit operands since Slt compares
// full registers.
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      Mips64Assembler* assembler) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (lhs == rhs) {
    // min(x, x) == max(x, x) == x; just copy if needed.
    if (out != lhs) {
      __ Move(out, lhs);
    }
  } else {
    // Some architectures, such as ARM and MIPS (prior to r6), have a
    // conditional move instruction which only changes the target
    // (output) register if the condition is true (MIPS prior to r6 had
    // MOVF, MOVT, and MOVZ). The SELEQZ and SELNEZ instructions always
    // change the target (output) register. If the condition is true the
    // output register gets the contents of the "rs" register; otherwise,
    // the output register is set to zero. One consequence of this is
    // that to implement something like "rd = c==0 ? rs : rt" MIPS64r6
    // needs to use a pair of SELEQZ/SELNEZ instructions. After
    // executing this pair of instructions one of the output registers
    // from the pair will necessarily contain zero. Then the code ORs the
    // output registers from the SELEQZ/SELNEZ instructions to get the
    // final result.
    //
    // The initial test to see if the output register is same as the
    // first input register is needed to make sure that value in the
    // first input register isn't clobbered before we've finished
    // computing the output value. The logic in the corresponding else
    // clause performs the same task but makes sure the second input
    // register isn't clobbered in the event that it's the same register
    // as the output register; the else clause also handles the case
    // where the output register is distinct from both the first, and the
    // second input registers.
    if (out == lhs) {
      __ Slt(AT, rhs, lhs);
      if (is_min) {
        __ Seleqz(out, lhs, AT);
        __ Selnez(AT, rhs, AT);
      } else {
        __ Selnez(out, lhs, AT);
        __ Seleqz(AT, rhs, AT);
      }
    } else {
      __ Slt(AT, lhs, rhs);
      if (is_min) {
        __ Seleqz(out, rhs, AT);
        __ Selnez(AT, lhs, AT);
      } else {
        __ Selnez(out, rhs, AT);
        __ Seleqz(AT, lhs, AT);
      }
    }
    // Exactly one of out/AT is zero at this point; OR combines them.
    __ Or(out, out, AT);
  }
}
718
719static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
720 LocationSummary* locations = new (arena) LocationSummary(invoke,
721 LocationSummary::kNoCall,
722 kIntrinsified);
723 locations->SetInAt(0, Location::RequiresRegister());
724 locations->SetInAt(1, Location::RequiresRegister());
725 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
726}
727
728// int java.lang.Math.min(int, int)
729void IntrinsicLocationsBuilderMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
730 CreateIntIntToIntLocations(arena_, invoke);
731}
732
733void IntrinsicCodeGeneratorMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000734 GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700735}
736
737// long java.lang.Math.min(long, long)
738void IntrinsicLocationsBuilderMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
739 CreateIntIntToIntLocations(arena_, invoke);
740}
741
742void IntrinsicCodeGeneratorMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000743 GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700744}
745
746// int java.lang.Math.max(int, int)
747void IntrinsicLocationsBuilderMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
748 CreateIntIntToIntLocations(arena_, invoke);
749}
750
751void IntrinsicCodeGeneratorMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000752 GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700753}
754
755// long java.lang.Math.max(long, long)
756void IntrinsicLocationsBuilderMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
757 CreateIntIntToIntLocations(arena_, invoke);
758}
759
760void IntrinsicCodeGeneratorMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000761 GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700762}
763
// double java.lang.Math.sqrt(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  // A single hardware sqrt.d instruction implements the whole intrinsic.
  __ SqrtD(out, in);
}
777
Chris Larsen81284372015-10-21 15:28:53 -0700778static void CreateFPToFP(ArenaAllocator* arena,
779 HInvoke* invoke,
780 Location::OutputOverlap overlaps = Location::kOutputOverlap) {
Chris Larsen0b7ac982015-09-04 12:54:28 -0700781 LocationSummary* locations = new (arena) LocationSummary(invoke,
782 LocationSummary::kNoCall,
783 kIntrinsified);
784 locations->SetInAt(0, Location::RequiresFpuRegister());
Chris Larsen81284372015-10-21 15:28:53 -0700785 locations->SetOut(Location::RequiresFpuRegister(), overlaps);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700786}
787
// double java.lang.Math.rint(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathRint(HInvoke* invoke) {
  // rint is emitted as a single instruction that never needs to re-read the
  // input after writing the output, so in/out may share a register.
  CreateFPToFP(arena_, invoke, Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  __ RintD(out, in);
}

// double java.lang.Math.floor(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathFloor(HInvoke* invoke) {
  // Expanded by GenRoundingMode(), which asserts out != in, hence the
  // default kOutputOverlap.
  CreateFPToFP(arena_, invoke);
}
806
// Mask of FP class bits (as tested against the result of the class.d
// instruction in GenRoundingMode) for which floor/ceil must return the
// input value unchanged: signed zeros, infinities and NaNs.
const constexpr uint16_t kFPLeaveUnchanged = kPositiveZero |
                                             kPositiveInfinity |
                                             kNegativeZero |
                                             kNegativeInfinity |
                                             kQuietNaN |
                                             kSignalingNaN;
Chris Larsen0b7ac982015-09-04 12:54:28 -0700813
// Rounding direction dispatched on by GenRoundingMode() below.
enum FloatRoundingMode {
  kFloor,
  kCeil,
};
818
// Shared expansion of Math.floor(double)/Math.ceil(double).
// Returns the input unchanged for zeros, infinities and NaNs (see
// kFPLeaveUnchanged) and for results that fall outside the int64 range of
// floor.l.d/ceil.l.d. Clobbers AT and TMP; requires out != in.
static void GenRoundingMode(LocationSummary* locations,
                            FloatRoundingMode mode,
                            Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  // `out` is used as scratch before `in` is consumed, so they must differ.
  DCHECK_NE(in, out);

  Mips64Label done;

  // double floor/ceil(double in) {
  //     if in.isNaN || in.isInfinite || in.isZero {
  //         return in;
  //     }
  __ ClassD(out, in);
  __ Dmfc1(AT, out);
  __ Andi(AT, AT, kFPLeaveUnchanged);   // +0.0 | +Inf | -0.0 | -Inf | qNaN | sNaN
  __ MovD(out, in);
  __ Bnezc(AT, &done);

  //     Long outLong = floor/ceil(in);
  //     if (outLong == Long.MAX_VALUE) || (outLong == Long.MIN_VALUE) {
  //         // floor()/ceil() has almost certainly returned a value
  //         // which can't be successfully represented as a signed
  //         // 64-bit number.  Java expects that the input value will
  //         // be returned in these cases.
  //         // There is also a small probability that floor(in)/ceil(in)
  //         // correctly truncates/rounds up the input value to
  //         // Long.MAX_VALUE or Long.MIN_VALUE. In these cases, this
  //         // exception handling code still does the correct thing.
  //         return in;
  //     }
  if (mode == kFloor) {
    __ FloorLD(out, in);
  } else if (mode == kCeil) {
    __ CeilLD(out, in);
  }
  __ Dmfc1(AT, out);
  __ MovD(out, in);
  __ Daddiu(TMP, AT, 1);
  __ Dati(TMP, 0x8000);  // TMP = AT + 0x8000 0000 0000 0001
                         // or AT - 0x7FFF FFFF FFFF FFFF.
                         // IOW, TMP = 1 if AT = Long.MIN_VALUE
                         // or TMP = 0 if AT = Long.MAX_VALUE.
  __ Dsrl(TMP, TMP, 1);  // TMP = 0 if AT = Long.MIN_VALUE
                         //         or AT = Long.MAX_VALUE.
  __ Beqzc(TMP, &done);

  //     double out = outLong;
  //     return out;
  __ Dmtc1(AT, out);
  __ Cvtdl(out, out);
  __ Bind(&done);
  // }
}
874
void IntrinsicCodeGeneratorMIPS64::VisitMathFloor(HInvoke* invoke) {
  GenRoundingMode(invoke->GetLocations(), kFloor, GetAssembler());
}

// double java.lang.Math.ceil(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathCeil(HInvoke* invoke) {
  // Same constraints as floor: GenRoundingMode requires out != in.
  CreateFPToFP(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCeil(HInvoke* invoke) {
  GenRoundingMode(invoke->GetLocations(), kCeil, GetAssembler());
}
887
// Shared expansion of Math.round(float)->int and Math.round(double)->long:
// round(in) = floor(in) + ((in - floor(in)) >= 0.5 ? 1 : 0), with
// saturated floor results (Int/Long MIN_VALUE / MAX_VALUE) returned as-is.
// `half` (the FPU temp) holds the 0.5 constant. Clobbers AT, TMP and FTMP.
static void GenRound(LocationSummary* locations, Mips64Assembler* assembler, Primitive::Type type) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister half = locations->GetTemp(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  DCHECK(type == Primitive::kPrimFloat || type == Primitive::kPrimDouble);

  Mips64Label done;

  // out = floor(in);
  //
  // if (out != MAX_VALUE && out != MIN_VALUE) {
  //   TMP = ((in - out) >= 0.5) ? 1 : 0;
  //   return out += TMP;
  // }
  // return out;

  // out = floor(in);
  if (type == Primitive::kPrimDouble) {
    __ FloorLD(FTMP, in);
    __ Dmfc1(out, FTMP);
  } else {
    __ FloorWS(FTMP, in);
    __ Mfc1(out, FTMP);
  }

  // if (out != MAX_VALUE && out != MIN_VALUE)
  if (type == Primitive::kPrimDouble) {
    __ Daddiu(TMP, out, 1);
    __ Dati(TMP, 0x8000);  // TMP = out + 0x8000 0000 0000 0001
                           // or out - 0x7FFF FFFF FFFF FFFF.
                           // IOW, TMP = 1 if out = Long.MIN_VALUE
                           // or TMP = 0 if out = Long.MAX_VALUE.
    __ Dsrl(TMP, TMP, 1);  // TMP = 0 if out = Long.MIN_VALUE
                           //         or out = Long.MAX_VALUE.
    __ Beqzc(TMP, &done);
  } else {
    __ Addiu(TMP, out, 1);
    __ Aui(TMP, TMP, 0x8000);  // TMP = out + 0x8000 0001
                               // or out - 0x7FFF FFFF.
                               // IOW, TMP = 1 if out = Int.MIN_VALUE
                               // or TMP = 0 if out = Int.MAX_VALUE.
    __ Srl(TMP, TMP, 1);  // TMP = 0 if out = Int.MIN_VALUE
                          //         or out = Int.MAX_VALUE.
    __ Beqzc(TMP, &done);
  }

  // TMP = (0.5 <= (in - out)) ? -1 : 0;
  // (CmpLeD/CmpLeS write an all-ones mask on true, hence -1.)
  if (type == Primitive::kPrimDouble) {
    __ Cvtdl(FTMP, FTMP);  // Convert output of floor.l.d back to "double".
    __ LoadConst64(AT, bit_cast<int64_t, double>(0.5));
    __ SubD(FTMP, in, FTMP);
    __ Dmtc1(AT, half);
    __ CmpLeD(FTMP, half, FTMP);
    __ Dmfc1(TMP, FTMP);
  } else {
    __ Cvtsw(FTMP, FTMP);  // Convert output of floor.w.s back to "float".
    __ LoadConst32(AT, bit_cast<int32_t, float>(0.5f));
    __ SubS(FTMP, in, FTMP);
    __ Mtc1(AT, half);
    __ CmpLeS(FTMP, half, FTMP);
    __ Mfc1(TMP, FTMP);
  }

  // Return out -= TMP.  (TMP is 0 or -1, so this adds 0 or 1.)
  if (type == Primitive::kPrimDouble) {
    __ Dsubu(out, out, TMP);
  } else {
    __ Subu(out, out, TMP);
  }

  __ Bind(&done);
}
961
// int java.lang.Math.round(float)
void IntrinsicLocationsBuilderMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  // FPU temp holds the 0.5 constant in GenRound.
  locations->AddTemp(Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
  GenRound(invoke->GetLocations(), GetAssembler(), Primitive::kPrimFloat);
}

// long java.lang.Math.round(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  // FPU temp holds the 0.5 constant in GenRound.
  locations->AddTemp(Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
  GenRound(invoke->GetLocations(), GetAssembler(), Primitive::kPrimDouble);
}
989
// byte libcore.io.Memory.peekByte(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // `adr` is a raw native address; no null or alignment checks are emitted.
  __ Lb(out, adr, 0);
}

// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Lh(out, adr, 0);
}

// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Lw(out, adr, 0);
}

// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Ld(out, adr, 0);
}
1041
1042static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
1043 LocationSummary* locations = new (arena) LocationSummary(invoke,
1044 LocationSummary::kNoCall,
1045 kIntrinsified);
1046 locations->SetInAt(0, Location::RequiresRegister());
1047 locations->SetInAt(1, Location::RequiresRegister());
1048}
1049
// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  // `adr` is a raw native address; no null or alignment checks are emitted.
  __ Sb(val, adr, 0);
}

// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sh(val, adr, 0);
}

// void libcore.io.Memory.pokeInt(long address, int value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}
1080
1081void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
1082 Mips64Assembler* assembler = GetAssembler();
1083 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
1084 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
1085
1086 __ Sw(val, adr, 00);
1087}
1088
// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  // `adr` is a raw native address; no null or alignment checks are emitted.
  __ Sd(val, adr, 0);
}
1101
// Thread java.lang.Thread.currentThread()
void IntrinsicLocationsBuilderMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // Load the java.lang.Thread peer reference from the runtime Thread object
  // (register TR) with a zero-extending 32-bit load.
  __ LoadFromOffset(kLoadUnsignedWord,
                    out,
                    TR,
                    Thread::PeerOffset<kMips64PointerSize>().Int32Value());
}
1119
// Builds locations for Unsafe.get* intrinsics: (receiver, object, long offset)
// -> value. With read barriers enabled, object gets may call a slow path.
static void CreateIntIntIntToIntLocations(ArenaAllocator* arena,
                                          HInvoke* invoke,
                                          Primitive::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           (can_call
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall),
                                                           kIntrinsified);
  if (can_call && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  // When the slow path may run, the output must not alias the inputs.
  locations->SetOut(Location::RequiresRegister(),
                    (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in InstructionCodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}
1145
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Shared expansion of Unsafe.getInt/getLong/getObject and their volatile
// variants. Volatile gets are a plain load followed by a full memory
// barrier (Sync(0)). Object gets go through the configured read barrier.
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorMIPS64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot)) << type;
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Target register.
  Location trg_loc = locations->Out();
  GpuRegister trg = trg_loc.AsRegister<GpuRegister>();
  // Object pointer.
  Location base_loc = locations->InAt(1);
  GpuRegister base = base_loc.AsRegister<GpuRegister>();
  // Long offset.
  Location offset_loc = locations->InAt(2);
  GpuRegister offset = offset_loc.AsRegister<GpuRegister>();

  // The Baker read-barrier path addresses via (base, index) itself; all
  // other paths load from the precomputed address in TMP.
  if (!(kEmitCompilerReadBarrier && kUseBakerReadBarrier && (type == Primitive::kPrimNot))) {
    __ Daddu(TMP, base, offset);
  }

  switch (type) {
    case Primitive::kPrimLong:
      __ Ld(trg, TMP, 0);
      if (is_volatile) {
        __ Sync(0);
      }
      break;

    case Primitive::kPrimInt:
      __ Lw(trg, TMP, 0);
      if (is_volatile) {
        __ Sync(0);
      }
      break;

    case Primitive::kPrimNot:
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          Location temp = locations->GetTemp(0);
          codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke,
                                                             trg_loc,
                                                             base,
                                                             /* offset */ 0U,
                                                             /* index */ offset_loc,
                                                             TIMES_1,
                                                             temp,
                                                             /* needs_null_check */ false);
          if (is_volatile) {
            __ Sync(0);
          }
        } else {
          // Non-Baker read barrier: load, then fix up via the slow path.
          __ Lwu(trg, TMP, 0);
          if (is_volatile) {
            __ Sync(0);
          }
          codegen->GenerateReadBarrierSlow(invoke,
                                           trg_loc,
                                           trg_loc,
                                           base_loc,
                                           /* offset */ 0U,
                                           /* index */ offset_loc);
        }
      } else {
        __ Lwu(trg, TMP, 0);
        if (is_volatile) {
          __ Sync(0);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}
1227
// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}

// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}

// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}

// long sun.misc.Unsafe.getLongVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}

// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}

// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}
1281
1282static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
1283 LocationSummary* locations = new (arena) LocationSummary(invoke,
1284 LocationSummary::kNoCall,
1285 kIntrinsified);
1286 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1287 locations->SetInAt(1, Location::RequiresRegister());
1288 locations->SetInAt(2, Location::RequiresRegister());
1289 locations->SetInAt(3, Location::RequiresRegister());
1290}
1291
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Shared expansion of Unsafe.putInt/putLong/putObject and their
// ordered/volatile variants. Ordered and volatile puts emit a full barrier
// (Sync(0)) before the store; volatile puts also emit one after it.
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorMIPS64* codegen) {
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>();

  __ Daddu(TMP, base, offset);
  if (is_volatile || is_ordered) {
    __ Sync(0);
  }
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      // With heap poisoning, references are poisoned into AT before the store.
      if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
        __ PoisonHeapReference(AT, value);
        __ Sw(AT, TMP, 0);
      } else {
        __ Sw(value, TMP, 0);
      }
      break;

    case Primitive::kPrimLong:
      __ Sd(value, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
  if (is_volatile) {
    __ Sync(0);
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}
1341
// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}

// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}

// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}

// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}

// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}

// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}

// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
1458
// Builds locations for Unsafe CAS intrinsics:
// (receiver, object, long offset, expected, new value) -> success flag.
static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      kUseBakerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           (can_call
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall),
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());

  // Temporary register used in CAS by (Baker) read barrier.
  if (can_call) {
    locations->AddTemp(Location::RequiresRegister());
  }
}
1480
Alexey Frunze15958152017-02-09 19:08:30 -08001481// Note that the caller must supply a properly aligned memory address.
1482// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
1483static void GenCas(HInvoke* invoke, Primitive::Type type, CodeGeneratorMIPS64* codegen) {
Chris Larsen36427492015-10-23 02:19:38 -07001484 Mips64Assembler* assembler = codegen->GetAssembler();
Alexey Frunze15958152017-02-09 19:08:30 -08001485 LocationSummary* locations = invoke->GetLocations();
Chris Larsen36427492015-10-23 02:19:38 -07001486 GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
Alexey Frunze15958152017-02-09 19:08:30 -08001487 Location offset_loc = locations->InAt(2);
1488 GpuRegister offset = offset_loc.AsRegister<GpuRegister>();
Chris Larsen36427492015-10-23 02:19:38 -07001489 GpuRegister expected = locations->InAt(3).AsRegister<GpuRegister>();
1490 GpuRegister value = locations->InAt(4).AsRegister<GpuRegister>();
Alexey Frunze15958152017-02-09 19:08:30 -08001491 Location out_loc = locations->Out();
1492 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Chris Larsen36427492015-10-23 02:19:38 -07001493
1494 DCHECK_NE(base, out);
1495 DCHECK_NE(offset, out);
1496 DCHECK_NE(expected, out);
1497
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001498 if (type == Primitive::kPrimNot) {
Alexey Frunze15958152017-02-09 19:08:30 -08001499 // The only read barrier implementation supporting the
1500 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1501 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
1502
1503 // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
1504 // object and scan the receiver at the next GC for nothing.
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001505 bool value_can_be_null = true; // TODO: Worth finding out this information?
1506 codegen->MarkGCCard(base, value, value_can_be_null);
Alexey Frunze15958152017-02-09 19:08:30 -08001507
1508 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1509 Location temp = locations->GetTemp(0);
1510 // Need to make sure the reference stored in the field is a to-space
1511 // one before attempting the CAS or the CAS could fail incorrectly.
1512 codegen->GenerateReferenceLoadWithBakerReadBarrier(
1513 invoke,
1514 out_loc, // Unused, used only as a "temporary" within the read barrier.
1515 base,
1516 /* offset */ 0u,
1517 /* index */ offset_loc,
1518 ScaleFactor::TIMES_1,
1519 temp,
1520 /* needs_null_check */ false,
1521 /* always_update_field */ true);
1522 }
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001523 }
1524
Alexey Frunzec061de12017-02-14 13:27:23 -08001525 Mips64Label loop_head, exit_loop;
1526 __ Daddu(TMP, base, offset);
1527
1528 if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
1529 __ PoisonHeapReference(expected);
1530 // Do not poison `value`, if it is the same register as
1531 // `expected`, which has just been poisoned.
1532 if (value != expected) {
1533 __ PoisonHeapReference(value);
1534 }
1535 }
1536
Chris Larsen36427492015-10-23 02:19:38 -07001537 // do {
1538 // tmp_value = [tmp_ptr] - expected;
1539 // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
1540 // result = tmp_value != 0;
1541
Chris Larsen36427492015-10-23 02:19:38 -07001542 __ Sync(0);
1543 __ Bind(&loop_head);
1544 if (type == Primitive::kPrimLong) {
1545 __ Lld(out, TMP);
1546 } else {
Roland Levillain391b8662015-12-18 11:43:38 +00001547 // Note: We will need a read barrier here, when read barrier
1548 // support is added to the MIPS64 back end.
Chris Larsen36427492015-10-23 02:19:38 -07001549 __ Ll(out, TMP);
Alexey Frunzec061de12017-02-14 13:27:23 -08001550 if (type == Primitive::kPrimNot) {
1551 // The LL instruction sign-extends the 32-bit value, but
1552 // 32-bit references must be zero-extended. Zero-extend `out`.
1553 __ Dext(out, out, 0, 32);
1554 }
Chris Larsen36427492015-10-23 02:19:38 -07001555 }
1556 __ Dsubu(out, out, expected); // If we didn't get the 'expected'
1557 __ Sltiu(out, out, 1); // value, set 'out' to false, and
1558 __ Beqzc(out, &exit_loop); // return.
1559 __ Move(out, value); // Use 'out' for the 'store conditional' instruction.
1560 // If we use 'value' directly, we would lose 'value'
1561 // in the case that the store fails. Whether the
1562 // store succeeds, or fails, it will load the
Roland Levillain5e8d5f02016-10-18 18:03:43 +01001563 // correct Boolean value into the 'out' register.
Chris Larsen36427492015-10-23 02:19:38 -07001564 if (type == Primitive::kPrimLong) {
1565 __ Scd(out, TMP);
1566 } else {
1567 __ Sc(out, TMP);
1568 }
1569 __ Beqzc(out, &loop_head); // If we couldn't do the read-modify-write
1570 // cycle atomically then retry.
1571 __ Bind(&exit_loop);
1572 __ Sync(0);
Alexey Frunzec061de12017-02-14 13:27:23 -08001573
1574 if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
1575 __ UnpoisonHeapReference(expected);
1576 // Do not unpoison `value`, if it is the same register as
1577 // `expected`, which has just been unpoisoned.
1578 if (value != expected) {
1579 __ UnpoisonHeapReference(value);
1580 }
1581 }
Chris Larsen36427492015-10-23 02:19:38 -07001582}
1583
1584// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  // The helper adds its extra temp only for kUnsafeCASObject, so this gets none.
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
1588
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  // 32-bit CAS: GenCas emits an LL/SC loop.
  GenCas(invoke, Primitive::kPrimInt, codegen_);
}
1592
1593// boolean sun.misc.Unsafe.compareAndSwapLong(Object o, long offset, long expected, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  // The helper adds its extra temp only for kUnsafeCASObject, so this gets none.
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
1597
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  // 64-bit CAS: GenCas emits an LLD/SCD loop.
  GenCas(invoke, Primitive::kPrimLong, codegen_);
}
1601
1602// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
1603void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001604 // The only read barrier implementation supporting the
1605 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1606 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
1607 return;
1608 }
1609
1610 CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
Chris Larsen36427492015-10-23 02:19:38 -07001611}
1612
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barriers.
  DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

  // Reference CAS: GenCas also marks the GC card and, under Baker read
  // barriers, refreshes the field's reference to a to-space one first.
  GenCas(invoke, Primitive::kPrimNot, codegen_);
}
1620
Chris Larsen9701c2e2015-09-04 17:22:47 -07001621// int java.lang.String.compareTo(String anotherString)
1622void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
1623 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescufc734082016-07-19 17:18:07 +01001624 LocationSummary::kCallOnMainAndSlowPath,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001625 kIntrinsified);
1626 InvokeRuntimeCallingConvention calling_convention;
1627 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1628 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1629 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1630 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1631}
1632
void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
  // Calls the StringCompareTo runtime entry point; a null argument is routed
  // through the intrinsic slow path.
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  // Null `anotherString` is handled on the slow path.
  __ Beqzc(argument, slow_path->GetEntryLabel());

  codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
  __ Bind(slow_path->GetExitLabel());
}
1648
Chris Larsen972d6d72015-10-20 11:29:12 -07001649// boolean java.lang.String.equals(Object anObject)
1650void IntrinsicLocationsBuilderMIPS64::VisitStringEquals(HInvoke* invoke) {
1651 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1652 LocationSummary::kNoCall,
1653 kIntrinsified);
1654 locations->SetInAt(0, Location::RequiresRegister());
1655 locations->SetInAt(1, Location::RequiresRegister());
1656 locations->SetOut(Location::RequiresRegister());
1657
1658 // Temporary registers to store lengths of strings and for calculations.
1659 locations->AddTemp(Location::RequiresRegister());
1660 locations->AddTemp(Location::RequiresRegister());
1661 locations->AddTemp(Location::RequiresRegister());
1662}
1663
void IntrinsicCodeGeneratorMIPS64::VisitStringEquals(HInvoke* invoke) {
  // Inline String.equals: short-circuit on identity/null/class/length, then
  // compare the character data 8 bytes at a time.
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister str = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister arg = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister temp2 = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister temp3 = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label loop;
  Mips64Label end;
  Mips64Label return_true;
  Mips64Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this", and the register
  // containing the pointer to "anObject" are the same register then
  // "this", and "anObject" are the same object and we can
  // short-circuit the logic to a true result.
  if (str == arg) {
    __ LoadConst64(out, 1);
    return;
  }

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ Beqzc(arg, &return_false);
  }

  // Reference equality check, return true if same reference.
  __ Beqc(str, arg, &return_true);

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    __ Lw(temp1, str, class_offset);
    __ Lw(temp2, arg, class_offset);
    __ Bnec(temp1, temp2, &return_false);
  }

  // Load `count` fields of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if `count` fields are equal, return false if they're not.
  // Also compares the compression style, if differs return false.
  __ Bnec(temp1, temp2, &return_false);
  // Return true if both strings are empty. Even with string compression `count == 0` means empty.
  static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                "Expecting 0=compressed, 1=uncompressed");
  __ Beqzc(temp1, &return_true);

  // Don't overwrite input registers; iterate over copies in TMP and temp3.
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 8 bytes at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  if (mirror::kUseStringCompression) {
    // For string compression, calculate the number of bytes to compare (not chars).
    __ Dext(temp2, temp1, 0, 1);         // Extract compression flag.
    __ Srl(temp1, temp1, 1);             // Extract length.
    __ Sllv(temp1, temp1, temp2);        // Double the byte count if uncompressed.
  }

  // Loop to compare strings 8 bytes at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to kObjectAlignment.
  __ Bind(&loop);
  __ Ld(out, TMP, value_offset);
  __ Ld(temp2, temp3, value_offset);
  __ Bnec(out, temp2, &return_false);
  __ Daddiu(TMP, TMP, 8);
  __ Daddiu(temp3, temp3, 8);
  // With string compression, we have compared 8 bytes, otherwise 4 chars.
  __ Addiu(temp1, temp1, mirror::kUseStringCompression ? -8 : -4);
  __ Bgtzc(temp1, &loop);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst64(out, 1);
  __ Bc(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst64(out, 0);
  __ Bind(&end);
}
1766
Chris Larsen9701c2e2015-09-04 17:22:47 -07001767static void GenerateStringIndexOf(HInvoke* invoke,
1768 Mips64Assembler* assembler,
1769 CodeGeneratorMIPS64* codegen,
1770 ArenaAllocator* allocator,
1771 bool start_at_zero) {
1772 LocationSummary* locations = invoke->GetLocations();
1773 GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;
1774
1775 // Note that the null check must have been done earlier.
1776 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1777
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001778 // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
1779 // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
Chris Larsen9701c2e2015-09-04 17:22:47 -07001780 SlowPathCodeMIPS64* slow_path = nullptr;
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001781 HInstruction* code_point = invoke->InputAt(1);
1782 if (code_point->IsIntConstant()) {
Vladimir Markoda051082016-05-17 16:10:20 +01001783 if (!IsUint<16>(code_point->AsIntConstant()->GetValue())) {
Chris Larsen9701c2e2015-09-04 17:22:47 -07001784 // Always needs the slow-path. We could directly dispatch to it,
1785 // but this case should be rare, so for simplicity just put the
1786 // full slow-path down and branch unconditionally.
1787 slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
1788 codegen->AddSlowPath(slow_path);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001789 __ Bc(slow_path->GetEntryLabel());
Chris Larsen9701c2e2015-09-04 17:22:47 -07001790 __ Bind(slow_path->GetExitLabel());
1791 return;
1792 }
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001793 } else if (code_point->GetType() != Primitive::kPrimChar) {
Chris Larsen9701c2e2015-09-04 17:22:47 -07001794 GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
1795 __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
1796 slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
1797 codegen->AddSlowPath(slow_path);
1798 __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel()); // UTF-16 required
1799 }
1800
1801 if (start_at_zero) {
1802 DCHECK_EQ(tmp_reg, A2);
1803 // Start-index = 0.
1804 __ Clear(tmp_reg);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001805 }
1806
Serban Constantinescufc734082016-07-19 17:18:07 +01001807 codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
Roland Levillain42ad2882016-02-29 18:26:54 +00001808 CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
Chris Larsen9701c2e2015-09-04 17:22:47 -07001809
1810 if (slow_path != nullptr) {
1811 __ Bind(slow_path->GetExitLabel());
1812 }
1813}
1814
1815// int java.lang.String.indexOf(int ch)
1816void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
1817 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001818 LocationSummary::kCallOnMainAndSlowPath,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001819 kIntrinsified);
1820 // We have a hand-crafted assembly stub that follows the runtime
1821 // calling convention. So it's best to align the inputs accordingly.
1822 InvokeRuntimeCallingConvention calling_convention;
1823 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1824 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1825 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1826 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1827
1828 // Need a temp for slow-path codepoint compare, and need to send start-index=0.
1829 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1830}
1831
void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
  // start_at_zero: the temp register (A2) is cleared to pass fromIndex = 0.
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}
1835
1836// int java.lang.String.indexOf(int ch, int fromIndex)
1837void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
1838 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001839 LocationSummary::kCallOnMainAndSlowPath,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001840 kIntrinsified);
1841 // We have a hand-crafted assembly stub that follows the runtime
1842 // calling convention. So it's best to align the inputs accordingly.
1843 InvokeRuntimeCallingConvention calling_convention;
1844 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1845 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1846 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1847 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1848 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1849}
1850
void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
  // fromIndex arrives in the third argument register, so start_at_zero is false.
  GenerateStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}
1855
Roland Levillaincc3839c2016-02-29 16:23:48 +00001856// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001857void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
1858 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001859 LocationSummary::kCallOnMainAndSlowPath,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001860 kIntrinsified);
1861 InvokeRuntimeCallingConvention calling_convention;
1862 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1863 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1864 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1865 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
1866 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1867 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1868}
1869
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  // Calls the AllocStringFromBytes runtime entry point; a null byte array is
  // routed through the intrinsic slow path.
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  // Branch to the slow path when `data` is null.
  __ Beqzc(byte_array, slow_path->GetEntryLabel());

  codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ Bind(slow_path->GetExitLabel());
}
1883
Roland Levillaincc3839c2016-02-29 16:23:48 +00001884// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001885void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
1886 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu54ff4822016-07-07 18:03:19 +01001887 LocationSummary::kCallOnMainOnly,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001888 kIntrinsified);
1889 InvokeRuntimeCallingConvention calling_convention;
1890 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1891 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1892 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1893 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1894 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1895}
1896
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  // Unconditional call to the AllocStringFromChars runtime entry point.
  //
  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
}
1907
Roland Levillainf969a202016-03-09 16:14:00 +00001908// java.lang.StringFactory.newStringFromString(String toCopy)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001909void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
1910 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001911 LocationSummary::kCallOnMainAndSlowPath,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001912 kIntrinsified);
1913 InvokeRuntimeCallingConvention calling_convention;
1914 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Chris Larsen9701c2e2015-09-04 17:22:47 -07001915 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1916 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1917}
1918
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  // Calls the AllocStringFromString runtime entry point; a null `toCopy` is
  // routed through the intrinsic slow path.
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  // Branch to the slow path when the source string reference is null.
  __ Beqzc(string_to_copy, slow_path->GetEntryLabel());

  codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ Bind(slow_path->GetExitLabel());
}
1932
Chris Larsenddec7f92016-02-16 12:35:04 -08001933static void GenIsInfinite(LocationSummary* locations,
1934 bool is64bit,
1935 Mips64Assembler* assembler) {
1936 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
1937 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1938
1939 if (is64bit) {
1940 __ ClassD(FTMP, in);
1941 } else {
1942 __ ClassS(FTMP, in);
1943 }
1944 __ Mfc1(out, FTMP);
1945 __ Andi(out, out, kPositiveInfinity | kNegativeInfinity);
1946 __ Sltu(out, ZERO, out);
1947}
1948
1949// boolean java.lang.Float.isInfinite(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  // FP register in, core (boolean) register out.
  CreateFPToIntLocations(arena_, invoke);
}
1953
void IntrinsicCodeGeneratorMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  // Single-precision classification (CLASS.S); see GenIsInfinite.
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
1957
1958// boolean java.lang.Double.isInfinite(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  // FP register in, core (boolean) register out.
  CreateFPToIntLocations(arena_, invoke);
}
1962
void IntrinsicCodeGeneratorMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  // Double-precision classification (CLASS.D); see GenIsInfinite.
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
1966
Chris Larsene3660592016-11-09 11:13:42 -08001967// void java.lang.String.getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin)
1968void IntrinsicLocationsBuilderMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
1969 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Chris Larsen366d4332017-03-23 09:02:56 -07001970 LocationSummary::kNoCall,
Chris Larsene3660592016-11-09 11:13:42 -08001971 kIntrinsified);
1972 locations->SetInAt(0, Location::RequiresRegister());
1973 locations->SetInAt(1, Location::RequiresRegister());
1974 locations->SetInAt(2, Location::RequiresRegister());
1975 locations->SetInAt(3, Location::RequiresRegister());
1976 locations->SetInAt(4, Location::RequiresRegister());
1977
Chris Larsen366d4332017-03-23 09:02:56 -07001978 locations->AddTemp(Location::RequiresRegister());
1979 locations->AddTemp(Location::RequiresRegister());
1980 locations->AddTemp(Location::RequiresRegister());
Chris Larsene3660592016-11-09 11:13:42 -08001981}
1982
void IntrinsicCodeGeneratorMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  // Inline copy of [srcBegin, srcEnd) from the string's value array into the
  // destination char array. With string compression a separate byte-to-char
  // loop handles compressed sources.
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);
  const size_t char_shift = Primitive::ComponentSizeShift(Primitive::kPrimChar);

  GpuRegister srcObj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister srcBegin = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister srcEnd = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister dstObj = locations->InAt(3).AsRegister<GpuRegister>();
  GpuRegister dstBegin = locations->InAt(4).AsRegister<GpuRegister>();

  GpuRegister dstPtr = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister srcPtr = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister numChrs = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label done;
  Mips64Label loop;

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Get offset of value field within a string object.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  __ Beqc(srcEnd, srcBegin, &done);  // No characters to move.

  // Calculate number of characters to be copied.
  __ Dsubu(numChrs, srcEnd, srcBegin);

  // Calculate destination address: dstObj + data_offset + dstBegin * 2.
  __ Daddiu(dstPtr, dstObj, data_offset);
  __ Dlsa(dstPtr, dstBegin, dstPtr, char_shift);

  if (mirror::kUseStringCompression) {
    Mips64Label uncompressed_copy, compressed_loop;
    const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
    // Load count field and extract compression flag.
    __ LoadFromOffset(kLoadWord, TMP, srcObj, count_offset);
    __ Dext(TMP, TMP, 0, 1);

    // If string is uncompressed, use uncompressed path.
    __ Bnezc(TMP, &uncompressed_copy);

    // Copy loop for compressed src, copying 1 character (8-bit) to (16-bit) at a time.
    __ Daddu(srcPtr, srcObj, srcBegin);
    __ Bind(&compressed_loop);
    __ LoadFromOffset(kLoadUnsignedByte, TMP, srcPtr, value_offset);
    __ StoreToOffset(kStoreHalfword, TMP, dstPtr, 0);
    __ Daddiu(numChrs, numChrs, -1);
    __ Daddiu(srcPtr, srcPtr, 1);
    __ Daddiu(dstPtr, dstPtr, 2);
    __ Bnezc(numChrs, &compressed_loop);

    __ Bc(&done);
    __ Bind(&uncompressed_copy);
  }

  // Calculate source address: srcObj + value_offset + srcBegin * 2.
  __ Daddiu(srcPtr, srcObj, value_offset);
  __ Dlsa(srcPtr, srcBegin, srcPtr, char_shift);

  // Uncompressed path: copy one 16-bit char per iteration via AT.
  __ Bind(&loop);
  __ Lh(AT, srcPtr, 0);
  __ Daddiu(numChrs, numChrs, -1);
  __ Daddiu(srcPtr, srcPtr, char_size);
  __ Sh(AT, dstPtr, 0);
  __ Daddiu(dstPtr, dstPtr, char_size);
  __ Bnezc(numChrs, &loop);

  __ Bind(&done);
}
2058
Chris Larsen5863f852017-03-23 15:41:37 -07002059// static void java.lang.System.arraycopy(Object src, int srcPos,
2060// Object dest, int destPos,
2061// int length)
2062void IntrinsicLocationsBuilderMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
2063 HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
2064 HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
2065 HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
2066
2067 // As long as we are checking, we might as well check to see if the src and dest
2068 // positions are >= 0.
2069 if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
2070 (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
2071 // We will have to fail anyways.
2072 return;
2073 }
2074
2075 // And since we are already checking, check the length too.
2076 if (length != nullptr) {
2077 int32_t len = length->GetValue();
2078 if (len < 0) {
2079 // Just call as normal.
2080 return;
2081 }
2082 }
2083
2084 // Okay, it is safe to generate inline code.
2085 LocationSummary* locations =
2086 new (arena_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
2087 // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
2088 locations->SetInAt(0, Location::RequiresRegister());
2089 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
2090 locations->SetInAt(2, Location::RequiresRegister());
2091 locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
2092 locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));
2093
2094 locations->AddTemp(Location::RequiresRegister());
2095 locations->AddTemp(Location::RequiresRegister());
2096 locations->AddTemp(Location::RequiresRegister());
2097}
2098
2099// Utility routine to verify that "length(input) - pos >= length"
2100static void EnoughItems(Mips64Assembler* assembler,
2101 GpuRegister length_input_minus_pos,
2102 Location length,
2103 SlowPathCodeMIPS64* slow_path) {
2104 if (length.IsConstant()) {
2105 int32_t length_constant = length.GetConstant()->AsIntConstant()->GetValue();
2106
2107 if (IsInt<16>(length_constant)) {
2108 __ Slti(TMP, length_input_minus_pos, length_constant);
2109 __ Bnezc(TMP, slow_path->GetEntryLabel());
2110 } else {
2111 __ LoadConst32(TMP, length_constant);
2112 __ Bltc(length_input_minus_pos, TMP, slow_path->GetEntryLabel());
2113 }
2114 } else {
2115 __ Bltc(length_input_minus_pos, length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
2116 }
2117}
2118
// Emits a run-time range check for one side of the copy: verifies that
// pos >= 0 and that length(input) - pos >= length, branching to
// |slow_path| on failure.
// When |length_is_input_length| is true the caller asserts that |length|
// equals length(input), so the copy can only succeed when pos == 0.
// Clobbers AT.
static void CheckPosition(Mips64Assembler* assembler,
                          Location pos,
                          GpuRegister input,
                          Location length,
                          SlowPathCodeMIPS64* slow_path,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  // Calculate length(input) - pos.
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, AT, input, length_offset);
        EnoughItems(assembler, AT, length, slow_path);
      }
      // Else: pos == 0 and length == length(input): trivially in range,
      // nothing to emit.
    } else {
      // Check that (length(input) - pos) >= zero.
      __ LoadFromOffset(kLoadWord, AT, input, length_offset);
      DCHECK_GT(pos_const, 0);
      __ Addiu32(AT, AT, -pos_const);
      __ Bltzc(AT, slow_path->GetEntryLabel());

      // Verify that (length(input) - pos) >= length.
      EnoughItems(assembler, AT, length, slow_path);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bnezc(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Verify that pos >= 0.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bltzc(pos_reg, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= zero.
    __ LoadFromOffset(kLoadWord, AT, input, length_offset);
    __ Subu(AT, AT, pos_reg);
    __ Bltzc(AT, slow_path->GetEntryLabel());

    // Verify that (length(input) - pos) >= length.
    EnoughItems(assembler, AT, length, slow_path);
  }
}
2165
// static void java.lang.System.arraycopy(Object src, int srcPos,
//                                        Object dest, int destPos,
//                                        int length)
// Fast-path code for copies between distinct, non-null char arrays.
// Any check that fails branches to the intrinsic slow path, which
// performs the call the normal way.
void IntrinsicCodeGeneratorMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
  Location src_pos = locations->InAt(1);
  GpuRegister dest = locations->InAt(2).AsRegister<GpuRegister>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  Mips64Label loop;

  // Temporaries reserved by the LocationsBuilder.
  GpuRegister dest_base = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister src_base = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister count = locations->GetTemp(2).AsRegister<GpuRegister>();

  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ Beqc(src, dest, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ Beqzc(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Beqzc(dest, slow_path->GetEntryLabel());

  // Load length into register for count.
  if (length.IsConstant()) {
    // A negative constant length was rejected in the LocationsBuilder,
    // so the constant here is known to be >= 0.
    __ LoadConst32(count, length.GetConstant()->AsIntConstant()->GetValue());
  } else {
    // If the length is negative, bail out.
    // We have already checked in the LocationsBuilder for the constant case.
    __ Bltzc(length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());

    __ Move(count, length.AsRegister<GpuRegister>());
  }

  // Validity checks: source.
  CheckPosition(assembler, src_pos, src, Location::RegisterLocation(count), slow_path);

  // Validity checks: dest.
  CheckPosition(assembler, dest_pos, dest, Location::RegisterLocation(count), slow_path);

  // If count is zero, we're done.
  __ Beqzc(count, slow_path->GetExitLabel());

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  const size_t char_shift = Primitive::ComponentSizeShift(Primitive::kPrimChar);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Calculate source and destination addresses.
  if (src_pos.IsConstant()) {
    int32_t src_pos_const = src_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(src_base, src, data_offset + char_size * src_pos_const, TMP);
  } else {
    __ Daddiu64(src_base, src, data_offset, TMP);
    // src_base += src_pos << char_shift.
    __ Dlsa(src_base, src_pos.AsRegister<GpuRegister>(), src_base, char_shift);
  }
  if (dest_pos.IsConstant()) {
    int32_t dest_pos_const = dest_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(dest_base, dest, data_offset + char_size * dest_pos_const, TMP);
  } else {
    __ Daddiu64(dest_base, dest, data_offset, TMP);
    // dest_base += dest_pos << char_shift.
    __ Dlsa(dest_base, dest_pos.AsRegister<GpuRegister>(), dest_base, char_shift);
  }

  // Copy one 16-bit char per iteration, counting down to zero.
  __ Bind(&loop);
  __ Lh(TMP, src_base, 0);
  __ Daddiu(src_base, src_base, char_size);
  __ Daddiu(count, count, -1);
  __ Sh(TMP, dest_base, 0);
  __ Daddiu(dest_base, dest_base, char_size);
  __ Bnezc(count, &loop);

  __ Bind(slow_path->GetExitLabel());
}
2251
// Isolates the most significant set bit of the input: shifts the sign bit
// (MIN_VALUE of the type) right by the number of leading zeros of "in".
static void GenHighestOneBit(LocationSummary* locations,
                             Primitive::Type type,
                             Mips64Assembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << PrettyDescriptor(type);

  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (type == Primitive::kPrimLong) {
    __ Dclz(TMP, in);  // TMP = number of leading zeros (0..64).
    __ LoadConst64(AT, INT64_C(0x8000000000000000));
    __ Dsrlv(AT, AT, TMP);
  } else {
    __ Clz(TMP, in);  // TMP = number of leading zeros (0..32).
    __ LoadConst32(AT, 0x80000000);
    __ Srlv(AT, AT, TMP);
  }
  // For either value of "type", when "in" is zero, "out" should also
  // be zero. Without this extra "and" operation, when "in" is zero,
  // "out" would be either Integer.MIN_VALUE, or Long.MIN_VALUE because
  // the MIPS logical shift operations "dsrlv", and "srlv" don't use
  // the shift amount (TMP) directly; they use either (TMP % 64) or
  // (TMP % 32), respectively.
  __ And(out, AT, in);
}
2277
// int java.lang.Integer.highestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.highestOneBit(long)
// Shares the common emitter with the int flavor; only the operand width
// differs.
void IntrinsicLocationsBuilderMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}
2295
// Isolates the least significant set bit of the input using the
// two's-complement identity out = in & -in (yields 0 when in == 0).
static void GenLowestOneBit(LocationSummary* locations,
                            Primitive::Type type,
                            Mips64Assembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << PrettyDescriptor(type);

  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // TMP = -in, at the operand's width.
  if (type == Primitive::kPrimLong) {
    __ Dsubu(TMP, ZERO, in);
  } else {
    __ Subu(TMP, ZERO, in);
  }
  __ And(out, TMP, in);
}
2311
// int java.lang.Integer.lowestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.lowestOneBit(long)
// Shares the common emitter with the int flavor; only the operand width
// differs.
void IntrinsicLocationsBuilderMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}
2329
Chris Larsen74c20582017-03-28 22:17:35 -07002330static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
2331 LocationSummary* locations = new (arena) LocationSummary(invoke,
2332 LocationSummary::kCallOnMainOnly,
2333 kIntrinsified);
2334 InvokeRuntimeCallingConvention calling_convention;
2335
2336 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2337 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimDouble));
2338}
2339
2340static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
2341 LocationSummary* locations = new (arena) LocationSummary(invoke,
2342 LocationSummary::kCallOnMainOnly,
2343 kIntrinsified);
2344 InvokeRuntimeCallingConvention calling_convention;
2345
2346 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2347 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2348 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimDouble));
2349}
2350
2351static void GenFPToFPCall(HInvoke* invoke,
2352 CodeGeneratorMIPS64* codegen,
2353 QuickEntrypointEnum entry) {
2354 LocationSummary* locations = invoke->GetLocations();
2355 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
2356 DCHECK_EQ(in, F12);
2357 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
2358 DCHECK_EQ(out, F0);
2359
2360 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2361}
2362
2363static void GenFPFPToFPCall(HInvoke* invoke,
2364 CodeGeneratorMIPS64* codegen,
2365 QuickEntrypointEnum entry) {
2366 LocationSummary* locations = invoke->GetLocations();
2367 FpuRegister in0 = locations->InAt(0).AsFpuRegister<FpuRegister>();
2368 DCHECK_EQ(in0, F12);
2369 FpuRegister in1 = locations->InAt(1).AsFpuRegister<FpuRegister>();
2370 DCHECK_EQ(in1, F13);
2371 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
2372 DCHECK_EQ(out, F0);
2373
2374 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2375}
2376
// Each of the Math intrinsics below is implemented as a call to the
// corresponding quick runtime entrypoint, using the FP argument/return
// locations set up by CreateFPToFPCallLocations or
// CreateFPFPToFPCallLocations.

// static double java.lang.Math.cos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}

// static double java.lang.Math.sin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}

// static double java.lang.Math.acos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}

// static double java.lang.Math.asin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}

// static double java.lang.Math.atan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}

// static double java.lang.Math.atan2(double y, double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickAtan2);
}

// static double java.lang.Math.cbrt(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

// static double java.lang.Math.cosh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

// static double java.lang.Math.exp(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

// static double java.lang.Math.expm1(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

// static double java.lang.Math.hypot(double x, double y)
void IntrinsicLocationsBuilderMIPS64::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickHypot);
}

// static double java.lang.Math.log(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

// static double java.lang.Math.log10(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

// static double java.lang.Math.nextAfter(double start, double direction)
void IntrinsicLocationsBuilderMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

// static double java.lang.Math.sinh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

// static double java.lang.Math.tan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

// static double java.lang.Math.tanh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}
2529
// static Integer java.lang.Integer.valueOf(int)
// (The previous comment claimed "long valueOf(long)"; the code below sets
// up an int argument and a reference (kPrimNot) return.)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConvention calling_convention;
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      calling_convention.GetReturnLocation(Primitive::kPrimNot),
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
2539
// Boxes an int: returns the cached boot-image Integer when the value lies
// in [info.low, info.high], otherwise allocates and initializes a new
// java.lang.Integer.
void IntrinsicCodeGeneratorMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo();
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  InstructionCodeGeneratorMIPS64* icodegen =
      down_cast<InstructionCodeGeneratorMIPS64*>(codegen_->GetInstructionVisitor());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  InvokeRuntimeCallingConvention calling_convention;
  if (invoke->InputAt(0)->IsConstant()) {
    int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
    if (value >= info.low && value <= info.high) {
      // Just embed the j.l.Integer in the code.
      ScopedObjectAccess soa(Thread::Current());
      mirror::Object* boxed = info.cache->Get(value + (-info.low));
      DCHECK(boxed != nullptr && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boxed));
      uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(boxed));
      __ LoadConst64(out, address);
    } else {
      // Allocate and initialize a new j.l.Integer.
      // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
      // JIT object table.
      // NOTE(review): the entrypoint invoked is kQuickAllocObjectInitialized
      // while the type check names kQuickAllocObjectWithChecks — confirm the
      // two entrypoints share a signature.
      uint32_t address =
          dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
      __ LoadConst64(calling_convention.GetRegisterAt(0), address);
      codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
      CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
      __ StoreConstToOffset(kStoreWord, value, out, info.value_offset, TMP);
      // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
      // one.
      icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    }
  } else {
    GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
    Mips64Label allocate, done;
    int32_t count = static_cast<uint32_t>(info.high) - info.low + 1;

    // Is (info.low <= in) && (in <= info.high)?
    __ Addiu32(out, in, -info.low);
    // As unsigned quantities is out < (info.high - info.low + 1)?
    __ LoadConst32(AT, count);
    // Branch if out >= (info.high - info.low + 1).
    // This means that "in" is outside of the range [info.low, info.high].
    __ Bgeuc(out, AT, &allocate);

    // If the value is within the bounds, load the j.l.Integer directly from the array.
    uint32_t data_offset = mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
    uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.cache));
    __ LoadConst64(TMP, data_offset + address);
    // out = cache[data_offset + address + (in - info.low) * 4].
    __ Dlsa(out, out, TMP, TIMES_4);
    __ Lwu(out, out, 0);
    __ MaybeUnpoisonHeapReference(out);
    __ Bc(&done);

    __ Bind(&allocate);
    // Otherwise allocate and initialize a new j.l.Integer.
    address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
    __ LoadConst64(calling_convention.GetRegisterAt(0), address);
    codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
    __ StoreToOffset(kStoreWord, in, out, info.value_offset);
    // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
    // one.
    icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    __ Bind(&done);
  }
}
2607
Aart Bik2f9fcc92016-03-01 15:16:54 -08002608UNIMPLEMENTED_INTRINSIC(MIPS64, ReferenceGetReferent)
Aart Bik2f9fcc92016-03-01 15:16:54 -08002609UNIMPLEMENTED_INTRINSIC(MIPS64, SystemArrayCopy)
Aart Bik3f67e692016-01-15 14:35:12 -08002610
Aart Bikff7d89c2016-11-07 08:49:28 -08002611UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOf);
2612UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOfAfter);
Aart Bik71bf7b42016-11-16 10:17:46 -08002613UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferAppend);
2614UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferLength);
2615UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferToString);
2616UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppend);
2617UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderLength);
2618UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderToString);
Aart Bikff7d89c2016-11-07 08:49:28 -08002619
Aart Bik0e54c012016-03-04 12:08:31 -08002620// 1.8.
2621UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddInt)
2622UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddLong)
2623UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetInt)
2624UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetLong)
2625UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetObject)
Aart Bik0e54c012016-03-04 12:08:31 -08002626
Nicolas Geoffray365719c2017-03-08 13:11:50 +00002627UNIMPLEMENTED_INTRINSIC(MIPS64, ThreadInterrupted)
2628
Aart Bik2f9fcc92016-03-01 15:16:54 -08002629UNREACHABLE_INTRINSICS(MIPS64)
Chris Larsen3039e382015-08-26 07:54:08 -07002630
2631#undef __
2632
2633} // namespace mips64
2634} // namespace art