/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_mips64.h"

#include "arch/mips64/instruction_set_features_mips64.h"
#include "art_method.h"
#include "code_generator_mips64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/mips64/assembler_mips64.h"
#include "utils/mips64/constants_mips64.h"

namespace art {

namespace mips64 {

IntrinsicLocationsBuilderMIPS64::IntrinsicLocationsBuilderMIPS64(CodeGeneratorMIPS64* codegen)
    : codegen_(codegen), arena_(codegen->GetGraph()->GetArena()) {
}

Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() {
  return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorMIPS64* codegen) {
  if (!trg.IsValid()) {
    DCHECK_EQ(type, Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
    if (trg_reg != V0) {
      __ Move(V0, trg_reg);
    }
  } else {
    FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
    if (trg_reg != F0) {
      if (type == Primitive::kPrimFloat) {
        __ MovS(F0, trg_reg);
      } else {
        __ MovD(F0, trg_reg);
      }
    }
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
  InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the
// intrinsic) in an intrinsified call. This will copy the arguments
// into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations
//       given by the invoke's location summary. If an intrinsic
//       modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit IntrinsicSlowPathMIPS64(HInvoke* invoke)
      : SlowPathCodeMIPS64(invoke), invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(A0));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
};

#undef __

bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    __ Dmfc1(out, in);
  } else {
    __ Mfc1(out, in);
  }
}

// long java.lang.Double.doubleToRawLongBits(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// int java.lang.Float.floatToRawIntBits(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  if (is64bit) {
    __ Dmtc1(in, out);
  } else {
    __ Mtc1(in, out);
  }
}

// double java.lang.Double.longBitsToDouble(long)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// float java.lang.Float.intBitsToFloat(int)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  switch (type) {
    case Primitive::kPrimShort:
      __ Dsbh(out, in);
      __ Seh(out, out);
      break;
    case Primitive::kPrimInt:
      __ Rotr(out, in, 16);
      __ Wsbh(out, out);
      break;
    case Primitive::kPrimLong:
      __ Dsbh(out, in);
      __ Dshd(out, out);
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

// int java.lang.Integer.reverseBytes(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.reverseBytes(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

// short java.lang.Short.reverseBytes(short)
void IntrinsicLocationsBuilderMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

static void GenNumberOfLeadingZeroes(LocationSummary* locations,
                                     bool is64bit,
                                     Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    __ Dclz(out, in);
  } else {
    __ Clz(out, in);
  }
}

// int java.lang.Integer.numberOfLeadingZeros(int i)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// int java.lang.Long.numberOfLeadingZeros(long i)
void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

static void GenNumberOfTrailingZeroes(LocationSummary* locations,
                                      bool is64bit,
                                      Mips64Assembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
    __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
  } else {
    __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
    __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
  }
}

// int java.lang.Integer.numberOfTrailingZeros(int i)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// int java.lang.Long.numberOfTrailingZeros(long i)
void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       Mips64Assembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (type == Primitive::kPrimInt) {
    __ Rotr(out, in, 16);
    __ Wsbh(out, out);
    __ Bitswap(out, out);
  } else {
    __ Dsbh(out, in);
    __ Dshd(out, out);
    __ Dbitswap(out, out);
  }
}

// int java.lang.Integer.reverse(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.reverse(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void GenBitCount(LocationSummary* locations,
                        const Primitive::Type type,
                        Mips64Assembler* assembler) {
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();

  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  // https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel
  //
  // A generalization of the best bit counting method to integers of
  // bit-widths up to 128 (parameterized by type T) is this:
  //
  //     v = v - ((v >> 1) & (T)~(T)0/3);                             // temp
  //     v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3);        // temp
  //     v = (v + (v >> 4)) & (T)~(T)0/255*15;                        // temp
  //     c = (T)(v * ((T)~(T)0/255)) >> (sizeof(T) - 1) * BITS_PER_BYTE;  // count
  //
  // For comparison, for 32-bit quantities, this algorithm can be executed
  // using 20 MIPS instructions (the calls to LoadConst32() generate two
  // machine instructions each for the values being used in this algorithm).
  // A(n unrolled) loop-based algorithm requires 25 instructions.
  //
  // For a 64-bit operand this can be performed in 24 instructions compared
  // to a(n unrolled) loop based algorithm which requires 38 instructions.
  //
  // There are algorithms which are faster in the cases where very few
  // bits are set but the algorithm here attempts to minimize the total
  // number of instructions executed even when a large number of bits
  // are set.
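  //
  // As an illustration only (not part of the emitted code), the 32-bit
  // variant of the hack above, written in plain C with the concrete
  // masks that the code below materializes via LoadConst32():
  //
  //     uint32_t BitCount32(uint32_t v) {
  //       v = v - ((v >> 1) & 0x55555555);                 // 2-bit sums
  //       v = (v & 0x33333333) + ((v >> 2) & 0x33333333);  // 4-bit sums
  //       v = (v + (v >> 4)) & 0x0F0F0F0F;                 // 8-bit sums
  //       return (v * 0x01010101) >> 24;                   // total in top byte
  //     }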

  if (type == Primitive::kPrimInt) {
    __ Srl(TMP, in, 1);
    __ LoadConst32(AT, 0x55555555);
    __ And(TMP, TMP, AT);
    __ Subu(TMP, in, TMP);
    __ LoadConst32(AT, 0x33333333);
    __ And(out, TMP, AT);
    __ Srl(TMP, TMP, 2);
    __ And(TMP, TMP, AT);
    __ Addu(TMP, out, TMP);
    __ Srl(out, TMP, 4);
    __ Addu(out, out, TMP);
    __ LoadConst32(AT, 0x0F0F0F0F);
    __ And(out, out, AT);
    __ LoadConst32(TMP, 0x01010101);
    __ MulR6(out, out, TMP);
    __ Srl(out, out, 24);
  } else if (type == Primitive::kPrimLong) {
    __ Dsrl(TMP, in, 1);
    __ LoadConst64(AT, 0x5555555555555555L);
    __ And(TMP, TMP, AT);
    __ Dsubu(TMP, in, TMP);
    __ LoadConst64(AT, 0x3333333333333333L);
    __ And(out, TMP, AT);
    __ Dsrl(TMP, TMP, 2);
    __ And(TMP, TMP, AT);
    __ Daddu(TMP, out, TMP);
    __ Dsrl(out, TMP, 4);
    __ Daddu(out, out, TMP);
    __ LoadConst64(AT, 0x0F0F0F0F0F0F0F0FL);
    __ And(out, out, AT);
    __ LoadConst64(TMP, 0x0101010101010101L);
    __ Dmul(out, out, TMP);
    __ Dsrl32(out, out, 24);
  }
}

// int java.lang.Integer.bitCount(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// int java.lang.Long.bitCount(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  if (is64bit) {
    __ AbsD(out, in);
  } else {
    __ AbsS(out, in);
  }
}

// double java.lang.Math.abs(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// float java.lang.Math.abs(float)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    __ Dsra32(AT, in, 31);
    __ Xor(out, in, AT);
    __ Dsubu(out, out, AT);
  } else {
    __ Sra(AT, in, 31);
    __ Xor(out, in, AT);
    __ Subu(out, out, AT);
  }
}

// int java.lang.Math.abs(int)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// long java.lang.Math.abs(long)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        Primitive::Type type,
                        Mips64Assembler* assembler) {
  FpuRegister a = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister b = locations->InAt(1).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  Mips64Label noNaNs;
  Mips64Label done;
  FpuRegister ftmp = ((out != a) && (out != b)) ? out : FTMP;

  // When Java computes min/max it prefers a NaN to a number; the
  // behavior of MIPSR6 is to prefer numbers to NaNs, i.e., if one of
  // the inputs is a NaN and the other is a valid number, the MIPS
  // instruction will return the number; Java wants the NaN value
  // returned. This is why there is extra logic preceding the use of
  // the MIPS min.fmt/max.fmt instructions. If either a, or b holds a
  // NaN, return the NaN, otherwise return the min/max.
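  //
  // For example (Java semantics, illustration only):
  //
  //     Math.min(1.0, Double.NaN)  // == NaN in Java,
  //                                // but a bare min.d would give 1.0.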
  if (type == Primitive::kPrimDouble) {
    __ CmpUnD(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN.
    __ CmpEqD(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelD(ftmp, a, b);

    if (ftmp != out) {
      __ MovD(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinD(out, a, b);
    } else {
      __ MaxD(out, a, b);
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimFloat);
    __ CmpUnS(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN.
    __ CmpEqS(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelS(ftmp, a, b);

    if (ftmp != out) {
      __ MovS(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinS(out, a, b);
    } else {
      __ MaxS(out, a, b);
    }
  }

  __ Bind(&done);
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

// double java.lang.Math.min(double, double)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, Primitive::kPrimDouble, GetAssembler());
}

// float java.lang.Math.min(float, float)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, Primitive::kPrimFloat, GetAssembler());
}

// double java.lang.Math.max(double, double)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, Primitive::kPrimDouble, GetAssembler());
}

// float java.lang.Math.max(float, float)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, Primitive::kPrimFloat, GetAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      Mips64Assembler* assembler) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (lhs == rhs) {
    if (out != lhs) {
      __ Move(out, lhs);
    }
  } else {
    // Some architectures, such as ARM and MIPS (prior to r6), have a
    // conditional move instruction which only changes the target
    // (output) register if the condition is true (MIPS prior to r6 had
    // MOVF, MOVT, and MOVZ). The SELEQZ and SELNEZ instructions always
    // change the target (output) register. If the condition is true the
    // output register gets the contents of the "rs" register; otherwise,
    // the output register is set to zero. One consequence of this is
    // that to implement something like "rd = c==0 ? rs : rt" MIPS64r6
    // needs to use a pair of SELEQZ/SELNEZ instructions. After
    // executing this pair of instructions one of the output registers
    // from the pair will necessarily contain zero. Then the code ORs the
    // output registers from the SELEQZ/SELNEZ instructions to get the
    // final result.
    //
    // The initial test to see if the output register is the same as the
    // first input register is needed to make sure that the value in the
    // first input register isn't clobbered before we've finished
    // computing the output value. The logic in the corresponding else
    // clause performs the same task but makes sure the second input
    // register isn't clobbered in the event that it's the same register
    // as the output register; the else clause also handles the case
    // where the output register is distinct from both the first, and the
    // second input registers.
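    //
    // As a sketch (illustration only, not emitted verbatim), the r6
    // expansion of "rd = (c == 0) ? rs : rt" is:
    //
    //     seleqz rd, rs, c   // rd = (c == 0) ? rs : 0
    //     selnez AT, rt, c   // AT = (c != 0) ? rt : 0
    //     or     rd, rd, AT  // exactly one of rd/AT is zero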
    if (out == lhs) {
      __ Slt(AT, rhs, lhs);
      if (is_min) {
        __ Seleqz(out, lhs, AT);
        __ Selnez(AT, rhs, AT);
      } else {
        __ Selnez(out, lhs, AT);
        __ Seleqz(AT, rhs, AT);
      }
    } else {
      __ Slt(AT, lhs, rhs);
      if (is_min) {
        __ Seleqz(out, rhs, AT);
        __ Selnez(AT, lhs, AT);
      } else {
        __ Selnez(out, rhs, AT);
        __ Seleqz(AT, lhs, AT);
      }
    }
    __ Or(out, out, AT);
  }
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

// int java.lang.Math.min(int, int)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}

// long java.lang.Math.min(long, long)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}

// int java.lang.Math.max(int, int)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}

// long java.lang.Math.max(long, long)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}

// double java.lang.Math.sqrt(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  __ SqrtD(out, in);
}

static void CreateFPToFP(ArenaAllocator* arena,
                         HInvoke* invoke,
                         Location::OutputOverlap overlaps = Location::kOutputOverlap) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), overlaps);
}

// double java.lang.Math.rint(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFP(arena_, invoke, Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  __ RintD(out, in);
}

// double java.lang.Math.floor(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFP(arena_, invoke);
}

const constexpr uint16_t kFPLeaveUnchanged = kPositiveZero |
                                             kPositiveInfinity |
                                             kNegativeZero |
                                             kNegativeInfinity |
                                             kQuietNaN |
                                             kSignalingNaN;

enum FloatRoundingMode {
  kFloor,
  kCeil,
};

static void GenRoundingMode(LocationSummary* locations,
                            FloatRoundingMode mode,
                            Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  DCHECK_NE(in, out);

  Mips64Label done;

  // double floor/ceil(double in) {
  //   if in.isNaN || in.isInfinite || in.isZero {
  //     return in;
  //   }
  __ ClassD(out, in);
  __ Dmfc1(AT, out);
  __ Andi(AT, AT, kFPLeaveUnchanged);  // +0.0 | +Inf | -0.0 | -Inf | qNaN | sNaN
  __ MovD(out, in);
  __ Bnezc(AT, &done);

  //   Long outLong = floor/ceil(in);
  //   if (outLong == Long.MAX_VALUE) || (outLong == Long.MIN_VALUE) {
  //     // floor()/ceil() has almost certainly returned a value
  //     // which can't be successfully represented as a signed
  //     // 64-bit number. Java expects that the input value will
  //     // be returned in these cases.
  //     // There is also a small probability that floor(in)/ceil(in)
  //     // correctly truncates/rounds up the input value to
  //     // Long.MAX_VALUE or Long.MIN_VALUE. In these cases, this
  //     // exception handling code still does the correct thing.
  //     return in;
  //   }
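  //
  // For illustration only: a double as large as 2^63 is already
  // integral (doubles of magnitude >= 2^52 have no fractional part),
  // but floor.l.d/ceil.l.d cannot represent it and clamp the result
  // to Long.MAX_VALUE (or Long.MIN_VALUE for negative overflow),
  // which is exactly what the check below looks for.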
  if (mode == kFloor) {
    __ FloorLD(out, in);
  } else if (mode == kCeil) {
    __ CeilLD(out, in);
  }
  __ Dmfc1(AT, out);
  __ MovD(out, in);
  __ Daddiu(TMP, AT, 1);
  __ Dati(TMP, 0x8000);  // TMP = AT + 0x8000 0000 0000 0001
                         // or AT - 0x7FFF FFFF FFFF FFFF.
                         // IOW, TMP = 1 if AT = Long.MIN_VALUE
                         // or TMP = 0 if AT = Long.MAX_VALUE.
  __ Dsrl(TMP, TMP, 1);  // TMP = 0 if AT = Long.MIN_VALUE
                         //          or AT = Long.MAX_VALUE.
  __ Beqzc(TMP, &done);
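  // Tracing the MIN/MAX detection above (illustration only):
  //     AT = 0x7FFF FFFF FFFF FFFF (MAX): TMP wraps to 0       -> branch taken
  //     AT = 0x8000 0000 0000 0000 (MIN): TMP = 1; 1 >> 1 == 0 -> branch taken
  //     any other AT: TMP >> 1 != 0, so execution falls through.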

  //   double out = outLong;
  //   return out;
  __ Dmtc1(AT, out);
  __ Cvtdl(out, out);
  __ Bind(&done);
  // }
}

void IntrinsicCodeGeneratorMIPS64::VisitMathFloor(HInvoke* invoke) {
  GenRoundingMode(invoke->GetLocations(), kFloor, GetAssembler());
}

// double java.lang.Math.ceil(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFP(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCeil(HInvoke* invoke) {
  GenRoundingMode(invoke->GetLocations(), kCeil, GetAssembler());
}

static void GenRound(LocationSummary* locations, Mips64Assembler* assembler, Primitive::Type type) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister half = locations->GetTemp(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  DCHECK(type == Primitive::kPrimFloat || type == Primitive::kPrimDouble);

  Mips64Label done;

  // out = floor(in);
  //
  // if (out != MAX_VALUE && out != MIN_VALUE) {
  //   TMP = ((in - out) >= 0.5) ? 1 : 0;
  //   return out += TMP;
  // }
  // return out;
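  //
  // Worked example (illustration only): for round(-2.5), floor gives
  // out = -3 and in - out = 0.5, so out is bumped to -2, matching
  // java.lang.Math.round(-2.5) == -2 (round half up).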

  // out = floor(in);
  if (type == Primitive::kPrimDouble) {
    __ FloorLD(FTMP, in);
    __ Dmfc1(out, FTMP);
  } else {
    __ FloorWS(FTMP, in);
    __ Mfc1(out, FTMP);
  }

  // if (out != MAX_VALUE && out != MIN_VALUE)
  if (type == Primitive::kPrimDouble) {
    __ Daddiu(TMP, out, 1);
    __ Dati(TMP, 0x8000);  // TMP = out + 0x8000 0000 0000 0001
                           // or out - 0x7FFF FFFF FFFF FFFF.
                           // IOW, TMP = 1 if out = Long.MIN_VALUE
                           // or TMP = 0 if out = Long.MAX_VALUE.
    __ Dsrl(TMP, TMP, 1);  // TMP = 0 if out = Long.MIN_VALUE
                           //          or out = Long.MAX_VALUE.
    __ Beqzc(TMP, &done);
  } else {
    __ Addiu(TMP, out, 1);
    __ Aui(TMP, TMP, 0x8000);  // TMP = out + 0x8000 0001
                               // or out - 0x7FFF FFFF.
                               // IOW, TMP = 1 if out = Int.MIN_VALUE
                               // or TMP = 0 if out = Int.MAX_VALUE.
    __ Srl(TMP, TMP, 1);  // TMP = 0 if out = Int.MIN_VALUE
                          //          or out = Int.MAX_VALUE.
    __ Beqzc(TMP, &done);
  }

  // TMP = (0.5 <= (in - out)) ? -1 : 0;
  if (type == Primitive::kPrimDouble) {
    __ Cvtdl(FTMP, FTMP);  // Convert output of floor.l.d back to "double".
    __ LoadConst64(AT, bit_cast<int64_t, double>(0.5));
    __ SubD(FTMP, in, FTMP);
    __ Dmtc1(AT, half);
    __ CmpLeD(FTMP, half, FTMP);
    __ Dmfc1(TMP, FTMP);
  } else {
    __ Cvtsw(FTMP, FTMP);  // Convert output of floor.w.s back to "float".
    __ LoadConst32(AT, bit_cast<int32_t, float>(0.5f));
    __ SubS(FTMP, in, FTMP);
    __ Mtc1(AT, half);
    __ CmpLeS(FTMP, half, FTMP);
    __ Mfc1(TMP, FTMP);
  }

  // Return out -= TMP.
  if (type == Primitive::kPrimDouble) {
    __ Dsubu(out, out, TMP);
  } else {
    __ Subu(out, out, TMP);
  }

  __ Bind(&done);
}

// int java.lang.Math.round(float)
void IntrinsicLocationsBuilderMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->AddTemp(Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
  GenRound(invoke->GetLocations(), GetAssembler(), Primitive::kPrimFloat);
}

// long java.lang.Math.round(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->AddTemp(Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
  GenRound(invoke->GetLocations(), GetAssembler(), Primitive::kPrimDouble);
}

// byte libcore.io.Memory.peekByte(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Lb(out, adr, 0);
}

// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Lh(out, adr, 0);
}

// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Lw(out, adr, 0);
}

// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Ld(out, adr, 0);
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sb(val, adr, 0);
}

// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sh(val, adr, 0);
}

// void libcore.io.Memory.pokeInt(long address, int value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sw(val, adr, 0);
}

// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sd(val, adr, 0);
}

// Thread java.lang.Thread.currentThread()
void IntrinsicLocationsBuilderMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ LoadFromOffset(kLoadUnsignedWord,
                    out,
                    TR,
                    Thread::PeerOffset<kMips64PointerSize>().Int32Value());
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena,
                                          HInvoke* invoke,
                                          Primitive::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           (can_call
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall),
                                                           kIntrinsified);
  if (can_call && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(),
                    (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in InstructionCodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}

// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
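//
// A well-behaved caller (sketch, for illustration only) derives the
// offset from Unsafe itself, which yields a suitably aligned address:
//
//     long offset = unsafe.objectFieldOffset(field);
//     int value = unsafe.getInt(obj, offset);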
Chris Larsen1360ada2015-09-04 23:38:16 -07001145static void GenUnsafeGet(HInvoke* invoke,
1146 Primitive::Type type,
1147 bool is_volatile,
1148 CodeGeneratorMIPS64* codegen) {
1149 LocationSummary* locations = invoke->GetLocations();
1150 DCHECK((type == Primitive::kPrimInt) ||
1151 (type == Primitive::kPrimLong) ||
Alexey Frunze15958152017-02-09 19:08:30 -08001152 (type == Primitive::kPrimNot)) << type;
Chris Larsen1360ada2015-09-04 23:38:16 -07001153 Mips64Assembler* assembler = codegen->GetAssembler();
Alexey Frunze15958152017-02-09 19:08:30 -08001154 // Target register.
1155 Location trg_loc = locations->Out();
1156 GpuRegister trg = trg_loc.AsRegister<GpuRegister>();
Chris Larsen1360ada2015-09-04 23:38:16 -07001157 // Object pointer.
Alexey Frunze15958152017-02-09 19:08:30 -08001158 Location base_loc = locations->InAt(1);
1159 GpuRegister base = base_loc.AsRegister<GpuRegister>();
Chris Larsen1360ada2015-09-04 23:38:16 -07001160 // Long offset.
Alexey Frunze15958152017-02-09 19:08:30 -08001161 Location offset_loc = locations->InAt(2);
1162 GpuRegister offset = offset_loc.AsRegister<GpuRegister>();
Chris Larsen1360ada2015-09-04 23:38:16 -07001163
Alexey Frunze15958152017-02-09 19:08:30 -08001164 if (!(kEmitCompilerReadBarrier && kUseBakerReadBarrier && (type == Primitive::kPrimNot))) {
1165 __ Daddu(TMP, base, offset);
Chris Larsen1360ada2015-09-04 23:38:16 -07001166 }
Alexey Frunze15958152017-02-09 19:08:30 -08001167
Chris Larsen1360ada2015-09-04 23:38:16 -07001168 switch (type) {
Alexey Frunze15958152017-02-09 19:08:30 -08001169 case Primitive::kPrimLong:
1170 __ Ld(trg, TMP, 0);
1171 if (is_volatile) {
1172 __ Sync(0);
1173 }
1174 break;
1175
Chris Larsen1360ada2015-09-04 23:38:16 -07001176 case Primitive::kPrimInt:
1177 __ Lw(trg, TMP, 0);
Alexey Frunze15958152017-02-09 19:08:30 -08001178 if (is_volatile) {
1179 __ Sync(0);
1180 }
Chris Larsen1360ada2015-09-04 23:38:16 -07001181 break;
1182
1183 case Primitive::kPrimNot:
Alexey Frunze15958152017-02-09 19:08:30 -08001184 if (kEmitCompilerReadBarrier) {
1185 if (kUseBakerReadBarrier) {
1186 Location temp = locations->GetTemp(0);
1187 codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke,
1188 trg_loc,
1189 base,
1190 /* offset */ 0U,
1191 /* index */ offset_loc,
1192 TIMES_1,
1193 temp,
1194 /* needs_null_check */ false);
1195 if (is_volatile) {
1196 __ Sync(0);
1197 }
1198 } else {
1199 __ Lwu(trg, TMP, 0);
1200 if (is_volatile) {
1201 __ Sync(0);
1202 }
1203 codegen->GenerateReadBarrierSlow(invoke,
1204 trg_loc,
1205 trg_loc,
1206 base_loc,
1207 /* offset */ 0U,
1208 /* index */ offset_loc);
1209 }
1210 } else {
1211 __ Lwu(trg, TMP, 0);
1212 if (is_volatile) {
1213 __ Sync(0);
1214 }
1215 __ MaybeUnpoisonHeapReference(trg);
1216 }
Chris Larsen1360ada2015-09-04 23:38:16 -07001217 break;
1218
1219 default:
1220 LOG(FATAL) << "Unsupported op size " << type;
1221 UNREACHABLE();
1222 }
1223}
1224
1225// int sun.misc.Unsafe.getInt(Object o, long offset)
1226void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001227 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
Chris Larsen1360ada2015-09-04 23:38:16 -07001228}
1229
1230void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001231 GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001232}
1233
1234// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
1235void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001236 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
Chris Larsen1360ada2015-09-04 23:38:16 -07001237}
1238
1239void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001240 GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001241}
1242
1243// long sun.misc.Unsafe.getLong(Object o, long offset)
1244void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001245 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
Chris Larsen1360ada2015-09-04 23:38:16 -07001246}
1247
1248void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001249 GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001250}
1251
1252// long sun.misc.Unsafe.getLongVolatile(Object o, long offset)
1253void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001254 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
Chris Larsen1360ada2015-09-04 23:38:16 -07001255}
1256
1257void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001258 GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001259}
1260
1261// Object sun.misc.Unsafe.getObject(Object o, long offset)
1262void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001263 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
Chris Larsen1360ada2015-09-04 23:38:16 -07001264}
1265
1266void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001267 GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001268}
1269
1270// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
1271void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001272 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
Chris Larsen1360ada2015-09-04 23:38:16 -07001273}
1274
1275void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001276 GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001277}
1278
1279static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
1280 LocationSummary* locations = new (arena) LocationSummary(invoke,
1281 LocationSummary::kNoCall,
1282 kIntrinsified);
1283 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1284 locations->SetInAt(1, Location::RequiresRegister());
1285 locations->SetInAt(2, Location::RequiresRegister());
1286 locations->SetInAt(3, Location::RequiresRegister());
1287}
1288
Alexey Frunze15958152017-02-09 19:08:30 -08001289// Note that the caller must supply a properly aligned memory address.
1290// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
Chris Larsen1360ada2015-09-04 23:38:16 -07001291static void GenUnsafePut(LocationSummary* locations,
1292 Primitive::Type type,
1293 bool is_volatile,
1294 bool is_ordered,
1295 CodeGeneratorMIPS64* codegen) {
1296 DCHECK((type == Primitive::kPrimInt) ||
1297 (type == Primitive::kPrimLong) ||
1298 (type == Primitive::kPrimNot));
1299 Mips64Assembler* assembler = codegen->GetAssembler();
1300 // Object pointer.
1301 GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
1302 // Long offset.
1303 GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
1304 GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>();
1305
1306 __ Daddu(TMP, base, offset);
1307 if (is_volatile || is_ordered) {
1308 __ Sync(0);
1309 }
1310 switch (type) {
1311 case Primitive::kPrimInt:
1312 case Primitive::kPrimNot:
Alexey Frunzec061de12017-02-14 13:27:23 -08001313 if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
1314 __ PoisonHeapReference(AT, value);
1315 __ Sw(AT, TMP, 0);
1316 } else {
1317 __ Sw(value, TMP, 0);
1318 }
Chris Larsen1360ada2015-09-04 23:38:16 -07001319 break;
1320
1321 case Primitive::kPrimLong:
1322 __ Sd(value, TMP, 0);
1323 break;
1324
1325 default:
1326 LOG(FATAL) << "Unsupported op size " << type;
1327 UNREACHABLE();
1328 }
1329 if (is_volatile) {
1330 __ Sync(0);
1331 }
1332
1333 if (type == Primitive::kPrimNot) {
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001334 bool value_can_be_null = true; // TODO: Is it worth finding out whether the value can be null?
1335 codegen->MarkGCCard(base, value, value_can_be_null);
Chris Larsen1360ada2015-09-04 23:38:16 -07001336 }
1337}
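// As an illustrative sketch (not emitted verbatim), the volatile 32-bit put above
// reduces to the following MIPS64 sequence; an ordered put keeps only the leading
// barrier, and a plain put emits no barriers at all:
//
//   daddu TMP, base, offset    // TMP <- address of the field
//   sync  0                    // barrier before the store
//   sw    value, 0(TMP)        // the store itself
//   sync  0                    // trailing barrier for volatile semantics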
1338
1339// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
1340void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) {
1341 CreateIntIntIntIntToVoid(arena_, invoke);
1342}
1343
1344void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001345 GenUnsafePut(invoke->GetLocations(),
1346 Primitive::kPrimInt,
1347 /* is_volatile */ false,
1348 /* is_ordered */ false,
1349 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001350}
1351
1352// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
1353void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
1354 CreateIntIntIntIntToVoid(arena_, invoke);
1355}
1356
1357void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001358 GenUnsafePut(invoke->GetLocations(),
1359 Primitive::kPrimInt,
1360 /* is_volatile */ false,
1361 /* is_ordered */ true,
1362 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001363}
1364
1365// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
1366void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
1367 CreateIntIntIntIntToVoid(arena_, invoke);
1368}
1369
1370void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001371 GenUnsafePut(invoke->GetLocations(),
1372 Primitive::kPrimInt,
1373 /* is_volatile */ true,
1374 /* is_ordered */ false,
1375 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001376}
1377
1378// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
1379void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
1380 CreateIntIntIntIntToVoid(arena_, invoke);
1381}
1382
1383void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001384 GenUnsafePut(invoke->GetLocations(),
1385 Primitive::kPrimNot,
1386 /* is_volatile */ false,
1387 /* is_ordered */ false,
1388 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001389}
1390
1391// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
1392void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
1393 CreateIntIntIntIntToVoid(arena_, invoke);
1394}
1395
1396void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001397 GenUnsafePut(invoke->GetLocations(),
1398 Primitive::kPrimNot,
1399 /* is_volatile */ false,
1400 /* is_ordered */ true,
1401 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001402}
1403
1404// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
1405void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
1406 CreateIntIntIntIntToVoid(arena_, invoke);
1407}
1408
1409void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001410 GenUnsafePut(invoke->GetLocations(),
1411 Primitive::kPrimNot,
1412 /* is_volatile */ true,
1413 /* is_ordered */ false,
1414 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001415}
1416
1417// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
1418void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
1419 CreateIntIntIntIntToVoid(arena_, invoke);
1420}
1421
1422void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001423 GenUnsafePut(invoke->GetLocations(),
1424 Primitive::kPrimLong,
1425 /* is_volatile */ false,
1426 /* is_ordered */ false,
1427 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001428}
1429
1430// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
1431void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
1432 CreateIntIntIntIntToVoid(arena_, invoke);
1433}
1434
1435void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001436 GenUnsafePut(invoke->GetLocations(),
1437 Primitive::kPrimLong,
1438 /* is_volatile */ false,
1439 /* is_ordered */ true,
1440 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001441}
1442
1443// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
1444void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
1445 CreateIntIntIntIntToVoid(arena_, invoke);
1446}
1447
1448void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001449 GenUnsafePut(invoke->GetLocations(),
1450 Primitive::kPrimLong,
1451 /* is_volatile */ true,
1452 /* is_ordered */ false,
1453 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001454}
1455
Alexey Frunze15958152017-02-09 19:08:30 -08001456static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena, HInvoke* invoke) {
1457 bool can_call = kEmitCompilerReadBarrier &&
1458 kUseBakerReadBarrier &&
1459 (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
Chris Larsen36427492015-10-23 02:19:38 -07001460 LocationSummary* locations = new (arena) LocationSummary(invoke,
Alexey Frunze15958152017-02-09 19:08:30 -08001461 (can_call
1462 ? LocationSummary::kCallOnSlowPath
1463 : LocationSummary::kNoCall),
Chris Larsen36427492015-10-23 02:19:38 -07001464 kIntrinsified);
1465 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1466 locations->SetInAt(1, Location::RequiresRegister());
1467 locations->SetInAt(2, Location::RequiresRegister());
1468 locations->SetInAt(3, Location::RequiresRegister());
1469 locations->SetInAt(4, Location::RequiresRegister());
Chris Larsen36427492015-10-23 02:19:38 -07001470 locations->SetOut(Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08001471
1472 // Temporary register used in CAS by (Baker) read barrier.
1473 if (can_call) {
1474 locations->AddTemp(Location::RequiresRegister());
1475 }
Chris Larsen36427492015-10-23 02:19:38 -07001476}
1477
Alexey Frunze15958152017-02-09 19:08:30 -08001478// Note that the caller must supply a properly aligned memory address.
1479// If it does not, the behavior is undefined (atomicity is not guaranteed and an exception may occur).
1480static void GenCas(HInvoke* invoke, Primitive::Type type, CodeGeneratorMIPS64* codegen) {
Chris Larsen36427492015-10-23 02:19:38 -07001481 Mips64Assembler* assembler = codegen->GetAssembler();
Alexey Frunze15958152017-02-09 19:08:30 -08001482 LocationSummary* locations = invoke->GetLocations();
Chris Larsen36427492015-10-23 02:19:38 -07001483 GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
Alexey Frunze15958152017-02-09 19:08:30 -08001484 Location offset_loc = locations->InAt(2);
1485 GpuRegister offset = offset_loc.AsRegister<GpuRegister>();
Chris Larsen36427492015-10-23 02:19:38 -07001486 GpuRegister expected = locations->InAt(3).AsRegister<GpuRegister>();
1487 GpuRegister value = locations->InAt(4).AsRegister<GpuRegister>();
Alexey Frunze15958152017-02-09 19:08:30 -08001488 Location out_loc = locations->Out();
1489 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Chris Larsen36427492015-10-23 02:19:38 -07001490
1491 DCHECK_NE(base, out);
1492 DCHECK_NE(offset, out);
1493 DCHECK_NE(expected, out);
1494
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001495 if (type == Primitive::kPrimNot) {
Alexey Frunze15958152017-02-09 19:08:30 -08001496 // The only read barrier implementation supporting the
1497 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1498 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
1499
1500 // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
1501 // object and scan the receiver at the next GC for nothing.
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001502 bool value_can_be_null = true; // TODO: Is it worth finding out whether the value can be null?
1503 codegen->MarkGCCard(base, value, value_can_be_null);
Alexey Frunze15958152017-02-09 19:08:30 -08001504
1505 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1506 Location temp = locations->GetTemp(0);
1507 // Need to make sure the reference stored in the field is a to-space
1508 // one before attempting the CAS or the CAS could fail incorrectly.
1509 codegen->GenerateReferenceLoadWithBakerReadBarrier(
1510 invoke,
1511 out_loc, // Unused, used only as a "temporary" within the read barrier.
1512 base,
1513 /* offset */ 0u,
1514 /* index */ offset_loc,
1515 ScaleFactor::TIMES_1,
1516 temp,
1517 /* needs_null_check */ false,
1518 /* always_update_field */ true);
1519 }
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001520 }
1521
Alexey Frunzec061de12017-02-14 13:27:23 -08001522 Mips64Label loop_head, exit_loop;
1523 __ Daddu(TMP, base, offset);
1524
1525 if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
1526 __ PoisonHeapReference(expected);
1527 // Do not poison `value`, if it is the same register as
1528 // `expected`, which has just been poisoned.
1529 if (value != expected) {
1530 __ PoisonHeapReference(value);
1531 }
1532 }
1533
Chris Larsen36427492015-10-23 02:19:38 -07001534 // do {
1535 // tmp_value = [tmp_ptr] - expected;
1536 // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
1537 // result = tmp_value == 0;
1538
Chris Larsen36427492015-10-23 02:19:38 -07001539 __ Sync(0);
1540 __ Bind(&loop_head);
1541 if (type == Primitive::kPrimLong) {
1542 __ Lld(out, TMP);
1543 } else {
Roland Levillain391b8662015-12-18 11:43:38 +00001544 // Note: We will need a read barrier here, when read barrier
1545 // support is added to the MIPS64 back end.
Chris Larsen36427492015-10-23 02:19:38 -07001546 __ Ll(out, TMP);
Alexey Frunzec061de12017-02-14 13:27:23 -08001547 if (type == Primitive::kPrimNot) {
1548 // The LL instruction sign-extends the 32-bit value, but
1549 // 32-bit references must be zero-extended. Zero-extend `out`.
1550 __ Dext(out, out, 0, 32);
1551 }
Chris Larsen36427492015-10-23 02:19:38 -07001552 }
1553 __ Dsubu(out, out, expected); // If we didn't get the 'expected'
1554 __ Sltiu(out, out, 1); // value, set 'out' to false, and
1555 __ Beqzc(out, &exit_loop); // return.
1556 __ Move(out, value); // Use 'out' for the 'store conditional' instruction.
1557 // If we used 'value' directly, we would lose 'value'
1558 // when the store fails. Whether the
1559 // store succeeds or fails, it loads the
Roland Levillain5e8d5f02016-10-18 18:03:43 +01001560 // correct Boolean value into the 'out' register.
Chris Larsen36427492015-10-23 02:19:38 -07001561 if (type == Primitive::kPrimLong) {
1562 __ Scd(out, TMP);
1563 } else {
1564 __ Sc(out, TMP);
1565 }
1566 __ Beqzc(out, &loop_head); // If we couldn't do the read-modify-write
1567 // cycle atomically then retry.
1568 __ Bind(&exit_loop);
1569 __ Sync(0);
Alexey Frunzec061de12017-02-14 13:27:23 -08001570
1571 if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
1572 __ UnpoisonHeapReference(expected);
1573 // Do not unpoison `value`, if it is the same register as
1574 // `expected`, which has just been unpoisoned.
1575 if (value != expected) {
1576 __ UnpoisonHeapReference(value);
1577 }
1578 }
Chris Larsen36427492015-10-23 02:19:38 -07001579}
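// A sketch of what the loop above assembles to for the 32-bit integer case (the
// long case uses lld/scd, and the reference case adds the zero-extension shown
// above):
//
//   sync  0
// loop_head:
//   ll    out, 0(TMP)           // load-linked
//   dsubu out, out, expected
//   sltiu out, out, 1           // out = (loaded value == expected)
//   beqzc out, exit_loop        // mismatch: return false
//   move  out, value
//   sc    out, 0(TMP)           // store-conditional: out = 1 on success
//   beqzc out, loop_head        // reservation lost: retry
// exit_loop:
//   sync  0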
1580
1581// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
1582void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001583 CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
Chris Larsen36427492015-10-23 02:19:38 -07001584}
1585
1586void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001587 GenCas(invoke, Primitive::kPrimInt, codegen_);
Chris Larsen36427492015-10-23 02:19:38 -07001588}
1589
1590// boolean sun.misc.Unsafe.compareAndSwapLong(Object o, long offset, long expected, long x)
1591void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001592 CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
Chris Larsen36427492015-10-23 02:19:38 -07001593}
1594
1595void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001596 GenCas(invoke, Primitive::kPrimLong, codegen_);
Chris Larsen36427492015-10-23 02:19:38 -07001597}
1598
1599// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
1600void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001601 // The only read barrier implementation supporting the
1602 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1603 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
1604 return;
1605 }
1606
1607 CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
Chris Larsen36427492015-10-23 02:19:38 -07001608}
1609
1610void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001611 // The only read barrier implementation supporting the
1612 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1613 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
1614
1615 GenCas(invoke, Primitive::kPrimNot, codegen_);
Chris Larsen36427492015-10-23 02:19:38 -07001616}
1617
Chris Larsen9701c2e2015-09-04 17:22:47 -07001618// int java.lang.String.compareTo(String anotherString)
1619void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
1620 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescufc734082016-07-19 17:18:07 +01001621 LocationSummary::kCallOnMainAndSlowPath,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001622 kIntrinsified);
1623 InvokeRuntimeCallingConvention calling_convention;
1624 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1625 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1626 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1627 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1628}
1629
1630void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
1631 Mips64Assembler* assembler = GetAssembler();
1632 LocationSummary* locations = invoke->GetLocations();
1633
1634 // Note that the null check must have been done earlier.
1635 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1636
1637 GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
1638 SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
1639 codegen_->AddSlowPath(slow_path);
1640 __ Beqzc(argument, slow_path->GetEntryLabel());
1641
Serban Constantinescufc734082016-07-19 17:18:07 +01001642 codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001643 __ Bind(slow_path->GetExitLabel());
1644}
1645
Chris Larsen972d6d72015-10-20 11:29:12 -07001646// boolean java.lang.String.equals(Object anObject)
1647void IntrinsicLocationsBuilderMIPS64::VisitStringEquals(HInvoke* invoke) {
1648 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1649 LocationSummary::kNoCall,
1650 kIntrinsified);
1651 locations->SetInAt(0, Location::RequiresRegister());
1652 locations->SetInAt(1, Location::RequiresRegister());
1653 locations->SetOut(Location::RequiresRegister());
1654
1655 // Temporary registers to store lengths of strings and for calculations.
1656 locations->AddTemp(Location::RequiresRegister());
1657 locations->AddTemp(Location::RequiresRegister());
1658 locations->AddTemp(Location::RequiresRegister());
1659}
1660
1661void IntrinsicCodeGeneratorMIPS64::VisitStringEquals(HInvoke* invoke) {
1662 Mips64Assembler* assembler = GetAssembler();
1663 LocationSummary* locations = invoke->GetLocations();
1664
1665 GpuRegister str = locations->InAt(0).AsRegister<GpuRegister>();
1666 GpuRegister arg = locations->InAt(1).AsRegister<GpuRegister>();
1667 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1668
1669 GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
1670 GpuRegister temp2 = locations->GetTemp(1).AsRegister<GpuRegister>();
1671 GpuRegister temp3 = locations->GetTemp(2).AsRegister<GpuRegister>();
1672
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001673 Mips64Label loop;
1674 Mips64Label end;
1675 Mips64Label return_true;
1676 Mips64Label return_false;
Chris Larsen972d6d72015-10-20 11:29:12 -07001677
1678 // Get offsets of count, value, and class fields within a string object.
1679 const int32_t count_offset = mirror::String::CountOffset().Int32Value();
1680 const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
1681 const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1682
1683 // Note that the null check must have been done earlier.
1684 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1685
1686 // If the register containing the pointer to "this" and the register
1687 // containing the pointer to "anObject" are the same register, then
1688 // "this" and "anObject" are the same object, and we can
1689 // short-circuit the logic to a true result.
1690 if (str == arg) {
1691 __ LoadConst64(out, 1);
1692 return;
1693 }
1694
Goran Jakovljevic64fa84f2017-02-27 13:14:57 +01001695 StringEqualsOptimizations optimizations(invoke);
1696 if (!optimizations.GetArgumentNotNull()) {
1697 // Check if input is null, return false if it is.
1698 __ Beqzc(arg, &return_false);
1699 }
Chris Larsen972d6d72015-10-20 11:29:12 -07001700
1701 // Reference equality check, return true if same reference.
1702 __ Beqc(str, arg, &return_true);
1703
Goran Jakovljevic64fa84f2017-02-27 13:14:57 +01001704 if (!optimizations.GetArgumentIsString()) {
1705 // Instanceof check for the argument by comparing class fields.
1706 // All string objects must have the same type since String cannot be subclassed.
1707 // Receiver must be a string object, so its class field is equal to all strings' class fields.
1708 // If the argument is a string object, its class field must be equal to receiver's class field.
1709 __ Lw(temp1, str, class_offset);
1710 __ Lw(temp2, arg, class_offset);
1711 __ Bnec(temp1, temp2, &return_false);
1712 }
Chris Larsen972d6d72015-10-20 11:29:12 -07001713
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01001714 // Load `count` fields of this and argument strings.
Chris Larsen972d6d72015-10-20 11:29:12 -07001715 __ Lw(temp1, str, count_offset);
1716 __ Lw(temp2, arg, count_offset);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01001717 // Check if `count` fields are equal, return false if they're not.
1718 // Also compares the compression style, if differs return false.
Chris Larsen972d6d72015-10-20 11:29:12 -07001719 __ Bnec(temp1, temp2, &return_false);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01001720 // Return true if both strings are empty. Even with string compression `count == 0` means empty.
1721 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
1722 "Expecting 0=compressed, 1=uncompressed");
Chris Larsen972d6d72015-10-20 11:29:12 -07001723 __ Beqzc(temp1, &return_true);
1724
1725 // Don't overwrite the input registers.
1726 __ Move(TMP, str);
1727 __ Move(temp3, arg);
1728
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01001729 // Assertions that must hold in order to compare strings 8 bytes at a time.
Chris Larsen972d6d72015-10-20 11:29:12 -07001730 DCHECK_ALIGNED(value_offset, 8);
1731 static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");
1732
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01001733 if (mirror::kUseStringCompression) {
1734 // For string compression, calculate the number of bytes to compare (not chars).
1735 __ Dext(temp2, temp1, 0, 1); // Extract compression flag.
1736 __ Srl(temp1, temp1, 1); // Extract length.
1737 __ Sllv(temp1, temp1, temp2); // Double the byte count if uncompressed.
1738 }
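// Worked example for the block above: the count field packs (length << 1) | flag,
// with flag = 1 for uncompressed (see the static_assert earlier). count = 0x0B
// encodes 5 uncompressed chars: temp2 = 1, temp1 = 5, and the sllv doubles that to
// 10 bytes to compare; count = 0x0A encodes 5 compressed chars and temp1 stays 5.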
1739
1740 // Loop to compare strings 8 bytes at a time starting at the beginning of the string.
1741 // Ok to do this because strings are zero-padded to kObjectAlignment.
Chris Larsen972d6d72015-10-20 11:29:12 -07001742 __ Bind(&loop);
1743 __ Ld(out, TMP, value_offset);
1744 __ Ld(temp2, temp3, value_offset);
1745 __ Bnec(out, temp2, &return_false);
1746 __ Daddiu(TMP, TMP, 8);
1747 __ Daddiu(temp3, temp3, 8);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01001748 // With string compression, we have compared 8 bytes, otherwise 4 chars.
1749 __ Addiu(temp1, temp1, mirror::kUseStringCompression ? -8 : -4);
Chris Larsen972d6d72015-10-20 11:29:12 -07001750 __ Bgtzc(temp1, &loop);
1751
1752 // Return true and exit the function.
1753 // If the loop completes without branching to return_false, the strings are equal.
1754 __ Bind(&return_true);
1755 __ LoadConst64(out, 1);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001756 __ Bc(&end);
Chris Larsen972d6d72015-10-20 11:29:12 -07001757
1758 // Return false and exit the function.
1759 __ Bind(&return_false);
1760 __ LoadConst64(out, 0);
1761 __ Bind(&end);
1762}
1763
Chris Larsen9701c2e2015-09-04 17:22:47 -07001764static void GenerateStringIndexOf(HInvoke* invoke,
1765 Mips64Assembler* assembler,
1766 CodeGeneratorMIPS64* codegen,
1767 ArenaAllocator* allocator,
1768 bool start_at_zero) {
1769 LocationSummary* locations = invoke->GetLocations();
1770 GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;
1771
1772 // Note that the null check must have been done earlier.
1773 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1774
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001775 // Check for code points > 0xFFFF. Emit a slow-path check when the value isn't known statically,
1776 // dispatch directly to the slow path for a large constant, or omit the check for a small constant or a char.
Chris Larsen9701c2e2015-09-04 17:22:47 -07001777 SlowPathCodeMIPS64* slow_path = nullptr;
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001778 HInstruction* code_point = invoke->InputAt(1);
1779 if (code_point->IsIntConstant()) {
Vladimir Markoda051082016-05-17 16:10:20 +01001780 if (!IsUint<16>(code_point->AsIntConstant()->GetValue())) {
Chris Larsen9701c2e2015-09-04 17:22:47 -07001781 // Always needs the slow-path. We could directly dispatch to it,
1782 // but this case should be rare, so for simplicity just put the
1783 // full slow-path down and branch unconditionally.
1784 slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
1785 codegen->AddSlowPath(slow_path);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001786 __ Bc(slow_path->GetEntryLabel());
Chris Larsen9701c2e2015-09-04 17:22:47 -07001787 __ Bind(slow_path->GetExitLabel());
1788 return;
1789 }
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001790 } else if (code_point->GetType() != Primitive::kPrimChar) {
Chris Larsen9701c2e2015-09-04 17:22:47 -07001791 GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
1792 __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
1793 slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
1794 codegen->AddSlowPath(slow_path);
1795 __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel()); // UTF-16 required
1796 }
1797
1798 if (start_at_zero) {
1799 DCHECK_EQ(tmp_reg, A2);
1800 // Start-index = 0.
1801 __ Clear(tmp_reg);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001802 }
1803
Serban Constantinescufc734082016-07-19 17:18:07 +01001804 codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
Roland Levillain42ad2882016-02-29 18:26:54 +00001805 CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
Chris Larsen9701c2e2015-09-04 17:22:47 -07001806
1807 if (slow_path != nullptr) {
1808 __ Bind(slow_path->GetExitLabel());
1809 }
1810}
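// For example (hypothetical call sites): indexOf('a') needs no check, since 'a' fits
// in 16 bits; indexOf(0x1F600) (a supplementary code point > 0xFFFF) branches
// unconditionally to the slow path; and indexOf(ch) for a non-constant, non-char int
// gets the Bltuc range check above.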
1811
1812// int java.lang.String.indexOf(int ch)
1813void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
1814 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001815 LocationSummary::kCallOnMainAndSlowPath,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001816 kIntrinsified);
1817 // We have a hand-crafted assembly stub that follows the runtime
1818 // calling convention. So it's best to align the inputs accordingly.
1819 InvokeRuntimeCallingConvention calling_convention;
1820 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1821 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1822 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1823 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1824
1825 // Need a temp for slow-path codepoint compare, and need to send start-index=0.
1826 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1827}
1828
1829void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001830 GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001831}
1832
1833// int java.lang.String.indexOf(int ch, int fromIndex)
1834void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
1835 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001836 LocationSummary::kCallOnMainAndSlowPath,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001837 kIntrinsified);
1838 // We have a hand-crafted assembly stub that follows the runtime
1839 // calling convention. So it's best to align the inputs accordingly.
1840 InvokeRuntimeCallingConvention calling_convention;
1841 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1842 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1843 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1844 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1845 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1846}
1847
1848void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001849 GenerateStringIndexOf(
1850 invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001851}
1852
Roland Levillaincc3839c2016-02-29 16:23:48 +00001853// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001854void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
1855 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001856 LocationSummary::kCallOnMainAndSlowPath,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001857 kIntrinsified);
1858 InvokeRuntimeCallingConvention calling_convention;
1859 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1860 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1861 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1862 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
1863 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1864 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1865}
1866
1867void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
1868 Mips64Assembler* assembler = GetAssembler();
1869 LocationSummary* locations = invoke->GetLocations();
1870
1871 GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
1872 SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
1873 codegen_->AddSlowPath(slow_path);
1874 __ Beqzc(byte_array, slow_path->GetEntryLabel());
1875
Serban Constantinescufc734082016-07-19 17:18:07 +01001876 codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
Roland Levillainf969a202016-03-09 16:14:00 +00001877 CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
Chris Larsen9701c2e2015-09-04 17:22:47 -07001878 __ Bind(slow_path->GetExitLabel());
1879}
1880
Roland Levillaincc3839c2016-02-29 16:23:48 +00001881// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001882void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
1883 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu54ff4822016-07-07 18:03:19 +01001884 LocationSummary::kCallOnMainOnly,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001885 kIntrinsified);
1886 InvokeRuntimeCallingConvention calling_convention;
1887 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1888 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1889 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1890 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1891 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1892}
1893
1894void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
Roland Levillaincc3839c2016-02-29 16:23:48 +00001895 // No need to emit code checking whether `locations->InAt(2)` is a null
1896 // pointer, as callers of the native method
1897 //
1898 // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
1899 //
1900 // all include a null check on `data` before calling that method.
Serban Constantinescufc734082016-07-19 17:18:07 +01001901 codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
Roland Levillainf969a202016-03-09 16:14:00 +00001902 CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
Chris Larsen9701c2e2015-09-04 17:22:47 -07001903}
1904
Roland Levillainf969a202016-03-09 16:14:00 +00001905// java.lang.StringFactory.newStringFromString(String toCopy)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001906void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
1907 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001908 LocationSummary::kCallOnMainAndSlowPath,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001909 kIntrinsified);
1910 InvokeRuntimeCallingConvention calling_convention;
1911 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Chris Larsen9701c2e2015-09-04 17:22:47 -07001912 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1913 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1914}
1915
1916void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
1917 Mips64Assembler* assembler = GetAssembler();
1918 LocationSummary* locations = invoke->GetLocations();
1919
1920 GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
1921 SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
1922 codegen_->AddSlowPath(slow_path);
1923 __ Beqzc(string_to_copy, slow_path->GetEntryLabel());
1924
Serban Constantinescufc734082016-07-19 17:18:07 +01001925 codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc(), slow_path);
Roland Levillainf969a202016-03-09 16:14:00 +00001926 CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
Chris Larsen9701c2e2015-09-04 17:22:47 -07001927 __ Bind(slow_path->GetExitLabel());
1928}
1929
Chris Larsenddec7f92016-02-16 12:35:04 -08001930static void GenIsInfinite(LocationSummary* locations,
1931 bool is64bit,
1932 Mips64Assembler* assembler) {
1933 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
1934 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1935
1936 if (is64bit) {
1937 __ ClassD(FTMP, in);
1938 } else {
1939 __ ClassS(FTMP, in);
1940 }
1941 __ Mfc1(out, FTMP);
1942 __ Andi(out, out, kPositiveInfinity | kNegativeInfinity);
1943 __ Sltu(out, ZERO, out);
1944}
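// Why this works: the MIPS64R6 class.s/class.d instructions produce a bit mask with
// one bit set per IEEE-754 class of the input, so masking with
// (kPositiveInfinity | kNegativeInfinity) is non-zero only for +/- infinity, and the
// final sltu collapses that to the 0/1 boolean result.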
1945
1946// boolean java.lang.Float.isInfinite(float)
1947void IntrinsicLocationsBuilderMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
1948 CreateFPToIntLocations(arena_, invoke);
1949}
1950
1951void IntrinsicCodeGeneratorMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
1952 GenIsInfinite(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
1953}
1954
1955// boolean java.lang.Double.isInfinite(double)
1956void IntrinsicLocationsBuilderMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
1957 CreateFPToIntLocations(arena_, invoke);
1958}
1959
1960void IntrinsicCodeGeneratorMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
1961 GenIsInfinite(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
1962}
1963
Chris Larsene3660592016-11-09 11:13:42 -08001964// void java.lang.String.getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin)
1965void IntrinsicLocationsBuilderMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
1966 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Chris Larsen366d4332017-03-23 09:02:56 -07001967 LocationSummary::kNoCall,
Chris Larsene3660592016-11-09 11:13:42 -08001968 kIntrinsified);
1969 locations->SetInAt(0, Location::RequiresRegister());
1970 locations->SetInAt(1, Location::RequiresRegister());
1971 locations->SetInAt(2, Location::RequiresRegister());
1972 locations->SetInAt(3, Location::RequiresRegister());
1973 locations->SetInAt(4, Location::RequiresRegister());
1974
Chris Larsen366d4332017-03-23 09:02:56 -07001975 locations->AddTemp(Location::RequiresRegister());
1976 locations->AddTemp(Location::RequiresRegister());
1977 locations->AddTemp(Location::RequiresRegister());
Chris Larsene3660592016-11-09 11:13:42 -08001978}
1979
1980void IntrinsicCodeGeneratorMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
1981 Mips64Assembler* assembler = GetAssembler();
1982 LocationSummary* locations = invoke->GetLocations();
1983
1984 // Check assumption that sizeof(Char) is 2 (used in scaling below).
1985 const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
1986 DCHECK_EQ(char_size, 2u);
1987 const size_t char_shift = Primitive::ComponentSizeShift(Primitive::kPrimChar);
1988
1989 GpuRegister srcObj = locations->InAt(0).AsRegister<GpuRegister>();
1990 GpuRegister srcBegin = locations->InAt(1).AsRegister<GpuRegister>();
1991 GpuRegister srcEnd = locations->InAt(2).AsRegister<GpuRegister>();
1992 GpuRegister dstObj = locations->InAt(3).AsRegister<GpuRegister>();
1993 GpuRegister dstBegin = locations->InAt(4).AsRegister<GpuRegister>();
1994
1995 GpuRegister dstPtr = locations->GetTemp(0).AsRegister<GpuRegister>();
Chris Larsene3660592016-11-09 11:13:42 -08001996 GpuRegister srcPtr = locations->GetTemp(1).AsRegister<GpuRegister>();
Chris Larsene3660592016-11-09 11:13:42 -08001997 GpuRegister numChrs = locations->GetTemp(2).AsRegister<GpuRegister>();
Chris Larsene3660592016-11-09 11:13:42 -08001998
1999 Mips64Label done;
Chris Larsen366d4332017-03-23 09:02:56 -07002000 Mips64Label loop;
Chris Larsene3660592016-11-09 11:13:42 -08002001
2002 // Location of data in char array buffer.
2003 const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();
2004
2005 // Get offset of value field within a string object.
2006 const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
2007
2008 __ Beqc(srcEnd, srcBegin, &done); // No characters to move.
2009
2010 // Calculate number of characters to be copied.
2011 __ Dsubu(numChrs, srcEnd, srcBegin);
2012
2013 // Calculate destination address.
2014 __ Daddiu(dstPtr, dstObj, data_offset);
2015 __ Dlsa(dstPtr, dstBegin, dstPtr, char_shift);
2016
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002017 if (mirror::kUseStringCompression) {
2018 Mips64Label uncompressed_copy, compressed_loop;
2019 const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2020 // Load count field and extract compression flag.
2021 __ LoadFromOffset(kLoadWord, TMP, srcObj, count_offset);
2022 __ Dext(TMP, TMP, 0, 1);
2023
Chris Larsen366d4332017-03-23 09:02:56 -07002024 // If the string is uncompressed, use the uncompressed path.
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002025 __ Bnezc(TMP, &uncompressed_copy);
2026
2027 // Copy loop for compressed src, widening one 8-bit character to a 16-bit char at a time.
2028 __ Daddu(srcPtr, srcObj, srcBegin);
2029 __ Bind(&compressed_loop);
2030 __ LoadFromOffset(kLoadUnsignedByte, TMP, srcPtr, value_offset);
2031 __ StoreToOffset(kStoreHalfword, TMP, dstPtr, 0);
2032 __ Daddiu(numChrs, numChrs, -1);
2033 __ Daddiu(srcPtr, srcPtr, 1);
2034 __ Daddiu(dstPtr, dstPtr, 2);
2035 __ Bnezc(numChrs, &compressed_loop);
2036
2037 __ Bc(&done);
2038 __ Bind(&uncompressed_copy);
2039 }
2040
Chris Larsene3660592016-11-09 11:13:42 -08002041 // Calculate source address.
2042 __ Daddiu(srcPtr, srcObj, value_offset);
2043 __ Dlsa(srcPtr, srcBegin, srcPtr, char_shift);
2044
Chris Larsen366d4332017-03-23 09:02:56 -07002045 __ Bind(&loop);
2046 __ Lh(AT, srcPtr, 0);
2047 __ Daddiu(numChrs, numChrs, -1);
2048 __ Daddiu(srcPtr, srcPtr, char_size);
2049 __ Sh(AT, dstPtr, 0);
2050 __ Daddiu(dstPtr, dstPtr, char_size);
2051 __ Bnezc(numChrs, &loop);
Chris Larsene3660592016-11-09 11:13:42 -08002052
2053 __ Bind(&done);
2054}
2055
Chris Larsen5863f852017-03-23 15:41:37 -07002056// static void java.lang.System.arraycopy(Object src, int srcPos,
2057// Object dest, int destPos,
2058// int length)
2059void IntrinsicLocationsBuilderMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
2060 HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
2061 HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
2062 HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
2063
2064 // As long as we are checking, we might as well check to see if the src and dest
2065 // positions are >= 0.
2066 if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
2067 (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
2068 // We will have to fail anyway.
2069 return;
2070 }
2071
2072 // And since we are already checking, check the length too.
2073 if (length != nullptr) {
2074 int32_t len = length->GetValue();
2075 if (len < 0) {
2076 // Just call as normal.
2077 return;
2078 }
2079 }
2080
2081 // Okay, it is safe to generate inline code.
2082 LocationSummary* locations =
2083 new (arena_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
2084 // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
2085 locations->SetInAt(0, Location::RequiresRegister());
2086 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
2087 locations->SetInAt(2, Location::RequiresRegister());
2088 locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
2089 locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));
2090
2091 locations->AddTemp(Location::RequiresRegister());
2092 locations->AddTemp(Location::RequiresRegister());
2093 locations->AddTemp(Location::RequiresRegister());
2094}
2095
2096// Utility routine to verify that "length(input) - pos >= length"
2097static void EnoughItems(Mips64Assembler* assembler,
2098 GpuRegister length_input_minus_pos,
2099 Location length,
2100 SlowPathCodeMIPS64* slow_path) {
2101 if (length.IsConstant()) {
2102 int32_t length_constant = length.GetConstant()->AsIntConstant()->GetValue();
2103
2104 if (IsInt<16>(length_constant)) {
2105 __ Slti(TMP, length_input_minus_pos, length_constant);
2106 __ Bnezc(TMP, slow_path->GetEntryLabel());
2107 } else {
2108 __ LoadConst32(TMP, length_constant);
2109 __ Bltc(length_input_minus_pos, TMP, slow_path->GetEntryLabel());
2110 }
2111 } else {
2112 __ Bltc(length_input_minus_pos, length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
2113 }
2114}
2115
2116static void CheckPosition(Mips64Assembler* assembler,
2117 Location pos,
2118 GpuRegister input,
2119 Location length,
2120 SlowPathCodeMIPS64* slow_path,
2121 bool length_is_input_length = false) {
2122 // Where is the length in the Array?
2123 const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();
2124
2125 // Calculate length(input) - pos.
2126 if (pos.IsConstant()) {
2127 int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
2128 if (pos_const == 0) {
2129 if (!length_is_input_length) {
2130 // Check that length(input) >= length.
2131 __ LoadFromOffset(kLoadWord, AT, input, length_offset);
2132 EnoughItems(assembler, AT, length, slow_path);
2133 }
2134 } else {
2135 // Check that (length(input) - pos) >= zero.
2136 __ LoadFromOffset(kLoadWord, AT, input, length_offset);
2137 DCHECK_GT(pos_const, 0);
2138 __ Addiu32(AT, AT, -pos_const);
2139 __ Bltzc(AT, slow_path->GetEntryLabel());
2140
2141 // Verify that (length(input) - pos) >= length.
2142 EnoughItems(assembler, AT, length, slow_path);
2143 }
2144 } else if (length_is_input_length) {
2145 // The only way the copy can succeed is if pos is zero.
2146 GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
2147 __ Bnezc(pos_reg, slow_path->GetEntryLabel());
2148 } else {
2149 // Verify that pos >= 0.
2150 GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
2151 __ Bltzc(pos_reg, slow_path->GetEntryLabel());
2152
2153 // Check that (length(input) - pos) >= zero.
2154 __ LoadFromOffset(kLoadWord, AT, input, length_offset);
2155 __ Subu(AT, AT, pos_reg);
2156 __ Bltzc(AT, slow_path->GetEntryLabel());
2157
2158 // Verify that (length(input) - pos) >= length.
2159 EnoughItems(assembler, AT, length, slow_path);
2160 }
2161}
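// Taken together, the CheckPosition calls in the intrinsic below implement roughly
// the standard System.arraycopy precondition (bailing to the slow path, which
// throws, instead of throwing directly):
//
//   if (srcPos < 0 || destPos < 0 || length < 0 ||
//       srcPos > src.length - length || destPos > dest.length - length) {
//     goto slow_path;
//   }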
2162
2163void IntrinsicCodeGeneratorMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
2164 Mips64Assembler* assembler = GetAssembler();
2165 LocationSummary* locations = invoke->GetLocations();
2166
2167 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
2168 Location src_pos = locations->InAt(1);
2169 GpuRegister dest = locations->InAt(2).AsRegister<GpuRegister>();
2170 Location dest_pos = locations->InAt(3);
2171 Location length = locations->InAt(4);
2172
2173 Mips64Label loop;
2174
2175 GpuRegister dest_base = locations->GetTemp(0).AsRegister<GpuRegister>();
2176 GpuRegister src_base = locations->GetTemp(1).AsRegister<GpuRegister>();
2177 GpuRegister count = locations->GetTemp(2).AsRegister<GpuRegister>();
2178
2179 SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
2180 codegen_->AddSlowPath(slow_path);
2181
2182 // Bail out if the source and destination are the same (to handle overlap).
2183 __ Beqc(src, dest, slow_path->GetEntryLabel());
2184
2185 // Bail out if the source is null.
2186 __ Beqzc(src, slow_path->GetEntryLabel());
2187
2188 // Bail out if the destination is null.
2189 __ Beqzc(dest, slow_path->GetEntryLabel());
2190
2191 // Load length into register for count.
2192 if (length.IsConstant()) {
2193 __ LoadConst32(count, length.GetConstant()->AsIntConstant()->GetValue());
2194 } else {
2195 // If the length is negative, bail out.
2196 // We have already checked in the LocationsBuilder for the constant case.
2197 __ Bltzc(length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
2198
2199 __ Move(count, length.AsRegister<GpuRegister>());
2200 }
2201
2202 // Validity checks: source.
2203 CheckPosition(assembler, src_pos, src, Location::RegisterLocation(count), slow_path);
2204
2205 // Validity checks: dest.
2206 CheckPosition(assembler, dest_pos, dest, Location::RegisterLocation(count), slow_path);
2207
2208 // If count is zero, we're done.
2209 __ Beqzc(count, slow_path->GetExitLabel());
2210
2211 // Okay, everything checks out. Finally time to do the copy.
2212 // Check assumption that sizeof(Char) is 2 (used in scaling below).
2213 const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
2214 DCHECK_EQ(char_size, 2u);
2215
2216 const size_t char_shift = Primitive::ComponentSizeShift(Primitive::kPrimChar);
2217
2218 const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();
2219
2220 // Calculate source and destination addresses.
2221 if (src_pos.IsConstant()) {
2222 int32_t src_pos_const = src_pos.GetConstant()->AsIntConstant()->GetValue();
2223
2224 __ Daddiu64(src_base, src, data_offset + char_size * src_pos_const, TMP);
2225 } else {
2226 __ Daddiu64(src_base, src, data_offset, TMP);
2227 __ Dlsa(src_base, src_pos.AsRegister<GpuRegister>(), src_base, char_shift);
2228 }
2229 if (dest_pos.IsConstant()) {
2230 int32_t dest_pos_const = dest_pos.GetConstant()->AsIntConstant()->GetValue();
2231
2232 __ Daddiu64(dest_base, dest, data_offset + char_size * dest_pos_const, TMP);
2233 } else {
2234 __ Daddiu64(dest_base, dest, data_offset, TMP);
2235 __ Dlsa(dest_base, dest_pos.AsRegister<GpuRegister>(), dest_base, char_shift);
2236 }
2237
2238 __ Bind(&loop);
2239 __ Lh(TMP, src_base, 0);
2240 __ Daddiu(src_base, src_base, char_size);
2241 __ Daddiu(count, count, -1);
2242 __ Sh(TMP, dest_base, 0);
2243 __ Daddiu(dest_base, dest_base, char_size);
2244 __ Bnezc(count, &loop);
2245
2246 __ Bind(slow_path->GetExitLabel());
2247}
2248
Chris Larsenab922502016-04-15 10:00:56 -07002249static void GenHighestOneBit(LocationSummary* locations,
2250 Primitive::Type type,
2251 Mips64Assembler* assembler) {
2252 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << PrettyDescriptor(type);
2253
2254 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
2255 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2256
2257 if (type == Primitive::kPrimLong) {
2258 __ Dclz(TMP, in);
2259 __ LoadConst64(AT, INT64_C(0x8000000000000000));
Chris Larsen68db2a92016-09-14 15:41:29 -07002260 __ Dsrlv(AT, AT, TMP);
Chris Larsenab922502016-04-15 10:00:56 -07002261 } else {
2262 __ Clz(TMP, in);
2263 __ LoadConst32(AT, 0x80000000);
Chris Larsen68db2a92016-09-14 15:41:29 -07002264 __ Srlv(AT, AT, TMP);
Chris Larsenab922502016-04-15 10:00:56 -07002265 }
2266 // For either value of "type", when "in" is zero, "out" should also
2267 // be zero. Without this extra "and" operation, when "in" is zero,
2268 // "out" would be either Integer.MIN_VALUE or Long.MIN_VALUE because
2269 // the MIPS logical shift operations "dsrlv" and "srlv" don't use
2270 // the shift amount (TMP) directly; they use either (TMP % 64) or
2271 // (TMP % 32), respectively.
Chris Larsen68db2a92016-09-14 15:41:29 -07002272 __ And(out, AT, in);
Chris Larsenab922502016-04-15 10:00:56 -07002273}
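// Worked example (32-bit): in = 0x50 has its highest set bit at position 6.
// clz(0x50) = 25, 0x80000000 >>> 25 = 0x40, and 0x40 & 0x50 = 0x40. For in = 0,
// clz = 32, so srlv shifts by 32 % 32 = 0, AT stays 0x80000000, and the final
// "and" with zero yields the required 0.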
2274
2275// int java.lang.Integer.highestOneBit(int)
2276void IntrinsicLocationsBuilderMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
2277 CreateIntToIntLocations(arena_, invoke);
2278}
2279
2280void IntrinsicCodeGeneratorMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
2281 GenHighestOneBit(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
2282}
2283
2284// long java.lang.Long.highestOneBit(long)
2285void IntrinsicLocationsBuilderMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
2286 CreateIntToIntLocations(arena_, invoke);
2287}
2288
2289void IntrinsicCodeGeneratorMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
2290 GenHighestOneBit(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
2291}
2292
2293static void GenLowestOneBit(LocationSummary* locations,
2294 Primitive::Type type,
2295 Mips64Assembler* assembler) {
2296 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << PrettyDescriptor(type);
2297
2298 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
2299 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2300
2301 if (type == Primitive::kPrimLong) {
2302 __ Dsubu(TMP, ZERO, in);
2303 } else {
2304 __ Subu(TMP, ZERO, in);
2305 }
2306 __ And(out, TMP, in);
2307}
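// This is the classic two's-complement identity x & -x: negating x flips every bit
// above the lowest set bit while preserving that bit. E.g. for x = 0b01101000,
// -x = ...10011000, so x & -x = 0b00001000.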
2308
2309// int java.lang.Integer.lowestOneBit(int)
2310void IntrinsicLocationsBuilderMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
2311 CreateIntToIntLocations(arena_, invoke);
2312}
2313
2314void IntrinsicCodeGeneratorMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
2315 GenLowestOneBit(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
2316}
2317
2318// long java.lang.Long.lowestOneBit(long)
2319void IntrinsicLocationsBuilderMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
2320 CreateIntToIntLocations(arena_, invoke);
2321}
2322
2323void IntrinsicCodeGeneratorMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
2324 GenLowestOneBit(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
2325}
2326
Chris Larsen74c20582017-03-28 22:17:35 -07002327static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
2328 LocationSummary* locations = new (arena) LocationSummary(invoke,
2329 LocationSummary::kCallOnMainOnly,
2330 kIntrinsified);
2331 InvokeRuntimeCallingConvention calling_convention;
2332
2333 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2334 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimDouble));
2335}
2336
2337static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
2338 LocationSummary* locations = new (arena) LocationSummary(invoke,
2339 LocationSummary::kCallOnMainOnly,
2340 kIntrinsified);
2341 InvokeRuntimeCallingConvention calling_convention;
2342
2343 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2344 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2345 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimDouble));
2346}
2347
2348static void GenFPToFPCall(HInvoke* invoke,
2349 CodeGeneratorMIPS64* codegen,
2350 QuickEntrypointEnum entry) {
2351 LocationSummary* locations = invoke->GetLocations();
2352 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
2353 DCHECK_EQ(in, F12);
2354 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
2355 DCHECK_EQ(out, F0);
2356
2357 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2358}
2359
2360static void GenFPFPToFPCall(HInvoke* invoke,
2361 CodeGeneratorMIPS64* codegen,
2362 QuickEntrypointEnum entry) {
2363 LocationSummary* locations = invoke->GetLocations();
2364 FpuRegister in0 = locations->InAt(0).AsFpuRegister<FpuRegister>();
2365 DCHECK_EQ(in0, F12);
2366 FpuRegister in1 = locations->InAt(1).AsFpuRegister<FpuRegister>();
2367 DCHECK_EQ(in1, F13);
2368 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
2369 DCHECK_EQ(out, F0);
2370
2371 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2372}
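// These helpers simply forward to the libm-backed quick entrypoints. The DCHECKs pin
// the inputs to F12/F13 and the result to F0, matching the MIPS64 floating-point
// argument/return convention, so no register shuffling is needed around the call.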
2373
2374// static double java.lang.Math.cos(double a)
2375void IntrinsicLocationsBuilderMIPS64::VisitMathCos(HInvoke* invoke) {
2376 CreateFPToFPCallLocations(arena_, invoke);
2377}
2378
2379void IntrinsicCodeGeneratorMIPS64::VisitMathCos(HInvoke* invoke) {
2380 GenFPToFPCall(invoke, codegen_, kQuickCos);
2381}
2382
2383// static double java.lang.Math.sin(double a)
2384void IntrinsicLocationsBuilderMIPS64::VisitMathSin(HInvoke* invoke) {
2385 CreateFPToFPCallLocations(arena_, invoke);
2386}
2387
2388void IntrinsicCodeGeneratorMIPS64::VisitMathSin(HInvoke* invoke) {
2389 GenFPToFPCall(invoke, codegen_, kQuickSin);
2390}
2391
2392// static double java.lang.Math.acos(double a)
2393void IntrinsicLocationsBuilderMIPS64::VisitMathAcos(HInvoke* invoke) {
2394 CreateFPToFPCallLocations(arena_, invoke);
2395}
2396
2397void IntrinsicCodeGeneratorMIPS64::VisitMathAcos(HInvoke* invoke) {
2398 GenFPToFPCall(invoke, codegen_, kQuickAcos);
2399}
2400
2401// static double java.lang.Math.asin(double a)
2402void IntrinsicLocationsBuilderMIPS64::VisitMathAsin(HInvoke* invoke) {
2403 CreateFPToFPCallLocations(arena_, invoke);
2404}
2405
2406void IntrinsicCodeGeneratorMIPS64::VisitMathAsin(HInvoke* invoke) {
2407 GenFPToFPCall(invoke, codegen_, kQuickAsin);
2408}
2409
2410// static double java.lang.Math.atan(double a)
2411void IntrinsicLocationsBuilderMIPS64::VisitMathAtan(HInvoke* invoke) {
2412 CreateFPToFPCallLocations(arena_, invoke);
2413}
2414
2415void IntrinsicCodeGeneratorMIPS64::VisitMathAtan(HInvoke* invoke) {
2416 GenFPToFPCall(invoke, codegen_, kQuickAtan);
2417}
2418
2419// static double java.lang.Math.atan2(double y, double x)
2420void IntrinsicLocationsBuilderMIPS64::VisitMathAtan2(HInvoke* invoke) {
2421 CreateFPFPToFPCallLocations(arena_, invoke);
2422}
2423
2424void IntrinsicCodeGeneratorMIPS64::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickAtan2);
}

// static double java.lang.Math.cbrt(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

// static double java.lang.Math.cosh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

// static double java.lang.Math.exp(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

// static double java.lang.Math.expm1(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

// static double java.lang.Math.hypot(double x, double y)
void IntrinsicLocationsBuilderMIPS64::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickHypot);
}

// static double java.lang.Math.log(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

// static double java.lang.Math.log10(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

// static double java.lang.Math.nextAfter(double start, double direction)
void IntrinsicLocationsBuilderMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

// static double java.lang.Math.sinh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

// static double java.lang.Math.tan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

// static double java.lang.Math.tanh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}

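// All of the transcendental Math intrinsics above share one shape: nothing is
// expanded inline; the generated code simply calls the matching quick
// entrypoint. A minimal sketch of what the shared helper does (the real
// GenFPToFPCall is defined earlier in this file; F12/F0 are assumed to be the
// runtime calling convention's double argument and result registers):
//
//   static void GenFPToFPCall(HInvoke* invoke,
//                             CodeGeneratorMIPS64* codegen,
//                             QuickEntrypointEnum entry) {
//     LocationSummary* locations = invoke->GetLocations();
//     DCHECK_EQ(locations->InAt(0).AsFpuRegister<FpuRegister>(), F12);
//     DCHECK_EQ(locations->Out().AsFpuRegister<FpuRegister>(), F0);
//     codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
//   }
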
// static java.lang.Integer java.lang.Integer.valueOf(int i)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConvention calling_convention;
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      calling_convention.GetReturnLocation(Primitive::kPrimNot),
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

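// Note: ComputeIntegerValueOfLocations is shared across back ends; roughly,
// it intrinsifies the invoke only when the boot image's IntegerCache (the
// backing store of java.lang.Integer$IntegerCache, by default covering the
// range [-128, 127]) is usable, and it requests the runtime calling
// convention because the out-of-range case below must call the allocator.
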
void IntrinsicCodeGeneratorMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo();
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  InstructionCodeGeneratorMIPS64* icodegen =
      down_cast<InstructionCodeGeneratorMIPS64*>(codegen_->GetInstructionVisitor());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  InvokeRuntimeCallingConvention calling_convention;
  if (invoke->InputAt(0)->IsConstant()) {
    int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
    if (value >= info.low && value <= info.high) {
      // Just embed the j.l.Integer in the code.
      ScopedObjectAccess soa(Thread::Current());
      mirror::Object* boxed = info.cache->Get(value + (-info.low));
      DCHECK(boxed != nullptr && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boxed));
      uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(boxed));
      __ LoadConst64(out, address);
    } else {
      // Allocate and initialize a new j.l.Integer.
      // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
      // JIT object table.
      uint32_t address =
          dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
      __ LoadConst64(calling_convention.GetRegisterAt(0), address);
      codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
      CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
      __ StoreConstToOffset(kStoreWord, value, out, info.value_offset, TMP);
      // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
      // one.
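      // The kStoreStore barrier below is what publishes `value` before the
      // new reference can be seen by another thread, standing in for the
      // constructor barrier that a final field would normally get under the
      // Java memory model.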
      icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    }
  } else {
    GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
    Mips64Label allocate, done;
    int32_t count = static_cast<uint32_t>(info.high) - info.low + 1;

    // Is (info.low <= in) && (in <= info.high)?
    __ Addiu32(out, in, -info.low);
    // As unsigned quantities, is out < (info.high - info.low + 1)?
    __ LoadConst32(AT, count);
    // Branch if out >= (info.high - info.low + 1).
    // This means that "in" is outside of the range [info.low, info.high].
    __ Bgeuc(out, AT, &allocate);
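    // The single unsigned compare above folds both bounds checks into one:
    // e.g. with the default cache range [-128, 127], count is 256, and an
    // input of -129 gives out = -129 - (-128) = -1 = 0xffffffff, which as an
    // unsigned quantity is >= 256, so Bgeuc branches to the allocation path.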

    // If the value is within the bounds, load the j.l.Integer directly from the array.
    uint32_t data_offset = mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
    uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.cache));
    __ LoadConst64(TMP, data_offset + address);
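    // Dlsa below computes out = TMP + (out << 2): TMP holds the address of
    // the cache array's first element and `out` the zero-based index, so the
    // sum is &cache[index] for 4-byte compressed heap references. Lwu then
    // loads the boxed reference zero-extended.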
    __ Dlsa(out, out, TMP, TIMES_4);
    __ Lwu(out, out, 0);
    __ MaybeUnpoisonHeapReference(out);
    __ Bc(&done);

    __ Bind(&allocate);
    // Otherwise allocate and initialize a new j.l.Integer.
    address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
    __ LoadConst64(calling_convention.GetRegisterAt(0), address);
    codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
    __ StoreToOffset(kStoreWord, in, out, info.value_offset);
    // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
    // one.
    icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    __ Bind(&done);
  }
}
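
// Schematically, the non-constant path above assembles to something like the
// following (mnemonics simplified; `low` and `count` come from the boot
// image's IntegerCache; A0 is the first runtime-convention argument
// register):
//
//   addiu out, in, -low          # out = zero-based index into the cache
//   li    AT, count
//   bgeuc out, AT, allocate     # one unsigned compare checks both bounds
//   li    TMP, cache + data_offset
//   dlsa  out, out, TMP, 2      # out = &cache[index]
//   lwu   out, 0(out)           # load the boxed j.l.Integer reference
//   bc    done
// allocate:
//   li    A0, integer_class
//   <call kQuickAllocObjectInitialized, store the int value, emit barrier>
// done: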

UNIMPLEMENTED_INTRINSIC(MIPS64, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(MIPS64, SystemArrayCopy)

UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOf)
UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferAppend)
UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferLength)
UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferToString)
UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppend)
UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderLength)
UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderToString)

// Java 1.8 intrinsics.
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(MIPS64)

#undef __

}  // namespace mips64
}  // namespace art