/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_mips64.h"

#include <limits>

#include "arch/mips64/instruction_set_features_mips64.h"
#include "art_method.h"
#include "code_generator_mips64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/mips64/assembler_mips64.h"
#include "utils/mips64/constants_mips64.h"

namespace art {

namespace mips64 {

IntrinsicLocationsBuilderMIPS64::IntrinsicLocationsBuilderMIPS64(CodeGeneratorMIPS64* codegen)
  : arena_(codegen->GetGraph()->GetArena()) {
}

Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() {
  return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorMIPS64* codegen) {
  if (!trg.IsValid()) {
    DCHECK_EQ(type, Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
    if (trg_reg != V0) {
      __ Move(trg_reg, V0);
    }
  } else {
    FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
    if (trg_reg != F0) {
      if (type == Primitive::kPrimFloat) {
        __ MovS(trg_reg, F0);
      } else {
        __ MovD(trg_reg, F0);
      }
    }
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
  InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow path for fallback (calling the managed code to handle the
// intrinsic) in an intrinsified call. This will copy the arguments
// into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations
//       given by the invoke's location summary. If an intrinsic
//       modifies those locations before a slow-path call, they must be
//       restored!
class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit IntrinsicSlowPathMIPS64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(A0));
      codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
};

#undef __

bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    __ Dmfc1(out, in);
  } else {
    __ Mfc1(out, in);
  }
}

// long java.lang.Double.doubleToRawLongBits(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}

// int java.lang.Float.floatToRawIntBits(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  if (is64bit) {
    __ Dmtc1(in, out);
  } else {
    __ Mtc1(in, out);
  }
}

// double java.lang.Double.longBitsToDouble(long)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

// float java.lang.Float.intBitsToFloat(int)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  switch (type) {
    case Primitive::kPrimShort:
      __ Dsbh(out, in);
      __ Seh(out, out);
      break;
    case Primitive::kPrimInt:
      __ Rotr(out, in, 16);
      __ Wsbh(out, out);
      break;
    case Primitive::kPrimLong:
      __ Dsbh(out, in);
      __ Dshd(out, out);
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}
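
// A worked example of the byte-reversal sequences above: for an int with
// bytes [b3 b2 b1 b0] (b3 most significant), ROTR by 16 yields
// [b1 b0 b3 b2], and WSBH (swap bytes within each halfword) then yields
// [b0 b1 b2 b3], a full byte reversal. For longs, DSBH swaps the bytes
// within each of the four halfwords and DSHD swaps the halfwords within
// the doubleword, which composes to the same reversal over 64 bits.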

// int java.lang.Integer.reverseBytes(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.reverseBytes(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

// short java.lang.Short.reverseBytes(short)
void IntrinsicLocationsBuilderMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

static void GenNumberOfLeadingZeroes(LocationSummary* locations,
                                     bool is64bit,
                                     Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    __ Dclz(out, in);
  } else {
    __ Clz(out, in);
  }
}

// int java.lang.Integer.numberOfLeadingZeros(int i)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), false, GetAssembler());
}

// int java.lang.Long.numberOfLeadingZeros(long i)
void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), true, GetAssembler());
}

static void GenNumberOfTrailingZeroes(LocationSummary* locations,
                                      bool is64bit,
                                      Mips64Assembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
    __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
  } else {
    __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
    __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
  }
}
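
// The sequence above computes trailing zeros as "reverse all bits, then
// count leading zeros": the byte reversal (DSBH/DSHD, or ROTR/WSBH for
// 32 bits) followed by (D)BITSWAP, which reverses the bits within each
// byte, reverses the whole register, so the final (D)CLZ counts the
// original value's trailing zeros. For example, for the int value 8
// (bit 3 set), the reversal sets only bit 28, and CLZ returns 3.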

// int java.lang.Integer.numberOfTrailingZeros(int i)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), false, GetAssembler());
}

// int java.lang.Long.numberOfTrailingZeros(long i)
void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), true, GetAssembler());
}

static void GenRotateRight(HInvoke* invoke,
                           Primitive::Type type,
                           Mips64Assembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = invoke->GetLocations();
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (invoke->InputAt(1)->IsIntConstant()) {
    uint32_t shift = static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue());
    if (type == Primitive::kPrimInt) {
      shift &= 0x1f;
      __ Rotr(out, in, shift);
    } else {
      shift &= 0x3f;
      if (shift < 32) {
        __ Drotr(out, in, shift);
      } else {
        shift &= 0x1f;
        __ Drotr32(out, in, shift);
      }
    }
  } else {
    GpuRegister shamt = locations->InAt(1).AsRegister<GpuRegister>();
    if (type == Primitive::kPrimInt) {
      __ Rotrv(out, in, shamt);
    } else {
      __ Drotrv(out, in, shamt);
    }
  }
}
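
// For constant rotate amounts the code above folds everything at compile
// time: DROTR can only encode shift amounts 0-31, so a 64-bit rotate by
// s >= 32 is emitted as DROTR32, which rotates by (encoded amount + 32).
// Masking s with 0x1f is equivalent to s - 32 here because s is already
// known to be in 32..63. For example, rotateRight(x, 40) becomes DROTR32
// with an encoded amount of 8.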

// int java.lang.Integer.rotateRight(int i, int distance)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerRotateRight(HInvoke* invoke) {
  GenRotateRight(invoke, Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.rotateRight(long i, int distance)
void IntrinsicLocationsBuilderMIPS64::VisitLongRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongRotateRight(HInvoke* invoke) {
  GenRotateRight(invoke, Primitive::kPrimLong, GetAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       Mips64Assembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (type == Primitive::kPrimInt) {
    __ Rotr(out, in, 16);
    __ Wsbh(out, out);
    __ Bitswap(out, out);
  } else {
    __ Dsbh(out, in);
    __ Dshd(out, out);
    __ Dbitswap(out, out);
  }
}

// int java.lang.Integer.reverse(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.reverse(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  if (is64bit) {
    __ AbsD(out, in);
  } else {
    __ AbsS(out, in);
  }
}

// double java.lang.Math.abs(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
}

// float java.lang.Math.abs(float)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    __ Dsra32(AT, in, 31);
    __ Xor(out, in, AT);
    __ Dsubu(out, out, AT);
  } else {
    __ Sra(AT, in, 31);
    __ Xor(out, in, AT);
    __ Subu(out, out, AT);
  }
}
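
// The branchless abs above uses the identity abs(x) = (x ^ m) - m, where
// m = x >> 31 (arithmetic shift; for longs DSRA32 by 31 shifts by 63 in
// total). For x >= 0, m == 0 and the XOR and SUB are no-ops; for x < 0,
// m == -1, so x ^ m == ~x and ~x - (-1) == -x. For example, x = -5 gives
// m = -1, x ^ m = 4, and 4 - (-1) = 5.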

// int java.lang.Math.abs(int)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetAssembler());
}

// long java.lang.Math.abs(long)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        Mips64Assembler* assembler) {
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  if (is_double) {
    if (is_min) {
      __ MinD(out, lhs, rhs);
    } else {
      __ MaxD(out, lhs, rhs);
    }
  } else {
    if (is_min) {
      __ MinS(out, lhs, rhs);
    } else {
      __ MaxS(out, lhs, rhs);
    }
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

// double java.lang.Math.min(double, double)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetAssembler());
}

// float java.lang.Math.min(float, float)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetAssembler());
}

// double java.lang.Math.max(double, double)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetAssembler());
}

// float java.lang.Math.max(float, float)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      Mips64Assembler* assembler) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // Some architectures, such as ARM and MIPS (prior to r6), have a
  // conditional move instruction which only changes the target
  // (output) register if the condition is true (MIPS prior to r6 had
  // MOVF, MOVT, and MOVZ). The SELEQZ and SELNEZ instructions always
  // change the target (output) register. If the condition is true the
  // output register gets the contents of the "rs" register; otherwise,
  // the output register is set to zero. One consequence of this is
  // that to implement something like "rd = c==0 ? rs : rt" MIPS64r6
  // needs to use a pair of SELEQZ/SELNEZ instructions. After
  // executing this pair of instructions one of the output registers
  // from the pair will necessarily contain zero. Then the code ORs the
  // output registers from the SELEQZ/SELNEZ instructions to get the
  // final result.
  //
  // The initial test to see if the output register is the same as the
  // first input register is needed to make sure that the value in the
  // first input register isn't clobbered before we've finished
  // computing the output value. The logic in the corresponding else
  // clause performs the same task but makes sure the second input
  // register isn't clobbered in the event that it's the same register
  // as the output register; the else clause also handles the case
  // where the output register is distinct from both the first and the
  // second input registers.
  if (out == lhs) {
    __ Slt(AT, rhs, lhs);
    if (is_min) {
      __ Seleqz(out, lhs, AT);
      __ Selnez(AT, rhs, AT);
    } else {
      __ Selnez(out, lhs, AT);
      __ Seleqz(AT, rhs, AT);
    }
  } else {
    __ Slt(AT, lhs, rhs);
    if (is_min) {
      __ Seleqz(out, rhs, AT);
      __ Selnez(AT, lhs, AT);
    } else {
      __ Selnez(out, rhs, AT);
      __ Seleqz(AT, lhs, AT);
    }
  }
  __ Or(out, out, AT);
}
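
// A concrete trace of the SELEQZ/SELNEZ pairing, taking min(7, 3) through
// the else clause (out distinct from lhs): SLT yields AT = (7 < 3) = 0,
// SELEQZ writes out = rhs = 3 because AT == 0, SELNEZ writes AT = 0, and
// the final OR leaves out = 3. Swapping the operands, min(3, 7): AT =
// (3 < 7) = 1, SELEQZ zeroes out, SELNEZ writes AT = lhs = 3, and the OR
// again produces 3.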

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

// int java.lang.Math.min(int, int)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, GetAssembler());
}

// long java.lang.Math.min(long, long)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, GetAssembler());
}

// int java.lang.Math.max(int, int)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, GetAssembler());
}

// long java.lang.Math.max(long, long)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, GetAssembler());
}

// double java.lang.Math.sqrt(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  __ SqrtD(out, in);
}

static void CreateFPToFP(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

// double java.lang.Math.rint(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFP(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  __ RintD(out, in);
}

// double java.lang.Math.floor(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFP(arena_, invoke);
}

constexpr uint16_t kFPLeaveUnchanged = kPositiveZero |
                                       kPositiveInfinity |
                                       kNegativeZero |
                                       kNegativeInfinity |
                                       kQuietNaN |
                                       kSignalingNaN;
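
// CLASS.D (ClassD below) writes a bit mask describing the IEEE class of
// its operand, so ANDing that mask with kFPLeaveUnchanged is non-zero
// exactly when the input is a zero, an infinity, or a NaN, the inputs
// that floor() and ceil() must return unchanged. The individual class
// bits (kPositiveZero, ..., kSignalingNaN) are expected to be defined
// with the other MIPS64 constants included above.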

void IntrinsicCodeGeneratorMIPS64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  Label done;

  // double floor(double in) {
  //   if in.isNaN || in.isInfinite || in.isZero {
  //     return in;
  //   }
  __ ClassD(out, in);
  __ Dmfc1(AT, out);
  __ Andi(AT, AT, kFPLeaveUnchanged);  // +0.0 | +Inf | -0.0 | -Inf | qNaN | sNaN
  __ MovD(out, in);
  __ Bnezc(AT, &done);

  //   Long outLong = floor(in);
  //   if outLong == Long.MAX_VALUE {
  //     // floor() has almost certainly returned a value which
  //     // can't be successfully represented as a signed 64-bit
  //     // number. Java expects that the input value will be
  //     // returned in these cases.
  //     // There is also a small probability that floor(in)
  //     // correctly truncates the input value to Long.MAX_VALUE. In
  //     // that case, this exception handling code still does the
  //     // correct thing.
  //     return in;
  //   }
  __ FloorLD(out, in);
  __ Dmfc1(AT, out);
  __ MovD(out, in);
  __ LoadConst64(TMP, kPrimLongMax);
  __ Beqc(AT, TMP, &done);

  //   double out = outLong;
  //   return out;
  __ Dmtc1(AT, out);
  __ Cvtdl(out, out);
  __ Bind(&done);
  // }
}

// double java.lang.Math.ceil(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFP(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  Label done;

  // double ceil(double in) {
  //   if in.isNaN || in.isInfinite || in.isZero {
  //     return in;
  //   }
  __ ClassD(out, in);
  __ Dmfc1(AT, out);
  __ Andi(AT, AT, kFPLeaveUnchanged);  // +0.0 | +Inf | -0.0 | -Inf | qNaN | sNaN
  __ MovD(out, in);
  __ Bnezc(AT, &done);

  //   Long outLong = ceil(in);
  //   if outLong == Long.MAX_VALUE {
  //     // ceil() has almost certainly returned a value which
  //     // can't be successfully represented as a signed 64-bit
  //     // number. Java expects that the input value will be
  //     // returned in these cases.
  //     // There is also a small probability that ceil(in)
  //     // correctly rounds up the input value to Long.MAX_VALUE. In
  //     // that case, this exception handling code still does the
  //     // correct thing.
  //     return in;
  //   }
  __ CeilLD(out, in);
  __ Dmfc1(AT, out);
  __ MovD(out, in);
  __ LoadConst64(TMP, kPrimLongMax);
  __ Beqc(AT, TMP, &done);

  //   double out = outLong;
  //   return out;
  __ Dmtc1(AT, out);
  __ Cvtdl(out, out);
  __ Bind(&done);
  // }
}

// byte libcore.io.Memory.peekByte(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Lb(out, adr, 0);
}

// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Lh(out, adr, 0);
}

// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Lw(out, adr, 0);
}

// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Ld(out, adr, 0);
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sb(val, adr, 0);
}

// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sh(val, adr, 0);
}

// void libcore.io.Memory.pokeInt(long address, int value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sw(val, adr, 0);
}

// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sd(val, adr, 0);
}

// Thread java.lang.Thread.currentThread()
void IntrinsicLocationsBuilderMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ LoadFromOffset(kLoadUnsignedWord,
                    out,
                    TR,
                    Thread::PeerOffset<kMips64PointerSize>().Int32Value());
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorMIPS64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister trg = locations->Out().AsRegister<GpuRegister>();

  __ Daddu(TMP, base, offset);
  if (is_volatile) {
    __ Sync(0);
  }
  switch (type) {
    case Primitive::kPrimInt:
      __ Lw(trg, TMP, 0);
      break;

    case Primitive::kPrimNot:
      __ Lwu(trg, TMP, 0);
      break;

    case Primitive::kPrimLong:
      __ Ld(trg, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}

// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}

// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}

// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}

// long sun.misc.Unsafe.getLongVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}

// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}

// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorMIPS64* codegen) {
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>();

  __ Daddu(TMP, base, offset);
  if (is_volatile || is_ordered) {
    __ Sync(0);
  }
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      __ Sw(value, TMP, 0);
      break;

    case Primitive::kPrimLong:
      __ Sd(value, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
  if (is_volatile) {
    __ Sync(0);
  }

  if (type == Primitive::kPrimNot) {
    codegen->MarkGCCard(base, value);
  }
}
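
// The barrier placement above appears to follow the usual SYNC-based
// mapping for Java volatile stores on MIPS: a full barrier (SYNC 0)
// before the store keeps it from being reordered with earlier accesses
// (release semantics), and the trailing SYNC for volatile puts keeps it
// from being reordered with later accesses. Ordered ("lazySet"-style)
// puts get only the leading barrier.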

// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}

// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}

// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}

// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}

// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}

// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}

// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}

// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}

// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

// char java.lang.String.charAt(int index)
void IntrinsicLocationsBuilderMIPS64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicCodeGeneratorMIPS64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();

  // Location of reference to data array.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();

  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister idx = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // TODO: Maybe we can support range check elimination. Overall,
  //       though, I think it's not worth the cost.
  // TODO: For simplicity, the index parameter is requested in a
  //       register, so different from Quick we will not optimize the
  //       code for constants (which would save a register).

  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);

  // Load the string size.
  __ Lw(TMP, obj, count_offset);
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Revert to the slow path if idx is too large or negative.
  __ Bgeuc(idx, TMP, slow_path->GetEntryLabel());

  // out = obj[2*idx].
  __ Sll(TMP, idx, 1);             // idx * 2
  __ Daddu(TMP, TMP, obj);         // Address of char at location idx
  __ Lhu(out, TMP, value_offset);  // Load char at location idx

  __ Bind(slow_path->GetExitLabel());
}

// int java.lang.String.compareTo(String anotherString)
void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(argument, slow_path->GetEntryLabel());

  __ LoadFromOffset(kLoadDoubleword,
                    TMP,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64WordSize,
                                            pStringCompareTo).Int32Value());
  __ Jalr(TMP);
  __ Nop();
  __ Bind(slow_path->GetExitLabel());
}

static void GenerateStringIndexOf(HInvoke* invoke,
                                  Mips64Assembler* assembler,
                                  CodeGeneratorMIPS64* codegen,
                                  ArenaAllocator* allocator,
                                  bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we
  // don't know statically, or directly dispatch if we have a constant.
  SlowPathCodeMIPS64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (!IsUint<16>(invoke->InputAt(1)->AsIntConstant()->GetValue())) {
      // Always needs the slow path. We could directly dispatch to it,
      // but this case should be rare, so for simplicity just put the
      // full slow path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
    __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
    slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
    codegen->AddSlowPath(slow_path);
    __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel());  // UTF-16 required.
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, A2);
    // Start-index = 0.
    __ Clear(tmp_reg);
  } else {
    __ Slt(TMP, A2, ZERO);   // if fromIndex < 0
    __ Seleqz(A2, A2, TMP);  //   fromIndex = 0
  }

  __ LoadFromOffset(kLoadDoubleword,
                    TMP,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pIndexOf).Int32Value());
  __ Jalr(TMP);
  __ Nop();

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
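
// Note the branchless clamp above when a fromIndex is supplied: SLT sets
// TMP to 1 only if A2 (fromIndex) is negative, and SELEQZ then keeps A2
// when TMP == 0 and replaces it with zero otherwise, matching
// String.indexOf's treatment of negative start indices. The guard before
// it sends any search value above 0xFFFF to the slow path, since the
// assembly stub only handles single UTF-16 code units.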

// int java.lang.String.indexOf(int ch)
void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime
  // calling convention. So it's best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));

  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
}

// int java.lang.String.indexOf(int ch, int fromIndex)
void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime
  // calling convention. So it's best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
}

// java.lang.String.String(byte[] bytes)
void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(byte_array, slow_path->GetEntryLabel());

  __ LoadFromOffset(kLoadDoubleword,
                    TMP,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pAllocStringFromBytes).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Jalr(TMP);
  __ Nop();
  __ Bind(slow_path->GetExitLabel());
}

// java.lang.String.String(char[] value)
void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();

  __ LoadFromOffset(kLoadDoubleword,
                    TMP,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pAllocStringFromChars).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Jalr(TMP);
  __ Nop();
}

// java.lang.String.String(String original)
void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(string_to_copy, slow_path->GetEntryLabel());

  __ LoadFromOffset(kLoadDoubleword,
                    TMP,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pAllocStringFromString).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Jalr(TMP);
  __ Nop();
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                    \
void IntrinsicLocationsBuilderMIPS64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {  \
}                                                                                        \
void IntrinsicCodeGeneratorMIPS64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {     \
}

UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)

UNIMPLEMENTED_INTRINSIC(UnsafeCASInt)
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)
UNIMPLEMENTED_INTRINSIC(UnsafeCASObject)
UNIMPLEMENTED_INTRINSIC(StringEquals)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)

UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace mips64
}  // namespace art