/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_mips64.h"

#include "arch/mips64/instruction_set_features_mips64.h"
#include "art_method.h"
#include "code_generator_mips64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/mips64/assembler_mips64.h"
#include "utils/mips64/constants_mips64.h"

namespace art {

namespace mips64 {

IntrinsicLocationsBuilderMIPS64::IntrinsicLocationsBuilderMIPS64(CodeGeneratorMIPS64* codegen)
    : arena_(codegen->GetGraph()->GetArena()) {
}

Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() {
  return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorMIPS64* codegen) {
  if (!trg.IsValid()) {
    DCHECK_EQ(type, Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  // The managed-code return value arrives in V0 (integral/reference types) or
  // F0 (floating-point types); copy it into the expected output register.
  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
    if (trg_reg != V0) {
      __ Move(trg_reg, V0);
    }
  } else {
    FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
    if (trg_reg != F0) {
      if (type == Primitive::kPrimFloat) {
        __ MovS(trg_reg, F0);
      } else {
        __ MovD(trg_reg, F0);
      }
    }
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
  InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the
// intrinsic) in an intrinsified call. This will copy the arguments
// into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations
//       given by the invoke's location summary. If an intrinsic
//       modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit IntrinsicSlowPathMIPS64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(A0));
      codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
};
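
// For illustration, the typical usage pattern for this slow path (as used by
// the String intrinsics further down in this file) is:
//
//   SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
//   codegen_->AddSlowPath(slow_path);
//   __ Beqzc(argument, slow_path->GetEntryLabel());   // Branch to the slow path,
//   ...                                               // fast-path code,
//   __ Bind(slow_path->GetExitLabel());               // and rejoin here.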

#undef __

bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    __ Dmfc1(out, in);
  } else {
    __ Mfc1(out, in);
  }
}
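
// Dmfc1/Mfc1 copy the raw IEEE-754 bit pattern from an FPU register into a GPR
// without any conversion, which is exactly the contract of the
// doubleToRawLongBits()/floatToRawIntBits() intrinsics below. A small worked
// example of the expected result (for illustration only):
//   Float.floatToRawIntBits(1.0f)   == 0x3f800000
//   Double.doubleToRawLongBits(1.0) == 0x3ff0000000000000L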

// long java.lang.Double.doubleToRawLongBits(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}

// int java.lang.Float.floatToRawIntBits(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  if (is64bit) {
    __ Dmtc1(in, out);
  } else {
    __ Mtc1(in, out);
  }
}

// double java.lang.Double.longBitsToDouble(long)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

// float java.lang.Float.intBitsToFloat(int)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  switch (type) {
    case Primitive::kPrimShort:
      __ Dsbh(out, in);
      __ Seh(out, out);
      break;
    case Primitive::kPrimInt:
      __ Rotr(out, in, 16);
      __ Wsbh(out, out);
      break;
    case Primitive::kPrimLong:
      __ Dsbh(out, in);
      __ Dshd(out, out);
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}
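
// Worked example for the kPrimInt case above (for illustration only),
// reversing in = 0x12345678:
//   Rotr(out, in, 16)  ->  out = 0x56781234   (swap the two halfwords)
//   Wsbh(out, out)     ->  out = 0x78563412   (swap the bytes inside each halfword)
// which matches Integer.reverseBytes(0x12345678) == 0x78563412.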

// int java.lang.Integer.reverseBytes(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.reverseBytes(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

// short java.lang.Short.reverseBytes(short)
void IntrinsicLocationsBuilderMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

static void GenNumberOfLeadingZeroes(LocationSummary* locations,
                                     bool is64bit,
                                     Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    __ Dclz(out, in);
  } else {
    __ Clz(out, in);
  }
}

// int java.lang.Integer.numberOfLeadingZeros(int i)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), false, GetAssembler());
}

// int java.lang.Long.numberOfLeadingZeros(long i)
void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), true, GetAssembler());
}

static void GenNumberOfTrailingZeroes(LocationSummary* locations,
                                      bool is64bit,
                                      Mips64Assembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
    __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
  } else {
    __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
    __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
    __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
  }
}
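
// The trailing-zero count is computed by fully reversing the bits (a byte-order
// swap followed by [D]bitswap, which reverses the bits inside each byte) and
// then counting *leading* zeros of the reversed value. For illustration:
//   numberOfTrailingZeros(0x00000008) -> reversed value 0x10000000 -> Clz == 3.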

// int java.lang.Integer.numberOfTrailingZeros(int i)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), false, GetAssembler());
}

// int java.lang.Long.numberOfTrailingZeros(long i)
void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), true, GetAssembler());
}

static void GenRotateRight(HInvoke* invoke,
                           Primitive::Type type,
                           Mips64Assembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = invoke->GetLocations();
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (invoke->InputAt(1)->IsIntConstant()) {
    uint32_t shift = static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue());
    if (type == Primitive::kPrimInt) {
      shift &= 0x1f;
      __ Rotr(out, in, shift);
    } else {
      shift &= 0x3f;
      if (shift < 32) {
        __ Drotr(out, in, shift);
      } else {
        shift &= 0x1f;
        __ Drotr32(out, in, shift);
      }
    }
  } else {
    GpuRegister shamt = locations->InAt(1).AsRegister<GpuRegister>();
    if (type == Primitive::kPrimInt) {
      __ Rotrv(out, in, shamt);
    } else {
      __ Drotrv(out, in, shamt);
    }
  }
}
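
// DROTR only has a 5-bit shift-amount field, so constant 64-bit rotations by
// 32..63 bits are emitted as DROTR32, which rotates by (amount + 32); the
// "shift &= 0x1f" above supplies the low five bits. For illustration,
// Long.rotateRight(x, 33) is emitted as Drotr32(out, in, 1).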

// int java.lang.Integer.rotateRight(int i, int distance)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerRotateRight(HInvoke* invoke) {
  GenRotateRight(invoke, Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.rotateRight(long i, int distance)
void IntrinsicLocationsBuilderMIPS64::VisitLongRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongRotateRight(HInvoke* invoke) {
  GenRotateRight(invoke, Primitive::kPrimLong, GetAssembler());
}

static void GenRotateLeft(HInvoke* invoke,
                          Primitive::Type type,
                          Mips64Assembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = invoke->GetLocations();
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (invoke->InputAt(1)->IsIntConstant()) {
    int32_t shift = -static_cast<int32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue());
    if (type == Primitive::kPrimInt) {
      shift &= 0x1f;
      __ Rotr(out, in, shift);
    } else {
      shift &= 0x3f;
      if (shift < 32) {
        __ Drotr(out, in, shift);
      } else {
        shift &= 0x1f;
        __ Drotr32(out, in, shift);
      }
    }
  } else {
    GpuRegister shamt = locations->InAt(1).AsRegister<GpuRegister>();
    if (type == Primitive::kPrimInt) {
      __ Subu(TMP, ZERO, shamt);
      __ Rotrv(out, in, TMP);
    } else {
      __ Dsubu(TMP, ZERO, shamt);
      __ Drotrv(out, in, TMP);
    }
  }
}
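
// MIPS64R6 has no rotate-left instruction, so a left rotation is emitted as a
// right rotation by the negated distance: rotateLeft(x, n) == rotateRight(x, -n)
// (modulo the operand width). For illustration, Integer.rotateLeft(x, 1) becomes
// Rotr(out, in, 31), and a variable distance is negated with Subu/Dsubu from
// ZERO before Rotrv/Drotrv.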

// int java.lang.Integer.rotateLeft(int i, int distance)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerRotateLeft(HInvoke* invoke) {
  GenRotateLeft(invoke, Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.rotateLeft(long i, int distance)
void IntrinsicLocationsBuilderMIPS64::VisitLongRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongRotateLeft(HInvoke* invoke) {
  GenRotateLeft(invoke, Primitive::kPrimLong, GetAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       Mips64Assembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (type == Primitive::kPrimInt) {
    __ Rotr(out, in, 16);
    __ Wsbh(out, out);
    __ Bitswap(out, out);
  } else {
    __ Dsbh(out, in);
    __ Dshd(out, out);
    __ Dbitswap(out, out);
  }
}

// int java.lang.Integer.reverse(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.reverse(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  if (is64bit) {
    __ AbsD(out, in);
  } else {
    __ AbsS(out, in);
  }
}

// double java.lang.Math.abs(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
}

// float java.lang.Math.abs(float)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    __ Dsra32(AT, in, 31);
    __ Xor(out, in, AT);
    __ Dsubu(out, out, AT);
  } else {
    __ Sra(AT, in, 31);
    __ Xor(out, in, AT);
    __ Subu(out, out, AT);
  }
}
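
// This is the usual branchless abs() idiom: AT receives the sign mask (0 or all
// ones; Dsra32 with shift 31 is an arithmetic shift by 63), and then
// abs(x) == (x ^ mask) - mask. For illustration, with in = -5 in the 32-bit case:
//   Sra(AT, in, 31)    ->  AT  = 0xffffffff
//   Xor(out, in, AT)   ->  out = 4
//   Subu(out, out, AT) ->  out = 5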

// int java.lang.Math.abs(int)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetAssembler());
}

// long java.lang.Math.abs(long)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        Mips64Assembler* assembler) {
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  if (is_double) {
    if (is_min) {
      __ MinD(out, lhs, rhs);
    } else {
      __ MaxD(out, lhs, rhs);
    }
  } else {
    if (is_min) {
      __ MinS(out, lhs, rhs);
    } else {
      __ MaxS(out, lhs, rhs);
    }
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

// double java.lang.Math.min(double, double)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetAssembler());
}

// float java.lang.Math.min(float, float)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetAssembler());
}

// double java.lang.Math.max(double, double)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetAssembler());
}

// float java.lang.Math.max(float, float)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      Mips64Assembler* assembler) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // Some architectures, such as ARM and MIPS (prior to r6), have a
  // conditional move instruction which only changes the target
  // (output) register if the condition is true (MIPS prior to r6 had
  // MOVF, MOVT, and MOVZ). The SELEQZ and SELNEZ instructions always
  // change the target (output) register. If the condition is true the
  // output register gets the contents of the "rs" register; otherwise,
  // the output register is set to zero. One consequence of this is
  // that to implement something like "rd = c==0 ? rs : rt" MIPS64r6
  // needs to use a pair of SELEQZ/SELNEZ instructions. After
  // executing this pair of instructions one of the output registers
  // from the pair will necessarily contain zero. Then the code ORs the
  // output registers from the SELEQZ/SELNEZ instructions to get the
  // final result.
  //
  // The initial test to see if the output register is same as the
  // first input register is needed to make sure that value in the
  // first input register isn't clobbered before we've finished
  // computing the output value. The logic in the corresponding else
  // clause performs the same task but makes sure the second input
  // register isn't clobbered in the event that it's the same register
  // as the output register; the else clause also handles the case
  // where the output register is distinct from both the first, and the
  // second input registers.
  if (out == lhs) {
    __ Slt(AT, rhs, lhs);
    if (is_min) {
      __ Seleqz(out, lhs, AT);
      __ Selnez(AT, rhs, AT);
    } else {
      __ Selnez(out, lhs, AT);
      __ Seleqz(AT, rhs, AT);
    }
  } else {
    __ Slt(AT, lhs, rhs);
    if (is_min) {
      __ Seleqz(out, rhs, AT);
      __ Selnez(AT, lhs, AT);
    } else {
      __ Selnez(out, rhs, AT);
      __ Seleqz(AT, lhs, AT);
    }
  }
  __ Or(out, out, AT);
}
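
// A short trace of the min case with out == lhs, lhs = 7, rhs = 3 (illustration):
//   Slt(AT, rhs, lhs)    ->  AT  = 1   (rhs < lhs)
//   Seleqz(out, lhs, AT) ->  out = 0   (AT != 0, so lhs is dropped)
//   Selnez(AT, rhs, AT)  ->  AT  = 3   (AT != 0, so rhs is kept)
//   Or(out, out, AT)     ->  out = 3 == min(7, 3)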

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

// int java.lang.Math.min(int, int)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, GetAssembler());
}

// long java.lang.Math.min(long, long)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, GetAssembler());
}

// int java.lang.Math.max(int, int)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, GetAssembler());
}

// long java.lang.Math.max(long, long)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, GetAssembler());
}

// double java.lang.Math.sqrt(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  __ SqrtD(out, in);
}

static void CreateFPToFP(ArenaAllocator* arena,
                         HInvoke* invoke,
                         Location::OutputOverlap overlaps = Location::kOutputOverlap) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), overlaps);
}

// double java.lang.Math.rint(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFP(arena_, invoke, Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  __ RintD(out, in);
}

// double java.lang.Math.floor(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFP(arena_, invoke);
}

const constexpr uint16_t kFPLeaveUnchanged = kPositiveZero |
                                             kPositiveInfinity |
                                             kNegativeZero |
                                             kNegativeInfinity |
                                             kQuietNaN |
                                             kSignalingNaN;

enum FloatRoundingMode {
  kFloor,
  kCeil,
};

static void GenRoundingMode(LocationSummary* locations,
                            FloatRoundingMode mode,
                            Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  DCHECK_NE(in, out);

  Label done;

  // double floor/ceil(double in) {
  //     if in.isNaN || in.isInfinite || in.isZero {
  //         return in;
  //     }
  __ ClassD(out, in);
  __ Dmfc1(AT, out);
  __ Andi(AT, AT, kFPLeaveUnchanged);   // +0.0 | +Inf | -0.0 | -Inf | qNaN | sNaN
  __ MovD(out, in);
  __ Bnezc(AT, &done);

  //     Long outLong = floor/ceil(in);
  //     if outLong == Long.MAX_VALUE {
  //         // floor()/ceil() has almost certainly returned a value
  //         // which can't be successfully represented as a signed
  //         // 64-bit number.  Java expects that the input value will
  //         // be returned in these cases.
  //         // There is also a small probability that floor(in)/ceil(in)
  //         // correctly truncates/rounds up the input value to
  //         // Long.MAX_VALUE.  In that case, this exception handling
  //         // code still does the correct thing.
  //         return in;
  //     }
  if (mode == kFloor) {
    __ FloorLD(out, in);
  } else if (mode == kCeil) {
    __ CeilLD(out, in);
  }
  __ Dmfc1(AT, out);
  __ MovD(out, in);
  __ LoadConst64(TMP, kPrimLongMax);
  __ Beqc(AT, TMP, &done);

  //     double out = outLong;
  //     return out;
  __ Dmtc1(AT, out);
  __ Cvtdl(out, out);
  __ Bind(&done);
  // }
}
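
// As a rough illustration of the Java semantics the sequence above is meant to
// produce: Math.floor(2.7) == 2.0, Math.ceil(2.7) == 3.0, Math.floor(-2.5) == -3.0,
// while NaN, the infinities and signed zeros are returned unchanged (those are
// exactly the operand classes masked by kFPLeaveUnchanged).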

void IntrinsicCodeGeneratorMIPS64::VisitMathFloor(HInvoke* invoke) {
  GenRoundingMode(invoke->GetLocations(), kFloor, GetAssembler());
}

// double java.lang.Math.ceil(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFP(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCeil(HInvoke* invoke) {
  GenRoundingMode(invoke->GetLocations(), kCeil, GetAssembler());
}

// byte libcore.io.Memory.peekByte(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Lb(out, adr, 0);
}

// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Lh(out, adr, 0);
}

// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Lw(out, adr, 0);
}

// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Ld(out, adr, 0);
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sb(val, adr, 0);
}

// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sh(val, adr, 0);
}

// void libcore.io.Memory.pokeInt(long address, int value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sw(val, adr, 0);
}

// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sd(val, adr, 0);
}

// Thread java.lang.Thread.currentThread()
void IntrinsicLocationsBuilderMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ LoadFromOffset(kLoadUnsignedWord,
                    out,
                    TR,
                    Thread::PeerOffset<kMips64PointerSize>().Int32Value());
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorMIPS64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister trg = locations->Out().AsRegister<GpuRegister>();

  __ Daddu(TMP, base, offset);
  if (is_volatile) {
    __ Sync(0);
  }
  switch (type) {
    case Primitive::kPrimInt:
      __ Lw(trg, TMP, 0);
      break;

    case Primitive::kPrimNot:
      __ Lwu(trg, TMP, 0);
      break;

    case Primitive::kPrimLong:
      __ Ld(trg, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}
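
// Notes on the sequence above (descriptive, not normative): the effective
// address is formed with a plain Daddu of the object pointer and the long
// offset; volatile gets are preceded by SYNC 0, the full MIPS memory barrier;
// and object references use Lwu because ART stores heap references as 32-bit
// values even on 64-bit targets, so the loaded value must be zero-extended.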

// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}

// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}

// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}

// long sun.misc.Unsafe.getLongVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}

// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}

// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorMIPS64* codegen) {
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>();

  __ Daddu(TMP, base, offset);
  if (is_volatile || is_ordered) {
    __ Sync(0);
  }
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      __ Sw(value, TMP, 0);
      break;

    case Primitive::kPrimLong:
      __ Sd(value, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
  if (is_volatile) {
    __ Sync(0);
  }

  if (type == Primitive::kPrimNot) {
    codegen->MarkGCCard(base, value);
  }
}

// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}

// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}

// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}

// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}

// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}

// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}

// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}

// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}

// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

// char java.lang.String.charAt(int index)
void IntrinsicLocationsBuilderMIPS64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicCodeGeneratorMIPS64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();

  // Location of reference to data array
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();

  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister idx = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // TODO: Maybe we can support range check elimination. Overall,
  //       though, I think it's not worth the cost.
  // TODO: For simplicity, the index parameter is requested in a
  //       register, so different from Quick we will not optimize the
  //       code for constants (which would save a register).

  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);

  // Load the string size
  __ Lw(TMP, obj, count_offset);
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Revert to slow path if idx is too large, or negative
  __ Bgeuc(idx, TMP, slow_path->GetEntryLabel());

  // out = obj[2*idx].
  __ Sll(TMP, idx, 1);             // idx * 2
  __ Daddu(TMP, TMP, obj);         // Address of char at location idx
  __ Lhu(out, TMP, value_offset);  // Load char at location idx

  __ Bind(slow_path->GetExitLabel());
}
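
// For illustration: String data is stored as 16-bit UTF-16 code units, so the
// index is scaled by two with a single left shift, and Lhu zero-extends the
// loaded unit into the output register. The bounds check uses Bgeuc, an
// unsigned compare, so a negative index wraps to a huge unsigned value and
// also falls back to the slow path.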

// int java.lang.String.compareTo(String anotherString)
void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(argument, slow_path->GetEntryLabel());

  __ LoadFromOffset(kLoadDoubleword,
                    TMP,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64WordSize,
                                            pStringCompareTo).Int32Value());
  __ Jalr(TMP);
  __ Nop();
  __ Bind(slow_path->GetExitLabel());
}

static void GenerateStringIndexOf(HInvoke* invoke,
                                  Mips64Assembler* assembler,
                                  CodeGeneratorMIPS64* codegen,
                                  ArenaAllocator* allocator,
                                  bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we
  // don't know statically, or directly dispatch if we have a constant.
  SlowPathCodeMIPS64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (!IsUint<16>(invoke->InputAt(1)->AsIntConstant()->GetValue())) {
      // Always needs the slow-path. We could directly dispatch to it,
      // but this case should be rare, so for simplicity just put the
      // full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
    __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
    slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
    codegen->AddSlowPath(slow_path);
    __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel());  // UTF-16 required
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, A2);
    // Start-index = 0.
    __ Clear(tmp_reg);
  } else {
    __ Slt(TMP, A2, ZERO);   // if fromIndex < 0
    __ Seleqz(A2, A2, TMP);  //     fromIndex = 0
  }

  __ LoadFromOffset(kLoadDoubleword,
                    TMP,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pIndexOf).Int32Value());
  __ Jalr(TMP);
  __ Nop();

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

// int java.lang.String.indexOf(int ch)
void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime
  // calling convention. So it's best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));

  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
}

// int java.lang.String.indexOf(int ch, int fromIndex)
void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime
  // calling convention. So it's best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
}

// java.lang.String.String(byte[] bytes)
void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(byte_array, slow_path->GetEntryLabel());

  __ LoadFromOffset(kLoadDoubleword,
                    TMP,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pAllocStringFromBytes).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Jalr(TMP);
  __ Nop();
  __ Bind(slow_path->GetExitLabel());
}

// java.lang.String.String(char[] value)
void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();

  __ LoadFromOffset(kLoadDoubleword,
                    TMP,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pAllocStringFromChars).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Jalr(TMP);
  __ Nop();
}

// java.lang.String.String(String original)
void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}

void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(string_to_copy, slow_path->GetEntryLabel());

  __ LoadFromOffset(kLoadDoubleword,
                    TMP,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pAllocStringFromString).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Jalr(TMP);
  __ Nop();
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                    \
void IntrinsicLocationsBuilderMIPS64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                        \
void IntrinsicCodeGeneratorMIPS64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)

UNIMPLEMENTED_INTRINSIC(UnsafeCASInt)
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)
UNIMPLEMENTED_INTRINSIC(UnsafeCASObject)
UNIMPLEMENTED_INTRINSIC(StringEquals)

UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace mips64
}  // namespace art