blob: 78b436079713906ff3c0778157e7e423df094cfd [file] [log] [blame]
Chris Larsen3039e382015-08-26 07:54:08 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "intrinsics_mips64.h"
18
19#include "arch/mips64/instruction_set_features_mips64.h"
20#include "art_method.h"
21#include "code_generator_mips64.h"
22#include "entrypoints/quick/quick_entrypoints.h"
23#include "intrinsics.h"
24#include "mirror/array-inl.h"
25#include "mirror/string.h"
26#include "thread.h"
27#include "utils/mips64/assembler_mips64.h"
28#include "utils/mips64/constants_mips64.h"
29
30namespace art {
31
32namespace mips64 {
33
// The locations builder only needs the graph's arena; it allocates a
// LocationSummary for each invoke it recognizes as an intrinsic.
IntrinsicLocationsBuilderMIPS64::IntrinsicLocationsBuilderMIPS64(CodeGeneratorMIPS64* codegen)
    : arena_(codegen->GetGraph()->GetArena()) {
}

// Returns the MIPS64 assembler owned by the code generator.
Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() {
  return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler());
}

// Returns the arena allocator of the graph being compiled.
ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}
45
Chris Larsen9701c2e2015-09-04 17:22:47 -070046#define __ codegen->GetAssembler()->
47
48static void MoveFromReturnRegister(Location trg,
49 Primitive::Type type,
50 CodeGeneratorMIPS64* codegen) {
51 if (!trg.IsValid()) {
52 DCHECK_EQ(type, Primitive::kPrimVoid);
53 return;
54 }
55
56 DCHECK_NE(type, Primitive::kPrimVoid);
57
58 if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
59 GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
60 if (trg_reg != V0) {
61 __ Move(V0, trg_reg);
62 }
63 } else {
64 FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
65 if (trg_reg != F0) {
66 if (type == Primitive::kPrimFloat) {
67 __ MovS(F0, trg_reg);
68 } else {
69 __ MovD(F0, trg_reg);
70 }
71 }
72 }
73}
74
// Moves the invoke's operands into the argument registers/stack slots
// required by the MIPS64 managed-code calling convention, in preparation
// for calling the managed (non-intrinsified) implementation.
static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
  InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}
79
80// Slow-path for fallback (calling the managed code to handle the
81// intrinsic) in an intrinsified call. This will copy the arguments
82// into the positions for a regular call.
83//
84// Note: The actual parameters are required to be in the locations
85// given by the invoke's location summary. If an intrinsic
86// modifies those locations before a slowpath call, they must be
87// restored!
class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit IntrinsicSlowPathMIPS64(HInvoke* invoke)
      : SlowPathCodeMIPS64(invoke), invoke_(invoke) { }

  // Emits the fallback call to the managed implementation. The ordering is
  // significant: registers must be saved before MoveArguments clobbers the
  // argument registers, and restored only after the result has been copied
  // out of the return register.
  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    // Dispatch to the regular (non-intrinsified) call sequence, with the
    // method pointer in A0 per the managed calling convention.
    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(A0));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    // Compact branch back to the fast path.
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
};
130
131#undef __
132
Chris Larsen3039e382015-08-26 07:54:08 -0700133bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
134 Dispatch(invoke);
135 LocationSummary* res = invoke->GetLocations();
136 return res != nullptr && res->Intrinsified();
137}
138
139#define __ assembler->
140
141static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
142 LocationSummary* locations = new (arena) LocationSummary(invoke,
143 LocationSummary::kNoCall,
144 kIntrinsified);
145 locations->SetInAt(0, Location::RequiresFpuRegister());
146 locations->SetOut(Location::RequiresRegister());
147}
148
149static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
150 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
151 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
152
153 if (is64bit) {
154 __ Dmfc1(out, in);
155 } else {
156 __ Mfc1(out, in);
157 }
158}
159
// long java.lang.Double.doubleToRawLongBits(double)
// Raw-bits: no NaN canonicalization, so a plain register move suffices.
void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// int java.lang.Float.floatToRawIntBits(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
177
178static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
179 LocationSummary* locations = new (arena) LocationSummary(invoke,
180 LocationSummary::kNoCall,
181 kIntrinsified);
182 locations->SetInAt(0, Location::RequiresRegister());
183 locations->SetOut(Location::RequiresFpuRegister());
184}
185
186static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
187 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
188 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
189
190 if (is64bit) {
191 __ Dmtc1(in, out);
192 } else {
193 __ Mtc1(in, out);
194 }
195}
196
// double java.lang.Double.longBitsToDouble(long)
// A plain bit move matches the Java contract for the raw conversion.
void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// float java.lang.Float.intBitsToFloat(int)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
214
215static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
216 LocationSummary* locations = new (arena) LocationSummary(invoke,
217 LocationSummary::kNoCall,
218 kIntrinsified);
219 locations->SetInAt(0, Location::RequiresRegister());
220 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
221}
222
// Emits a byte-swap of `in` into `out` for the given primitive width.
static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  switch (type) {
    case Primitive::kPrimShort:
      __ Dsbh(out, in);   // Swap the bytes within each halfword.
      __ Seh(out, out);   // Sign-extend the low halfword into the register.
      break;
    case Primitive::kPrimInt:
      __ Rotr(out, in, 16);  // Swap the two halfwords of the word...
      __ Wsbh(out, out);     // ...then the bytes within each halfword.
      break;
    case Primitive::kPrimLong:
      __ Dsbh(out, in);   // Swap bytes within each halfword...
      __ Dshd(out, out);  // ...then the halfwords within the doubleword.
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}
247
// int java.lang.Integer.reverseBytes(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.reverseBytes(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

// short java.lang.Short.reverseBytes(short)
void IntrinsicLocationsBuilderMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}
274
Chris Larsen81284372015-10-21 15:28:53 -0700275static void GenNumberOfLeadingZeroes(LocationSummary* locations,
276 bool is64bit,
277 Mips64Assembler* assembler) {
Chris Larsen3039e382015-08-26 07:54:08 -0700278 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
279 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
280
281 if (is64bit) {
282 __ Dclz(out, in);
283 } else {
284 __ Clz(out, in);
285 }
286}
287
// int java.lang.Integer.numberOfLeadingZeros(int i)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// int java.lang.Long.numberOfLeadingZeros(long i)
void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
305
Chris Larsen81284372015-10-21 15:28:53 -0700306static void GenNumberOfTrailingZeroes(LocationSummary* locations,
307 bool is64bit,
308 Mips64Assembler* assembler) {
Chris Larsen0646da72015-09-22 16:02:40 -0700309 Location in = locations->InAt(0);
310 Location out = locations->Out();
311
312 if (is64bit) {
313 __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
314 __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
315 __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
316 __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
317 } else {
318 __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
319 __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
320 __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
321 __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
322 }
323}
324
// int java.lang.Integer.numberOfTrailingZeros(int i)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// int java.lang.Long.numberOfTrailingZeros(long i)
void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
Chris Larsen3039e382015-08-26 07:54:08 -0700341}
342
// Emits a right-rotate of the first argument by the second. A constant
// distance is encoded into the immediate rotate forms; a variable distance
// uses the register forms.
static void GenRotateRight(HInvoke* invoke,
                           Primitive::Type type,
                           Mips64Assembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = invoke->GetLocations();
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (invoke->InputAt(1)->IsIntConstant()) {
    uint32_t shift = static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue());
    if (type == Primitive::kPrimInt) {
      shift &= 0x1f;  // Rotate distance is taken mod 32 for ints.
      __ Rotr(out, in, shift);
    } else {
      shift &= 0x3f;  // ...and mod 64 for longs.
      if (shift < 32) {
        __ Drotr(out, in, shift);
      } else {
        shift &= 0x1f;  // DROTR32 encodes distances 32..63 as (distance - 32).
        __ Drotr32(out, in, shift);
      }
    }
  } else {
    GpuRegister shamt = locations->InAt(1).AsRegister<GpuRegister>();
    if (type == Primitive::kPrimInt) {
      __ Rotrv(out, in, shamt);
    } else {
      __ Drotrv(out, in, shamt);
    }
  }
}
375
// int java.lang.Integer.rotateRight(int i, int distance)
// The distance may be a constant (folded into the instruction) or a register.
void IntrinsicLocationsBuilderMIPS64::VisitIntegerRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerRotateRight(HInvoke* invoke) {
  GenRotateRight(invoke, Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.rotateRight(long i, int distance)
void IntrinsicLocationsBuilderMIPS64::VisitLongRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongRotateRight(HInvoke* invoke) {
  GenRotateRight(invoke, Primitive::kPrimLong, GetAssembler());
}
403
// Emits a left-rotate by rotating right by the negated distance:
// rotl(x, d) == rotr(x, -d mod width). MIPS64r6 only has right-rotates.
static void GenRotateLeft(HInvoke* invoke,
                          Primitive::Type type,
                          Mips64Assembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = invoke->GetLocations();
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (invoke->InputAt(1)->IsIntConstant()) {
    // Negate the constant distance, then mask to the type width.
    int32_t shift = -static_cast<int32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue());
    if (type == Primitive::kPrimInt) {
      shift &= 0x1f;
      __ Rotr(out, in, shift);
    } else {
      shift &= 0x3f;
      if (shift < 32) {
        __ Drotr(out, in, shift);
      } else {
        shift &= 0x1f;  // DROTR32 encodes distances 32..63 as (distance - 32).
        __ Drotr32(out, in, shift);
      }
    }
  } else {
    // Variable distance: negate into TMP, then rotate right by it.
    GpuRegister shamt = locations->InAt(1).AsRegister<GpuRegister>();
    if (type == Primitive::kPrimInt) {
      __ Subu(TMP, ZERO, shamt);
      __ Rotrv(out, in, TMP);
    } else {
      __ Dsubu(TMP, ZERO, shamt);
      __ Drotrv(out, in, TMP);
    }
  }
}
438
// int java.lang.Integer.rotateLeft(int i, int distance)
// The distance may be a constant (folded into the instruction) or a register.
void IntrinsicLocationsBuilderMIPS64::VisitIntegerRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerRotateLeft(HInvoke* invoke) {
  GenRotateLeft(invoke, Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.rotateLeft(long i, int distance)
void IntrinsicLocationsBuilderMIPS64::VisitLongRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongRotateLeft(HInvoke* invoke) {
  GenRotateLeft(invoke, Primitive::kPrimLong, GetAssembler());
}
465}
466
Chris Larsen3039e382015-08-26 07:54:08 -0700467static void GenReverse(LocationSummary* locations,
468 Primitive::Type type,
469 Mips64Assembler* assembler) {
470 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
471
472 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
473 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
474
475 if (type == Primitive::kPrimInt) {
476 __ Rotr(out, in, 16);
477 __ Wsbh(out, out);
478 __ Bitswap(out, out);
479 } else {
480 __ Dsbh(out, in);
481 __ Dshd(out, out);
482 __ Dbitswap(out, out);
483 }
484}
485
// int java.lang.Integer.reverse(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// long java.lang.Long.reverse(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}
503
Chris Larsen0b7ac982015-09-04 12:54:28 -0700504static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
505 LocationSummary* locations = new (arena) LocationSummary(invoke,
506 LocationSummary::kNoCall,
507 kIntrinsified);
508 locations->SetInAt(0, Location::RequiresFpuRegister());
509 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
510}
511
512static void MathAbsFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
513 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
514 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
515
516 if (is64bit) {
517 __ AbsD(out, in);
518 } else {
519 __ AbsS(out, in);
520 }
521}
522
// double java.lang.Math.abs(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// float java.lang.Math.abs(float)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
540
// Builds a register->register LocationSummary.
// NOTE(review): this duplicates CreateIntToIntLocations above; it is kept
// under its own name because existing callers (the Math.abs visitors)
// reference it.
static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
548
// Branch-free integer absolute value:
//   AT  = in >> 31 (arithmetic)   // all-ones if negative, zero otherwise
//   out = (in ^ AT) - AT          // conditional two's-complement negation
static void GenAbsInteger(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    __ Dsra32(AT, in, 31);  // DSRA32 shifts by 32 + 31 = 63: sign-bit mask.
    __ Xor(out, in, AT);
    __ Dsubu(out, out, AT);
  } else {
    __ Sra(AT, in, 31);
    __ Xor(out, in, AT);
    __ Subu(out, out, AT);
  }
}
563
// int java.lang.Math.abs(int)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// long java.lang.Math.abs(long)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
581
// Emits floating-point min/max with Java semantics: Java returns NaN if
// either input is NaN, so NaN operands are detected and handled before the
// bare MIPSR6 MIN.fmt/MAX.fmt instructions (which prefer numbers to NaNs)
// are used.
static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        Primitive::Type type,
                        Mips64Assembler* assembler) {
  FpuRegister a = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister b = locations->InAt(1).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  Mips64Label noNaNs;
  Mips64Label done;
  // Scratch register for the NaN-selection path; use `out` directly when it
  // does not alias an input, otherwise fall back to FTMP.
  FpuRegister ftmp = ((out != a) && (out != b)) ? out : FTMP;

  // When Java computes min/max it prefers a NaN to a number; the
  // behavior of MIPSR6 is to prefer numbers to NaNs, i.e., if one of
  // the inputs is a NaN and the other is a valid number, the MIPS
  // instruction will return the number; Java wants the NaN value
  // returned. This is why there is extra logic preceding the use of
  // the MIPS min.fmt/max.fmt instructions. If either a, or b holds a
  // NaN, return the NaN, otherwise return the min/max.
  if (type == Primitive::kPrimDouble) {
    __ CmpUnD(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqD(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelD(ftmp, a, b);

    if (ftmp != out) {
      __ MovD(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinD(out, a, b);
    } else {
      __ MaxD(out, a, b);
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimFloat);
    __ CmpUnS(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqS(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelS(ftmp, a, b);

    if (ftmp != out) {
      __ MovS(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinS(out, a, b);
    } else {
      __ MaxS(out, a, b);
    }
  }

  __ Bind(&done);
}
650
651static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
652 LocationSummary* locations = new (arena) LocationSummary(invoke,
653 LocationSummary::kNoCall,
654 kIntrinsified);
655 locations->SetInAt(0, Location::RequiresFpuRegister());
656 locations->SetInAt(1, Location::RequiresFpuRegister());
657 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
658}
659
// double java.lang.Math.min(double, double)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, Primitive::kPrimDouble, GetAssembler());
}

// float java.lang.Math.min(float, float)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, Primitive::kPrimFloat, GetAssembler());
}

// double java.lang.Math.max(double, double)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, Primitive::kPrimDouble, GetAssembler());
}

// float java.lang.Math.max(float, float)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, Primitive::kPrimFloat, GetAssembler());
}
695
// Emits a branch-free integer min/max using SLT plus the MIPS64r6
// SELEQZ/SELNEZ conditional selects.
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      Mips64Assembler* assembler) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (lhs == rhs) {
    // min(x, x) == max(x, x) == x: a plain move (if needed) suffices.
    if (out != lhs) {
      __ Move(out, lhs);
    }
  } else {
    // Some architectures, such as ARM and MIPS (prior to r6), have a
    // conditional move instruction which only changes the target
    // (output) register if the condition is true (MIPS prior to r6 had
    // MOVF, MOVT, and MOVZ). The SELEQZ and SELNEZ instructions always
    // change the target (output) register. If the condition is true the
    // output register gets the contents of the "rs" register; otherwise,
    // the output register is set to zero. One consequence of this is
    // that to implement something like "rd = c==0 ? rs : rt" MIPS64r6
    // needs to use a pair of SELEQZ/SELNEZ instructions. After
    // executing this pair of instructions one of the output registers
    // from the pair will necessarily contain zero. Then the code ORs the
    // output registers from the SELEQZ/SELNEZ instructions to get the
    // final result.
    //
    // The initial test to see if the output register is same as the
    // first input register is needed to make sure that value in the
    // first input register isn't clobbered before we've finished
    // computing the output value. The logic in the corresponding else
    // clause performs the same task but makes sure the second input
    // register isn't clobbered in the event that it's the same register
    // as the output register; the else clause also handles the case
    // where the output register is distinct from both the first, and the
    // second input registers.
    if (out == lhs) {
      __ Slt(AT, rhs, lhs);
      if (is_min) {
        __ Seleqz(out, lhs, AT);
        __ Selnez(AT, rhs, AT);
      } else {
        __ Selnez(out, lhs, AT);
        __ Seleqz(AT, rhs, AT);
      }
    } else {
      __ Slt(AT, lhs, rhs);
      if (is_min) {
        __ Seleqz(out, rhs, AT);
        __ Selnez(AT, lhs, AT);
      } else {
        __ Selnez(out, rhs, AT);
        __ Seleqz(AT, lhs, AT);
      }
    }
    // Exactly one of {out, AT} is zero here; OR merges the selected value.
    __ Or(out, out, AT);
  }
}
753
754static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
755 LocationSummary* locations = new (arena) LocationSummary(invoke,
756 LocationSummary::kNoCall,
757 kIntrinsified);
758 locations->SetInAt(0, Location::RequiresRegister());
759 locations->SetInAt(1, Location::RequiresRegister());
760 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
761}
762
763// int java.lang.Math.min(int, int)
764void IntrinsicLocationsBuilderMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
765 CreateIntIntToIntLocations(arena_, invoke);
766}
767
768void IntrinsicCodeGeneratorMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000769 GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700770}
771
772// long java.lang.Math.min(long, long)
773void IntrinsicLocationsBuilderMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
774 CreateIntIntToIntLocations(arena_, invoke);
775}
776
777void IntrinsicCodeGeneratorMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000778 GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700779}
780
// int java.lang.Math.max(int, int)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

// Max is min with the select operands swapped (is_min == false).
void IntrinsicCodeGeneratorMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}
789
// long java.lang.Math.max(long, long)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

// Max is min with the select operands swapped (is_min == false).
void IntrinsicCodeGeneratorMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}
798
// double java.lang.Math.sqrt(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

// Math.sqrt maps directly onto the hardware SQRT.D instruction.
void IntrinsicCodeGeneratorMIPS64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();  // Used by the '__' macro.
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  __ SqrtD(out, in);
}
812
Chris Larsen81284372015-10-21 15:28:53 -0700813static void CreateFPToFP(ArenaAllocator* arena,
814 HInvoke* invoke,
815 Location::OutputOverlap overlaps = Location::kOutputOverlap) {
Chris Larsen0b7ac982015-09-04 12:54:28 -0700816 LocationSummary* locations = new (arena) LocationSummary(invoke,
817 LocationSummary::kNoCall,
818 kIntrinsified);
819 locations->SetInAt(0, Location::RequiresFpuRegister());
Chris Larsen81284372015-10-21 15:28:53 -0700820 locations->SetOut(Location::RequiresFpuRegister(), overlaps);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700821}
822
// double java.lang.Math.rint(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathRint(HInvoke* invoke) {
  // RINT.D is a single instruction, so the output may safely share the
  // input's register (kNoOutputOverlap).
  CreateFPToFP(arena_, invoke, Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();  // Used by the '__' macro.
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  __ RintD(out, in);
}
836
// double java.lang.Math.floor(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathFloor(HInvoke* invoke) {
  // Default kOutputOverlap: GenRoundingMode requires out != in.
  CreateFPToFP(arena_, invoke);
}
841
Chris Larsen14500822015-10-01 11:35:18 -0700842const constexpr uint16_t kFPLeaveUnchanged = kPositiveZero |
843 kPositiveInfinity |
844 kNegativeZero |
845 kNegativeInfinity |
846 kQuietNaN |
847 kSignalingNaN;
Chris Larsen0b7ac982015-09-04 12:54:28 -0700848
// Rounding direction selector for GenRoundingMode below.
enum FloatRoundingMode {
  kFloor,
  kCeil,
};
853
// Shared lowering for Math.floor/Math.ceil on doubles. Both must return the
// input unchanged for NaNs, infinities and signed zeros, and must also return
// the input when the rounded value cannot be represented as a signed 64-bit
// integer (detected below via the Long.MAX_VALUE sentinel in the FP result).
static void GenRoundingMode(LocationSummary* locations,
                            FloatRoundingMode mode,
                            Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  // The builders use the default kOutputOverlap, so 'out' may be clobbered
  // while 'in' is still live; they must be distinct registers.
  DCHECK_NE(in, out);

  Mips64Label done;

  // double floor/ceil(double in) {
  //     if in.isNaN || in.isInfinite || in.isZero {
  //         return in;
  //     }
  __ ClassD(out, in);
  __ Dmfc1(AT, out);
  __ Andi(AT, AT, kFPLeaveUnchanged);   // +0.0 | +Inf | -0.0 | -Inf | qNaN | sNaN
  __ MovD(out, in);
  __ Bnezc(AT, &done);

  //     Long outLong = floor/ceil(in);
  //     if outLong == Long.MAX_VALUE {
  //         // floor()/ceil() has almost certainly returned a value
  //         // which can't be successfully represented as a signed
  //         // 64-bit number.  Java expects that the input value will
  //         // be returned in these cases.
  //         // There is also a small probability that floor(in)/ceil(in)
  //         // correctly truncates/rounds up the input value to
  //         // Long.MAX_VALUE.  In that case, this exception handling
  //         // code still does the correct thing.
  //         return in;
  //     }
  if (mode == kFloor) {
    __ FloorLD(out, in);
  } else if (mode == kCeil) {
    __ CeilLD(out, in);
  }
  __ Dmfc1(AT, out);
  __ MovD(out, in);                     // Preload 'in' as the fallback result.
  __ LoadConst64(TMP, kPrimLongMax);
  __ Beqc(AT, TMP, &done);

  //     double out = outLong;
  //     return out;
  __ Dmtc1(AT, out);
  __ Cvtdl(out, out);
  __ Bind(&done);
  // }
}
903
// Code generation for double java.lang.Math.floor(double).
void IntrinsicCodeGeneratorMIPS64::VisitMathFloor(HInvoke* invoke) {
  GenRoundingMode(invoke->GetLocations(), kFloor, GetAssembler());
}
907
// double java.lang.Math.ceil(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathCeil(HInvoke* invoke) {
  // Default kOutputOverlap: GenRoundingMode requires out != in.
  CreateFPToFP(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCeil(HInvoke* invoke) {
  GenRoundingMode(invoke->GetLocations(), kCeil, GetAssembler());
}
916
// byte libcore.io.Memory.peekByte(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();  // Used by the '__' macro.
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // LB sign-extends the loaded byte, matching Java's signed 'byte'.
  __ Lb(out, adr, 0);
}
929
// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();  // Used by the '__' macro.
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // LH sign-extends the loaded halfword, matching Java's signed 'short'.
  __ Lh(out, adr, 0);
}
942
// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();  // Used by the '__' macro.
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // LW sign-extends the 32-bit word into the 64-bit register.
  __ Lw(out, adr, 0);
}
955
// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();  // Used by the '__' macro.
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  __ Ld(out, adr, 0);
}
968
969static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
970 LocationSummary* locations = new (arena) LocationSummary(invoke,
971 LocationSummary::kNoCall,
972 kIntrinsified);
973 locations->SetInAt(0, Location::RequiresRegister());
974 locations->SetInAt(1, Location::RequiresRegister());
975}
976
// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();  // Used by the '__' macro.
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sb(val, adr, 0);
}
989
// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();  // Used by the '__' macro.
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sh(val, adr, 0);
}
1002
1003// void libcore.io.Memory.pokeInt(long address, int value)
1004void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
1005 CreateIntIntToVoidLocations(arena_, invoke);
1006}
1007
1008void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
1009 Mips64Assembler* assembler = GetAssembler();
1010 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
1011 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
1012
1013 __ Sw(val, adr, 00);
1014}
1015
// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();  // Used by the '__' macro.
  GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
  GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();

  __ Sd(val, adr, 0);
}
1028
// Thread java.lang.Thread.currentThread()
void IntrinsicLocationsBuilderMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();  // Used by the '__' macro.
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // Zero-extending 32-bit load of the java.lang.Thread peer reference from
  // the native Thread object addressed by the TR register.
  __ LoadFromOffset(kLoadUnsignedWord,
                    out,
                    TR,
                    Thread::PeerOffset<kMips64PointerSize>().Int32Value());
}
1046
Chris Larsen1360ada2015-09-04 23:38:16 -07001047static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
1048 LocationSummary* locations = new (arena) LocationSummary(invoke,
1049 LocationSummary::kNoCall,
1050 kIntrinsified);
1051 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1052 locations->SetInAt(1, Location::RequiresRegister());
1053 locations->SetInAt(2, Location::RequiresRegister());
1054 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1055}
1056
// Shared lowering for the Unsafe.get{Int,Long,Object}[Volatile] intrinsics:
// loads a value of 'type' from (base + offset) into the output register.
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorMIPS64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister trg = locations->Out().AsRegister<GpuRegister>();

  __ Daddu(TMP, base, offset);
  if (is_volatile) {
    // NOTE(review): the SYNC is emitted before the load; confirm this gives
    // the intended acquire semantics for volatile gets (a barrier after the
    // load would be the textbook load-acquire shape).
    __ Sync(0);
  }
  switch (type) {
    case Primitive::kPrimInt:
      __ Lw(trg, TMP, 0);
      break;

    case Primitive::kPrimNot:
      // Zero-extending 32-bit load for heap references.
      __ Lwu(trg, TMP, 0);
      break;

    case Primitive::kPrimLong:
      __ Ld(trg, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}
1094
// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}

// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

// Volatile variant: GenUnsafeGet additionally emits a memory barrier.
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}

// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}

// long sun.misc.Unsafe.getLongVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}

// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}

// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}
1148
1149static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
1150 LocationSummary* locations = new (arena) LocationSummary(invoke,
1151 LocationSummary::kNoCall,
1152 kIntrinsified);
1153 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1154 locations->SetInAt(1, Location::RequiresRegister());
1155 locations->SetInAt(2, Location::RequiresRegister());
1156 locations->SetInAt(3, Location::RequiresRegister());
1157}
1158
// Shared lowering for the Unsafe.put{Int,Long,Object}[Volatile|Ordered]
// intrinsics: stores 'value' of 'type' at (base + offset), with the memory
// barriers required by the volatile/ordered flavors, and dirties the GC card
// for reference stores.
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorMIPS64* codegen) {
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>();

  __ Daddu(TMP, base, offset);
  if (is_volatile || is_ordered) {
    // Barrier before the store: orders all prior accesses ahead of it.
    __ Sync(0);
  }
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      // References are stored as 32-bit words here (no compression/barrier
      // handling beyond the card mark below).
      __ Sw(value, TMP, 0);
      break;

    case Primitive::kPrimLong:
      __ Sd(value, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
  if (is_volatile) {
    // Volatile stores additionally need a trailing barrier.
    __ Sync(0);
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}
1201
// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}

// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

// Ordered variant: GenUnsafePut emits a barrier before the store only.
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}

// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

// Volatile variant: GenUnsafePut emits barriers before and after the store.
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}

// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}

// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}

// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}

// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
1318
Chris Larsen36427492015-10-23 02:19:38 -07001319static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
1320 LocationSummary* locations = new (arena) LocationSummary(invoke,
1321 LocationSummary::kNoCall,
1322 kIntrinsified);
1323 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1324 locations->SetInAt(1, Location::RequiresRegister());
1325 locations->SetInAt(2, Location::RequiresRegister());
1326 locations->SetInAt(3, Location::RequiresRegister());
1327 locations->SetInAt(4, Location::RequiresRegister());
1328
1329 locations->SetOut(Location::RequiresRegister());
1330}
1331
// Shared lowering for the Unsafe.compareAndSwap* intrinsics: an LL/SC
// (load-linked/store-conditional) retry loop implementing a full-barrier
// compare-and-set at (base + offset). 'out' receives the boolean result.
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorMIPS64* codegen) {
  Mips64Assembler* assembler = codegen->GetAssembler();
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister expected = locations->InAt(3).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(4).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // 'out' is clobbered inside the loop while these inputs are still live.
  DCHECK_NE(base, out);
  DCHECK_NE(offset, out);
  DCHECK_NE(expected, out);

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  Mips64Label loop_head, exit_loop;
  __ Daddu(TMP, base, offset);
  __ Sync(0);
  __ Bind(&loop_head);
  if (type == Primitive::kPrimLong) {
    __ Lld(out, TMP);
  } else {
    // Note: We will need a read barrier here, when read barrier
    // support is added to the MIPS64 back end.
    __ Ll(out, TMP);
  }
  __ Dsubu(out, out, expected);     // If we didn't get the 'expected'
  __ Sltiu(out, out, 1);            // value, set 'out' to false, and
  __ Beqzc(out, &exit_loop);        // return.
  __ Move(out, value);  // Use 'out' for the 'store conditional' instruction.
                        // If we use 'value' directly, we would lose 'value'
                        // in the case that the store fails.  Whether the
                        // store succeeds, or fails, it will load the
                        // correct boolean value into the 'out' register.
  if (type == Primitive::kPrimLong) {
    __ Scd(out, TMP);
  } else {
    __ Sc(out, TMP);
  }
  __ Beqzc(out, &loop_head);        // If we couldn't do the read-modify-write
                                    // cycle atomically then retry.
  __ Bind(&exit_loop);
  __ Sync(0);
}
1384
// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}

// boolean sun.misc.Unsafe.compareAndSwapLong(Object o, long offset, long expected, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}

// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}
1411
// char java.lang.String.charAt(int index)
void IntrinsicLocationsBuilderMIPS64::VisitStringCharAt(HInvoke* invoke) {
  // kCallOnSlowPath: out-of-range indices fall back to the runtime.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
1421
// Fast path for String.charAt: bounds check, then a halfword load from the
// string's inline char array; out-of-range indices take the slow path.
void IntrinsicCodeGeneratorMIPS64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();

  // Location of reference to data array
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();

  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister idx = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // TODO: Maybe we can support range check elimination. Overall,
  //       though, I think it's not worth the cost.
  // TODO: For simplicity, the index parameter is requested in a
  //       register, so different from Quick we will not optimize the
  //       code for constants (which would save a register).

  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);

  // Load the string size
  __ Lw(TMP, obj, count_offset);
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Revert to slow path if idx is too large, or negative.
  // (Bgeuc is an unsigned compare, so a negative index also branches.)
  __ Bgeuc(idx, TMP, slow_path->GetEntryLabel());

  // out = obj[2*idx].
  __ Sll(TMP, idx, 1);              // idx * 2 (chars are 16 bits wide)
  __ Daddu(TMP, TMP, obj);          // Address of char at location idx
  __ Lhu(out, TMP, value_offset);   // Load char at location idx

  __ Bind(slow_path->GetExitLabel());
}
1457
// int java.lang.String.compareTo(String anotherString)
void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
  // kCall: the comparison is delegated to the pStringCompareTo runtime
  // entrypoint, so arguments go in the runtime calling convention registers.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}
1469
// Calls the pStringCompareTo runtime entrypoint; a null argument takes the
// slow path (which raises NullPointerException).
void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(argument, slow_path->GetEntryLabel());

  // Load the entrypoint address from the Thread object and call it.
  __ LoadFromOffset(kLoadDoubleword,
                    TMP,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize, pStringCompareTo).Int32Value());
  __ Jalr(TMP);
  __ Nop();  // Branch delay slot.
  __ Bind(slow_path->GetExitLabel());
}
1490
// boolean java.lang.String.equals(Object anObject)
void IntrinsicLocationsBuilderMIPS64::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());

  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}
1505
// Emits the String.equals(Object) fast path. Short-circuits on: identical
// registers, null argument, reference equality, class mismatch, and length
// mismatch; otherwise compares the character arrays 8 bytes (4 UTF-16
// chars) at a time, relying on object-alignment zero padding.
void IntrinsicCodeGeneratorMIPS64::VisitStringEquals(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Receiver ("this"), argument ("anObject"), and the boolean result.
  GpuRegister str = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister arg = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister temp2 = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister temp3 = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label loop;
  Mips64Label end;
  Mips64Label return_true;
  Mips64Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this", and the register
  // containing the pointer to "anObject" are the same register then
  // "this", and "anObject" are the same object and we can
  // short-circuit the logic to a true result.
  if (str == arg) {
    __ LoadConst64(out, 1);
    return;
  }

  // Check if input is null, return false if it is.
  __ Beqzc(arg, &return_false);

  // Reference equality check, return true if same reference.
  __ Beqc(str, arg, &return_true);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ Lw(temp1, str, class_offset);
  __ Lw(temp2, arg, class_offset);
  __ Bnec(temp1, temp2, &return_false);

  // Load lengths of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if lengths are equal, return false if they're not.
  __ Bnec(temp1, temp2, &return_false);
  // Return true if both strings are empty.
  __ Beqzc(temp1, &return_true);

  // Don't overwrite input registers
  // (TMP and temp3 become the two scan cursors).
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  // Loop to compare strings 4 characters at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to be 8-byte aligned.
  __ Bind(&loop);
  __ Ld(out, TMP, value_offset);       // Load 8 bytes (4 chars) of "this".
  __ Ld(temp2, temp3, value_offset);   // Load 8 bytes (4 chars) of "anObject".
  __ Bnec(out, temp2, &return_false);
  __ Daddiu(TMP, TMP, 8);              // Advance both cursors by 8 bytes.
  __ Daddiu(temp3, temp3, 8);
  __ Addiu(temp1, temp1, -4);          // temp1 counts the remaining chars.
  __ Bgtzc(temp1, &loop);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst64(out, 1);
  __ Bc(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst64(out, 0);
  __ Bind(&end);
}
1592
// Common code for String.indexOf(int) and String.indexOf(int, int):
// validates the code point, then calls the pIndexOf runtime stub.
// When `start_at_zero` is true the start index (A2) is cleared here;
// otherwise a negative fromIndex is clamped to zero before the call.
static void GenerateStringIndexOf(HInvoke* invoke,
                                  Mips64Assembler* assembler,
                                  CodeGeneratorMIPS64* codegen,
                                  ArenaAllocator* allocator,
                                  bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  // With start_at_zero a temp (A2, per the DCHECK below) was reserved by the
  // locations builder; otherwise TMP serves as scratch for the range check.
  GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we
  // don't know statically, or directly dispatch if we have a constant.
  SlowPathCodeMIPS64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (!IsUint<16>(invoke->InputAt(1)->AsIntConstant()->GetValue())) {
      // Always needs the slow-path. We could directly dispatch to it,
      // but this case should be rare, so for simplicity just put the
      // full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
      codegen->AddSlowPath(slow_path);
      __ Bc(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    // Code point not known statically: compare against 0xFFFF at runtime.
    GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
    __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
    slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
    codegen->AddSlowPath(slow_path);
    __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel()); // UTF-16 required
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, A2);
    // Start-index = 0.
    __ Clear(tmp_reg);
  } else {
    __ Slt(TMP, A2, ZERO); // if fromIndex < 0
    __ Seleqz(A2, A2, TMP); // fromIndex = 0
  }

  // Load the pIndexOf entrypoint from the thread register and call it.
  // NOTE(review): unlike the other runtime calls in this file there is no
  // RecordPcInfo before the call -- presumably the pIndexOf stub cannot
  // throw or trigger GC; confirm.
  __ LoadFromOffset(kLoadDoubleword,
                    TMP,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize, pIndexOf).Int32Value());
  __ Jalr(TMP);
  __ Nop();

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
1646
1647// int java.lang.String.indexOf(int ch)
1648void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
1649 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1650 LocationSummary::kCall,
1651 kIntrinsified);
1652 // We have a hand-crafted assembly stub that follows the runtime
1653 // calling convention. So it's best to align the inputs accordingly.
1654 InvokeRuntimeCallingConvention calling_convention;
1655 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1656 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1657 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1658 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1659
1660 // Need a temp for slow-path codepoint compare, and need to send start-index=0.
1661 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1662}
1663
1664void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001665 GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001666}
1667
1668// int java.lang.String.indexOf(int ch, int fromIndex)
1669void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
1670 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1671 LocationSummary::kCall,
1672 kIntrinsified);
1673 // We have a hand-crafted assembly stub that follows the runtime
1674 // calling convention. So it's best to align the inputs accordingly.
1675 InvokeRuntimeCallingConvention calling_convention;
1676 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1677 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1678 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1679 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1680 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1681}
1682
1683void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001684 GenerateStringIndexOf(
1685 invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001686}
1687
Roland Levillaincc3839c2016-02-29 16:23:48 +00001688// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001689void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
1690 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1691 LocationSummary::kCall,
1692 kIntrinsified);
1693 InvokeRuntimeCallingConvention calling_convention;
1694 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1695 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1696 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1697 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
1698 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1699 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1700}
1701
// Emits a call to the pAllocStringFromBytes entrypoint; a null `data`
// array is diverted to the intrinsic slow path instead.
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // The entrypoint cannot handle a null byte array; take the slow path.
  GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(byte_array, slow_path->GetEntryLabel());

  // Load the entrypoint address from the thread register (TR) and call it.
  __ LoadFromOffset(kLoadDoubleword,
                    TMP,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize,
                                            pAllocStringFromBytes).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Jalr(TMP);
  __ Nop();
  __ Bind(slow_path->GetExitLabel());
}
1721
Roland Levillaincc3839c2016-02-29 16:23:48 +00001722// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001723void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
1724 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1725 LocationSummary::kCall,
1726 kIntrinsified);
1727 InvokeRuntimeCallingConvention calling_convention;
1728 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1729 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1730 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1731 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1732 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1733}
1734
// Emits an unconditional call to the pAllocStringFromChars entrypoint.
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();

  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.

  // Load the entrypoint address from the thread register (TR) and call it.
  __ LoadFromOffset(kLoadDoubleword,
                    TMP,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize,
                                            pAllocStringFromChars).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Jalr(TMP);
  __ Nop();
}
1753
1754// java.lang.String.String(String original)
1755void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
1756 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1757 LocationSummary::kCall,
1758 kIntrinsified);
1759 InvokeRuntimeCallingConvention calling_convention;
1760 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1761 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1762 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1763 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
1764 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1765}
1766
// Emits a call to the pAllocStringFromString entrypoint; a null source
// string is diverted to the intrinsic slow path instead.
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // The entrypoint cannot handle a null source string; take the slow path.
  GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(string_to_copy, slow_path->GetEntryLabel());

  // Load the entrypoint address from the thread register (TR) and call it.
  __ LoadFromOffset(kLoadDoubleword,
                    TMP,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize,
                                            pAllocStringFromString).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Jalr(TMP);
  __ Nop();
  __ Bind(slow_path->GetExitLabel());
}
1786
// Unimplemented intrinsics.

// Stamps out empty locations-builder and code-generator visitors, so that
// intrinsics without a MIPS64 implementation fall back to the default
// (non-intrinsified) invoke.
#define UNIMPLEMENTED_INTRINSIC(Name) \
void IntrinsicLocationsBuilderMIPS64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
} \
void IntrinsicCodeGeneratorMIPS64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}

UNIMPLEMENTED_INTRINSIC(IntegerBitCount)
UNIMPLEMENTED_INTRINSIC(LongBitCount)

UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)

UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)

// libm-backed java.lang.Math operations.
UNIMPLEMENTED_INTRINSIC(MathCos)
UNIMPLEMENTED_INTRINSIC(MathSin)
UNIMPLEMENTED_INTRINSIC(MathAcos)
UNIMPLEMENTED_INTRINSIC(MathAsin)
UNIMPLEMENTED_INTRINSIC(MathAtan)
UNIMPLEMENTED_INTRINSIC(MathAtan2)
UNIMPLEMENTED_INTRINSIC(MathCbrt)
UNIMPLEMENTED_INTRINSIC(MathCosh)
UNIMPLEMENTED_INTRINSIC(MathExp)
UNIMPLEMENTED_INTRINSIC(MathExpm1)
UNIMPLEMENTED_INTRINSIC(MathHypot)
UNIMPLEMENTED_INTRINSIC(MathLog)
UNIMPLEMENTED_INTRINSIC(MathLog10)
UNIMPLEMENTED_INTRINSIC(MathNextAfter)
UNIMPLEMENTED_INTRINSIC(MathSinh)
UNIMPLEMENTED_INTRINSIC(MathTan)
UNIMPLEMENTED_INTRINSIC(MathTanh)

UNIMPLEMENTED_INTRINSIC(FloatIsInfinite)
UNIMPLEMENTED_INTRINSIC(DoubleIsInfinite)

UNIMPLEMENTED_INTRINSIC(IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(LongLowestOneBit)

// Handled as HIR instructions.
UNIMPLEMENTED_INTRINSIC(FloatFloatToIntBits)
UNIMPLEMENTED_INTRINSIC(DoubleDoubleToLongBits)
UNIMPLEMENTED_INTRINSIC(FloatIsNaN)
UNIMPLEMENTED_INTRINSIC(DoubleIsNaN)
UNIMPLEMENTED_INTRINSIC(IntegerCompare)
UNIMPLEMENTED_INTRINSIC(LongCompare)
UNIMPLEMENTED_INTRINSIC(IntegerSignum)
UNIMPLEMENTED_INTRINSIC(LongSignum)

#undef UNIMPLEMENTED_INTRINSIC
1843
1844#undef __
1845
1846} // namespace mips64
1847} // namespace art