blob: 1a7b06d2069fc9a726a01e9ba8fec30ddbea0a58 [file] [log] [blame]
Chris Larsen3039e382015-08-26 07:54:08 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "intrinsics_mips64.h"
18
19#include "arch/mips64/instruction_set_features_mips64.h"
20#include "art_method.h"
21#include "code_generator_mips64.h"
22#include "entrypoints/quick/quick_entrypoints.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070023#include "heap_poisoning.h"
Chris Larsen3039e382015-08-26 07:54:08 -070024#include "intrinsics.h"
25#include "mirror/array-inl.h"
Andreas Gampe895f9222017-07-05 09:53:32 -070026#include "mirror/object_array-inl.h"
Chris Larsen3039e382015-08-26 07:54:08 -070027#include "mirror/string.h"
Andreas Gampe508fdf32017-06-05 16:42:13 -070028#include "scoped_thread_state_change-inl.h"
Chris Larsen3039e382015-08-26 07:54:08 -070029#include "thread.h"
30#include "utils/mips64/assembler_mips64.h"
31#include "utils/mips64/constants_mips64.h"
32
33namespace art {
34
35namespace mips64 {
36
37IntrinsicLocationsBuilderMIPS64::IntrinsicLocationsBuilderMIPS64(CodeGeneratorMIPS64* codegen)
Vladimir Markoca6fff82017-10-03 14:49:14 +010038 : codegen_(codegen), allocator_(codegen->GetGraph()->GetAllocator()) {
Chris Larsen3039e382015-08-26 07:54:08 -070039}
40
41Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() {
42 return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler());
43}
44
45ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
Vladimir Markoca6fff82017-10-03 14:49:14 +010046 return codegen_->GetGraph()->GetAllocator();
Chris Larsen3039e382015-08-26 07:54:08 -070047}
48
Lena Djokic0d2cab52018-03-06 15:20:45 +010049inline bool IntrinsicCodeGeneratorMIPS64::HasMsa() const {
50 return codegen_->GetInstructionSetFeatures().HasMsa();
51}
52
Chris Larsen9701c2e2015-09-04 17:22:47 -070053#define __ codegen->GetAssembler()->
54
// Used by the intrinsic slow path after calling back into the managed
// implementation: reconciles the invoke's expected output location (trg)
// with the MIPS64 ABI return registers (V0 for integral/reference, F0
// for floating point). No-op when trg already is the return register.
static void MoveFromReturnRegister(Location trg,
                                   DataType::Type type,
                                   CodeGeneratorMIPS64* codegen) {
  if (!trg.IsValid()) {
    // A void intrinsic has no result to move.
    DCHECK_EQ(type, DataType::Type::kVoid);
    return;
  }

  DCHECK_NE(type, DataType::Type::kVoid);

  if (DataType::IsIntegralType(type) || type == DataType::Type::kReference) {
    GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
    if (trg_reg != V0) {
      // NOTE(review): Move(V0, trg_reg) writes trg_reg INTO V0, whereas
      // other back-ends copy the return register into trg here. In
      // practice intrinsic call outputs appear to be allocated to V0, so
      // this branch looks dead — confirm operand order before relying on it.
      __ Move(V0, trg_reg);
    }
  } else {
    FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
    if (trg_reg != F0) {
      // NOTE(review): same operand-order question as the integral case
      // above applies to these FP moves.
      if (type == DataType::Type::kFloat32) {
        __ MovS(F0, trg_reg);
      } else {
        __ MovD(F0, trg_reg);
      }
    }
  }
}
81
82static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
83 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
84 IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
85}
86
// Slow-path for fallback (calling the managed code to handle the
// intrinsic) in an intrinsified call. This will copy the arguments
// into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations
//       given by the invoke's location summary. If an intrinsic
//       modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit IntrinsicSlowPathMIPS64(HInvoke* invoke)
      : SlowPathCodeMIPS64(invoke), invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);

    __ Bind(GetEntryLabel());

    // Preserve every live register across the call to managed code.
    SaveLiveRegisters(codegen, invoke_->GetLocations());

    // Shuffle arguments into the standard calling-convention slots.
    MoveArguments(invoke_, codegen);

    // Perform the call itself; the callee method pointer goes in A0.
    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(
          invoke_->AsInvokeStaticOrDirect(), Location::RegisterLocation(A0), this);
    } else {
      codegen->GenerateVirtualCall(
          invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0), this);
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    // Resume at the instruction following the intrinsified invoke.
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
};
137
138#undef __
139
Chris Larsen3039e382015-08-26 07:54:08 -0700140bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
141 Dispatch(invoke);
142 LocationSummary* res = invoke->GetLocations();
143 return res != nullptr && res->Intrinsified();
144}
145
146#define __ assembler->
147
Vladimir Markoca6fff82017-10-03 14:49:14 +0100148static void CreateFPToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
149 LocationSummary* locations =
150 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen3039e382015-08-26 07:54:08 -0700151 locations->SetInAt(0, Location::RequiresFpuRegister());
152 locations->SetOut(Location::RequiresRegister());
153}
154
155static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
156 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
157 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
158
159 if (is64bit) {
160 __ Dmfc1(out, in);
161 } else {
162 __ Mfc1(out, in);
163 }
164}
165
166// long java.lang.Double.doubleToRawLongBits(double)
167void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100168 CreateFPToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700169}
170
171void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000172 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700173}
174
175// int java.lang.Float.floatToRawIntBits(float)
176void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100177 CreateFPToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700178}
179
180void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000181 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700182}
183
Vladimir Markoca6fff82017-10-03 14:49:14 +0100184static void CreateIntToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
185 LocationSummary* locations =
186 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen3039e382015-08-26 07:54:08 -0700187 locations->SetInAt(0, Location::RequiresRegister());
188 locations->SetOut(Location::RequiresFpuRegister());
189}
190
191static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
192 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
193 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
194
195 if (is64bit) {
196 __ Dmtc1(in, out);
197 } else {
198 __ Mtc1(in, out);
199 }
200}
201
202// double java.lang.Double.longBitsToDouble(long)
203void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100204 CreateIntToFPLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700205}
206
207void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000208 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700209}
210
211// float java.lang.Float.intBitsToFloat(int)
212void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100213 CreateIntToFPLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700214}
215
216void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000217 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700218}
219
Vladimir Markoca6fff82017-10-03 14:49:14 +0100220static void CreateIntToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
221 LocationSummary* locations =
222 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen3039e382015-08-26 07:54:08 -0700223 locations->SetInAt(0, Location::RequiresRegister());
224 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
225}
226
227static void GenReverseBytes(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100228 DataType::Type type,
Chris Larsen3039e382015-08-26 07:54:08 -0700229 Mips64Assembler* assembler) {
230 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
231 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
232
233 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100234 case DataType::Type::kInt16:
Chris Larsen3039e382015-08-26 07:54:08 -0700235 __ Dsbh(out, in);
236 __ Seh(out, out);
237 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100238 case DataType::Type::kInt32:
Chris Larsen3039e382015-08-26 07:54:08 -0700239 __ Rotr(out, in, 16);
240 __ Wsbh(out, out);
241 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100242 case DataType::Type::kInt64:
Chris Larsen3039e382015-08-26 07:54:08 -0700243 __ Dsbh(out, in);
244 __ Dshd(out, out);
245 break;
246 default:
247 LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
248 UNREACHABLE();
249 }
250}
251
252// int java.lang.Integer.reverseBytes(int)
253void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100254 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700255}
256
257void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100258 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700259}
260
261// long java.lang.Long.reverseBytes(long)
262void IntrinsicLocationsBuilderMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100263 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700264}
265
266void IntrinsicCodeGeneratorMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100267 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700268}
269
270// short java.lang.Short.reverseBytes(short)
271void IntrinsicLocationsBuilderMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100272 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700273}
274
275void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100276 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700277}
278
Chris Larsen81284372015-10-21 15:28:53 -0700279static void GenNumberOfLeadingZeroes(LocationSummary* locations,
280 bool is64bit,
281 Mips64Assembler* assembler) {
Chris Larsen3039e382015-08-26 07:54:08 -0700282 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
283 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
284
285 if (is64bit) {
286 __ Dclz(out, in);
287 } else {
288 __ Clz(out, in);
289 }
290}
291
292// int java.lang.Integer.numberOfLeadingZeros(int i)
293void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100294 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700295}
296
297void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000298 GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700299}
300
301// int java.lang.Long.numberOfLeadingZeros(long i)
302void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100303 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700304}
305
306void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000307 GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen0646da72015-09-22 16:02:40 -0700308}
309
Chris Larsen81284372015-10-21 15:28:53 -0700310static void GenNumberOfTrailingZeroes(LocationSummary* locations,
311 bool is64bit,
312 Mips64Assembler* assembler) {
Chris Larsen0646da72015-09-22 16:02:40 -0700313 Location in = locations->InAt(0);
314 Location out = locations->Out();
315
316 if (is64bit) {
317 __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
318 __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
319 __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
320 __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
321 } else {
322 __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
323 __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
324 __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
325 __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
326 }
327}
328
329// int java.lang.Integer.numberOfTrailingZeros(int i)
330void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100331 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen0646da72015-09-22 16:02:40 -0700332}
333
334void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000335 GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen0646da72015-09-22 16:02:40 -0700336}
337
338// int java.lang.Long.numberOfTrailingZeros(long i)
339void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100340 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen0646da72015-09-22 16:02:40 -0700341}
342
343void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000344 GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700345}
346
347static void GenReverse(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100348 DataType::Type type,
Chris Larsen3039e382015-08-26 07:54:08 -0700349 Mips64Assembler* assembler) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100350 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Chris Larsen3039e382015-08-26 07:54:08 -0700351
352 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
353 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
354
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100355 if (type == DataType::Type::kInt32) {
Chris Larsen3039e382015-08-26 07:54:08 -0700356 __ Rotr(out, in, 16);
357 __ Wsbh(out, out);
358 __ Bitswap(out, out);
359 } else {
360 __ Dsbh(out, in);
361 __ Dshd(out, out);
362 __ Dbitswap(out, out);
363 }
364}
365
366// int java.lang.Integer.reverse(int)
367void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverse(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100368 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700369}
370
371void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverse(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100372 GenReverse(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700373}
374
375// long java.lang.Long.reverse(long)
376void IntrinsicLocationsBuilderMIPS64::VisitLongReverse(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100377 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700378}
379
380void IntrinsicCodeGeneratorMIPS64::VisitLongReverse(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100381 GenReverse(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700382}
383
Vladimir Markoca6fff82017-10-03 14:49:14 +0100384static void CreateFPToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
385 LocationSummary* locations =
386 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700387 locations->SetInAt(0, Location::RequiresFpuRegister());
388 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
389}
390
// Emits a population count (number of set bits) of a 32- or 64-bit value.
// Uses the MSA PCNT.W/PCNT.D vector instruction when available, otherwise
// the parallel bit-counting ("SWAR") sequence described below.
static void GenBitCount(LocationSummary* locations,
                        const DataType::Type type,
                        const bool hasMsa,
                        Mips64Assembler* assembler) {
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();

  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  // https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel
  //
  // A generalization of the best bit counting method to integers of
  // bit-widths up to 128 (parameterized by type T) is this:
  //
  // v = v - ((v >> 1) & (T)~(T)0/3);                           // temp
  // v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3);      // temp
  // v = (v + (v >> 4)) & (T)~(T)0/255*15;                      // temp
  // c = (T)(v * ((T)~(T)0/255)) >> (sizeof(T) - 1) * BITS_PER_BYTE;  // count
  //
  // For comparison, for 32-bit quantities, this algorithm can be executed
  // using 20 MIPS instructions (the calls to LoadConst32() generate two
  // machine instructions each for the values being used in this algorithm).
  // A(n unrolled) loop-based algorithm requires 25 instructions.
  //
  // For a 64-bit operand this can be performed in 24 instructions compared
  // to a(n unrolled) loop based algorithm which requires 38 instructions.
  //
  // There are algorithms which are faster in the cases where very few
  // bits are set but the algorithm here attempts to minimize the total
  // number of instructions executed even when a large number of bits
  // are set.
  if (hasMsa) {
    // Round-trip through the FP/vector register FTMP so PCNT can do the
    // count in a single instruction.
    if (type == DataType::Type::kInt32) {
      __ Mtc1(in, FTMP);
      __ PcntW(static_cast<VectorRegister>(FTMP), static_cast<VectorRegister>(FTMP));
      __ Mfc1(out, FTMP);
    } else {
      __ Dmtc1(in, FTMP);
      __ PcntD(static_cast<VectorRegister>(FTMP), static_cast<VectorRegister>(FTMP));
      __ Dmfc1(out, FTMP);
    }
  } else {
    if (type == DataType::Type::kInt32) {
      __ Srl(TMP, in, 1);
      __ LoadConst32(AT, 0x55555555);
      __ And(TMP, TMP, AT);
      __ Subu(TMP, in, TMP);
      __ LoadConst32(AT, 0x33333333);
      __ And(out, TMP, AT);
      __ Srl(TMP, TMP, 2);
      __ And(TMP, TMP, AT);
      __ Addu(TMP, out, TMP);
      __ Srl(out, TMP, 4);
      __ Addu(out, out, TMP);
      __ LoadConst32(AT, 0x0F0F0F0F);
      __ And(out, out, AT);
      __ LoadConst32(TMP, 0x01010101);
      __ MulR6(out, out, TMP);
      __ Srl(out, out, 24);
    } else {
      __ Dsrl(TMP, in, 1);
      __ LoadConst64(AT, 0x5555555555555555L);
      __ And(TMP, TMP, AT);
      __ Dsubu(TMP, in, TMP);
      __ LoadConst64(AT, 0x3333333333333333L);
      __ And(out, TMP, AT);
      __ Dsrl(TMP, TMP, 2);
      __ And(TMP, TMP, AT);
      __ Daddu(TMP, out, TMP);
      __ Dsrl(out, TMP, 4);
      __ Daddu(out, out, TMP);
      __ LoadConst64(AT, 0x0F0F0F0F0F0F0F0FL);
      __ And(out, out, AT);
      __ LoadConst64(TMP, 0x0101010101010101L);
      __ Dmul(out, out, TMP);
      // Final shift extracts the top byte (56 = 32 + 24) holding the count.
      __ Dsrl32(out, out, 24);
    }
  }
}
470
471// int java.lang.Integer.bitCount(int)
472void IntrinsicLocationsBuilderMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100473 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen7fda7852016-04-21 16:00:36 -0700474}
475
476void IntrinsicCodeGeneratorMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
Lena Djokic0d2cab52018-03-06 15:20:45 +0100477 GenBitCount(invoke->GetLocations(), DataType::Type::kInt32, HasMsa(), GetAssembler());
Chris Larsen7fda7852016-04-21 16:00:36 -0700478}
479
480// int java.lang.Long.bitCount(long)
481void IntrinsicLocationsBuilderMIPS64::VisitLongBitCount(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100482 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen7fda7852016-04-21 16:00:36 -0700483}
484
485void IntrinsicCodeGeneratorMIPS64::VisitLongBitCount(HInvoke* invoke) {
Lena Djokic0d2cab52018-03-06 15:20:45 +0100486 GenBitCount(invoke->GetLocations(), DataType::Type::kInt64, HasMsa(), GetAssembler());
Chris Larsen7fda7852016-04-21 16:00:36 -0700487}
488
// Emits floating-point min/max with Java semantics: if either input is a
// NaN the result is NaN, otherwise the smaller/larger input.
static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        DataType::Type type,
                        Mips64Assembler* assembler) {
  FpuRegister a = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister b = locations->InAt(1).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  Mips64Label noNaNs;
  Mips64Label done;
  // Scratch register for the NaN-select: reuse 'out' when it does not
  // alias either input, otherwise fall back to FTMP.
  FpuRegister ftmp = ((out != a) && (out != b)) ? out : FTMP;

  // When Java computes min/max it prefers a NaN to a number; the
  // behavior of MIPSR6 is to prefer numbers to NaNs, i.e., if one of
  // the inputs is a NaN and the other is a valid number, the MIPS
  // instruction will return the number; Java wants the NaN value
  // returned. This is why there is extra logic preceding the use of
  // the MIPS min.fmt/max.fmt instructions. If either a, or b holds a
  // NaN, return the NaN, otherwise return the min/max.
  if (type == DataType::Type::kFloat64) {
    // CMP.UN sets all-ones in FTMP iff a and b are unordered (a NaN present).
    __ CmpUnD(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqD(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelD(ftmp, a, b);

    if (ftmp != out) {
      __ MovD(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinD(out, a, b);
    } else {
      __ MaxD(out, a, b);
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat32);
    // Same structure as the double case, using the single-precision forms.
    __ CmpUnS(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqS(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelS(ftmp, a, b);

    if (ftmp != out) {
      __ MovS(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinS(out, a, b);
    } else {
      __ MaxS(out, a, b);
    }
  }

  __ Bind(&done);
}
557
Vladimir Markoca6fff82017-10-03 14:49:14 +0100558static void CreateFPFPToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
559 LocationSummary* locations =
560 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700561 locations->SetInAt(0, Location::RequiresFpuRegister());
562 locations->SetInAt(1, Location::RequiresFpuRegister());
563 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
564}
565
566// double java.lang.Math.min(double, double)
567void IntrinsicLocationsBuilderMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100568 CreateFPFPToFPLocations(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700569}
570
571void IntrinsicCodeGeneratorMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100572 GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, DataType::Type::kFloat64, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700573}
574
575// float java.lang.Math.min(float, float)
576void IntrinsicLocationsBuilderMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100577 CreateFPFPToFPLocations(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700578}
579
580void IntrinsicCodeGeneratorMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100581 GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, DataType::Type::kFloat32, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700582}
583
584// double java.lang.Math.max(double, double)
585void IntrinsicLocationsBuilderMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100586 CreateFPFPToFPLocations(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700587}
588
589void IntrinsicCodeGeneratorMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100590 GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, DataType::Type::kFloat64, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700591}
592
593// float java.lang.Math.max(float, float)
594void IntrinsicLocationsBuilderMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100595 CreateFPFPToFPLocations(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700596}
597
598void IntrinsicCodeGeneratorMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100599 GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, DataType::Type::kFloat32, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700600}
601
// Emits integer min/max using the MIPS64r6 SELEQZ/SELNEZ conditional
// selects; careful operand ordering avoids clobbering an input that
// aliases the output.
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      Mips64Assembler* assembler) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (lhs == rhs) {
    // min(x, x) == max(x, x) == x; just copy if needed.
    if (out != lhs) {
      __ Move(out, lhs);
    }
  } else {
    // Some architectures, such as ARM and MIPS (prior to r6), have a
    // conditional move instruction which only changes the target
    // (output) register if the condition is true (MIPS prior to r6 had
    // MOVF, MOVT, and MOVZ). The SELEQZ and SELNEZ instructions always
    // change the target (output) register. If the condition is true the
    // output register gets the contents of the "rs" register; otherwise,
    // the output register is set to zero. One consequence of this is
    // that to implement something like "rd = c==0 ? rs : rt" MIPS64r6
    // needs to use a pair of SELEQZ/SELNEZ instructions. After
    // executing this pair of instructions one of the output registers
    // from the pair will necessarily contain zero. Then the code ORs the
    // output registers from the SELEQZ/SELNEZ instructions to get the
    // final result.
    //
    // The initial test to see if the output register is same as the
    // first input register is needed to make sure that value in the
    // first input register isn't clobbered before we've finished
    // computing the output value. The logic in the corresponding else
    // clause performs the same task but makes sure the second input
    // register isn't clobbered in the event that it's the same register
    // as the output register; the else clause also handles the case
    // where the output register is distinct from both the first, and the
    // second input registers.
    if (out == lhs) {
      __ Slt(AT, rhs, lhs);
      if (is_min) {
        __ Seleqz(out, lhs, AT);
        __ Selnez(AT, rhs, AT);
      } else {
        __ Selnez(out, lhs, AT);
        __ Seleqz(AT, rhs, AT);
      }
    } else {
      __ Slt(AT, lhs, rhs);
      if (is_min) {
        __ Seleqz(out, rhs, AT);
        __ Selnez(AT, lhs, AT);
      } else {
        __ Selnez(out, rhs, AT);
        __ Seleqz(AT, lhs, AT);
      }
    }
    // Exactly one of {out, AT} is zero here; OR combines them.
    __ Or(out, out, AT);
  }
}
659
Vladimir Markoca6fff82017-10-03 14:49:14 +0100660static void CreateIntIntToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
661 LocationSummary* locations =
662 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700663 locations->SetInAt(0, Location::RequiresRegister());
664 locations->SetInAt(1, Location::RequiresRegister());
665 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
666}
667
// int java.lang.Math.min(int, int)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
  // Two GPR inputs, one GPR output.
  CreateIntIntToIntLocations(allocator_, invoke);
}
672
void IntrinsicCodeGeneratorMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
  // Delegates to the shared min/max generator; is_min selects min semantics.
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}
676
// long java.lang.Math.min(long, long)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
  // Same register shape as the int variant (64-bit GPRs on MIPS64).
  CreateIntIntToIntLocations(allocator_, invoke);
}
681
void IntrinsicCodeGeneratorMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
  // Same code path as the int min; SLT/SELEQZ/SELNEZ work on full GPRs.
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}
685
// int java.lang.Math.max(int, int)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  // Two GPR inputs, one GPR output.
  CreateIntIntToIntLocations(allocator_, invoke);
}
690
void IntrinsicCodeGeneratorMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  // Delegates to the shared min/max generator with max semantics.
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}
694
// long java.lang.Math.max(long, long)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  // Same register shape as the int variant (64-bit GPRs on MIPS64).
  CreateIntIntToIntLocations(allocator_, invoke);
}
699
void IntrinsicCodeGeneratorMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  // Same code path as the int max; SLT/SELEQZ/SELNEZ work on full GPRs.
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}
703
// double java.lang.Math.sqrt(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathSqrt(HInvoke* invoke) {
  // One FPU input, one FPU output.
  CreateFPToFPLocations(allocator_, invoke);
}
708
709void IntrinsicCodeGeneratorMIPS64::VisitMathSqrt(HInvoke* invoke) {
710 LocationSummary* locations = invoke->GetLocations();
711 Mips64Assembler* assembler = GetAssembler();
712 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
713 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
714
715 __ SqrtD(out, in);
716}
717
Vladimir Markoca6fff82017-10-03 14:49:14 +0100718static void CreateFPToFP(ArenaAllocator* allocator,
Chris Larsen81284372015-10-21 15:28:53 -0700719 HInvoke* invoke,
720 Location::OutputOverlap overlaps = Location::kOutputOverlap) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100721 LocationSummary* locations =
722 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700723 locations->SetInAt(0, Location::RequiresFpuRegister());
Chris Larsen81284372015-10-21 15:28:53 -0700724 locations->SetOut(Location::RequiresFpuRegister(), overlaps);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700725}
726
// double java.lang.Math.rint(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathRint(HInvoke* invoke) {
  // rint.d writes the output only once, so in and out may share a register.
  CreateFPToFP(allocator_, invoke, Location::kNoOutputOverlap);
}
731
732void IntrinsicCodeGeneratorMIPS64::VisitMathRint(HInvoke* invoke) {
733 LocationSummary* locations = invoke->GetLocations();
734 Mips64Assembler* assembler = GetAssembler();
735 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
736 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
737
738 __ RintD(out, in);
739}
740
// double java.lang.Math.floor(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathFloor(HInvoke* invoke) {
  // Default kOutputOverlap: GenRoundingMode requires in != out.
  CreateFPToFP(allocator_, invoke);
}
745
// Bit-mask of class.d result classes for which floor()/ceil() must return
// the input unchanged: zeroes, infinities and NaNs (see GenRoundingMode).
const constexpr uint16_t kFPLeaveUnchanged = kPositiveZero |
                                             kPositiveInfinity |
                                             kNegativeZero |
                                             kNegativeInfinity |
                                             kQuietNaN |
                                             kSignalingNaN;
Chris Larsen0b7ac982015-09-04 12:54:28 -0700752
// Rounding direction selector for GenRoundingMode.
enum FloatRoundingMode {
  kFloor,  // Round toward negative infinity (floor.l.d).
  kCeil,   // Round toward positive infinity (ceil.l.d).
};
757
// Common generator for Math.floor(double) and Math.ceil(double).
// Returns the input unchanged for NaNs, infinities, zeroes, and for
// inputs whose rounded value is not representable as a signed 64-bit
// integer (floor.l.d/ceil.l.d then produce Long.MIN/MAX_VALUE).
// Requires in != out (locations built with kOutputOverlap).
static void GenRoundingMode(LocationSummary* locations,
                            FloatRoundingMode mode,
                            Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  // "out" is used as scratch below while "in" must stay live.
  DCHECK_NE(in, out);

  Mips64Label done;

  // double floor/ceil(double in) {
  //     if in.isNaN || in.isInfinite || in.isZero {
  //         return in;
  //     }
  __ ClassD(out, in);
  __ Dmfc1(AT, out);
  __ Andi(AT, AT, kFPLeaveUnchanged);   // +0.0 | +Inf | -0.0 | -Inf | qNaN | sNaN
  __ MovD(out, in);
  __ Bnezc(AT, &done);

  //     Long outLong = floor/ceil(in);
  //     if (outLong == Long.MAX_VALUE) || (outLong == Long.MIN_VALUE) {
  //         // floor()/ceil() has almost certainly returned a value
  //         // which can't be successfully represented as a signed
  //         // 64-bit number.  Java expects that the input value will
  //         // be returned in these cases.
  //         // There is also a small probability that floor(in)/ceil(in)
  //         // correctly truncates/rounds up the input value to
  //         // Long.MAX_VALUE or Long.MIN_VALUE. In these cases, this
  //         // exception handling code still does the correct thing.
  //         return in;
  //     }
  if (mode == kFloor) {
    __ FloorLD(out, in);
  } else if (mode == kCeil) {
    __ CeilLD(out, in);
  }
  __ Dmfc1(AT, out);
  __ MovD(out, in);
  __ Daddiu(TMP, AT, 1);
  __ Dati(TMP, 0x8000);  // TMP = AT + 0x8000 0000 0000 0001
                         // or AT - 0x7FFF FFFF FFFF FFFF.
                         // IOW, TMP = 1 if AT = Long.MIN_VALUE
                         // or TMP = 0 if AT = Long.MAX_VALUE.
  __ Dsrl(TMP, TMP, 1);  // TMP = 0 if AT = Long.MIN_VALUE
                         // or AT = Long.MAX_VALUE.
  __ Beqzc(TMP, &done);

  //     double out = outLong;
  //     return out;
  __ Dmtc1(AT, out);
  __ Cvtdl(out, out);
  __ Bind(&done);
  // }
}
813
void IntrinsicCodeGeneratorMIPS64::VisitMathFloor(HInvoke* invoke) {
  // Shared floor/ceil generator, rounding toward negative infinity.
  GenRoundingMode(invoke->GetLocations(), kFloor, GetAssembler());
}
817
// double java.lang.Math.ceil(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathCeil(HInvoke* invoke) {
  // Default kOutputOverlap: GenRoundingMode requires in != out.
  CreateFPToFP(allocator_, invoke);
}
822
void IntrinsicCodeGeneratorMIPS64::VisitMathCeil(HInvoke* invoke) {
  // Shared floor/ceil generator, rounding toward positive infinity.
  GenRoundingMode(invoke->GetLocations(), kCeil, GetAssembler());
}
826
// Common generator for Math.round(float) -> int and
// Math.round(double) -> long: floor(in) plus one if the fractional part
// is >= 0.5. When floor saturates to MIN/MAX_VALUE the saturated value
// is returned as-is (matching Java's clamping semantics).
static void GenRound(LocationSummary* locations, Mips64Assembler* assembler, DataType::Type type) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  // FPU temp used to hold the 0.5 constant for the comparison.
  FpuRegister half = locations->GetTemp(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  DCHECK(type == DataType::Type::kFloat32 || type == DataType::Type::kFloat64);

  Mips64Label done;

  // out = floor(in);
  //
  // if (out != MAX_VALUE && out != MIN_VALUE) {
  //     TMP = ((in - out) >= 0.5) ? 1 : 0;
  //     return out += TMP;
  // }
  // return out;

  // out = floor(in);
  if (type == DataType::Type::kFloat64) {
    __ FloorLD(FTMP, in);
    __ Dmfc1(out, FTMP);
  } else {
    __ FloorWS(FTMP, in);
    __ Mfc1(out, FTMP);
  }

  // if (out != MAX_VALUE && out != MIN_VALUE)
  // (floor.l.d/floor.w.s saturate out-of-range, NaN and Inf inputs to
  // MIN/MAX_VALUE, so this also skips the +0.5 adjustment for them.)
  if (type == DataType::Type::kFloat64) {
    __ Daddiu(TMP, out, 1);
    __ Dati(TMP, 0x8000);  // TMP = out + 0x8000 0000 0000 0001
                           // or out - 0x7FFF FFFF FFFF FFFF.
                           // IOW, TMP = 1 if out = Long.MIN_VALUE
                           // or TMP = 0 if out = Long.MAX_VALUE.
    __ Dsrl(TMP, TMP, 1);  // TMP = 0 if out = Long.MIN_VALUE
                           // or out = Long.MAX_VALUE.
    __ Beqzc(TMP, &done);
  } else {
    __ Addiu(TMP, out, 1);
    __ Aui(TMP, TMP, 0x8000);  // TMP = out + 0x8000 0001
                               // or out - 0x7FFF FFFF.
                               // IOW, TMP = 1 if out = Int.MIN_VALUE
                               // or TMP = 0 if out = Int.MAX_VALUE.
    __ Srl(TMP, TMP, 1);       // TMP = 0 if out = Int.MIN_VALUE
                               // or out = Int.MAX_VALUE.
    __ Beqzc(TMP, &done);
  }

  // TMP = (0.5 <= (in - out)) ? -1 : 0;
  // (cmp.le.d/cmp.le.s write an all-ones mask on true, hence -1.)
  if (type == DataType::Type::kFloat64) {
    __ Cvtdl(FTMP, FTMP);  // Convert output of floor.l.d back to "double".
    __ LoadConst64(AT, bit_cast<int64_t, double>(0.5));
    __ SubD(FTMP, in, FTMP);
    __ Dmtc1(AT, half);
    __ CmpLeD(FTMP, half, FTMP);
    __ Dmfc1(TMP, FTMP);
  } else {
    __ Cvtsw(FTMP, FTMP);  // Convert output of floor.w.s back to "float".
    __ LoadConst32(AT, bit_cast<int32_t, float>(0.5f));
    __ SubS(FTMP, in, FTMP);
    __ Mtc1(AT, half);
    __ CmpLeS(FTMP, half, FTMP);
    __ Mfc1(TMP, FTMP);
  }

  // Return out -= TMP.  (out - (-1) == out + 1 when the fraction >= 0.5.)
  if (type == DataType::Type::kFloat64) {
    __ Dsubu(out, out, TMP);
  } else {
    __ Subu(out, out, TMP);
  }

  __ Bind(&done);
}
900
901// int java.lang.Math.round(float)
902void IntrinsicLocationsBuilderMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100903 LocationSummary* locations =
904 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen7adaab02016-04-21 14:49:20 -0700905 locations->SetInAt(0, Location::RequiresFpuRegister());
906 locations->AddTemp(Location::RequiresFpuRegister());
907 locations->SetOut(Location::RequiresRegister());
908}
909
void IntrinsicCodeGeneratorMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
  // Shared rounding generator, float -> int flavor.
  GenRound(invoke->GetLocations(), GetAssembler(), DataType::Type::kFloat32);
}
913
914// long java.lang.Math.round(double)
915void IntrinsicLocationsBuilderMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100916 LocationSummary* locations =
917 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen7adaab02016-04-21 14:49:20 -0700918 locations->SetInAt(0, Location::RequiresFpuRegister());
919 locations->AddTemp(Location::RequiresFpuRegister());
920 locations->SetOut(Location::RequiresRegister());
921}
922
void IntrinsicCodeGeneratorMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
  // Shared rounding generator, double -> long flavor.
  GenRound(invoke->GetLocations(), GetAssembler(), DataType::Type::kFloat64);
}
926
// byte libcore.io.Memory.peekByte(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  // One GPR input (the raw address), one GPR output.
  CreateIntToIntLocations(allocator_, invoke);
}
931
932void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
933 Mips64Assembler* assembler = GetAssembler();
934 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
935 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
936
937 __ Lb(out, adr, 0);
938}
939
// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  // One GPR input (the raw address), one GPR output.
  CreateIntToIntLocations(allocator_, invoke);
}
944
945void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
946 Mips64Assembler* assembler = GetAssembler();
947 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
948 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
949
950 __ Lh(out, adr, 0);
951}
952
// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  // One GPR input (the raw address), one GPR output.
  CreateIntToIntLocations(allocator_, invoke);
}
957
958void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
959 Mips64Assembler* assembler = GetAssembler();
960 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
961 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
962
963 __ Lw(out, adr, 0);
964}
965
// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  // One GPR input (the raw address), one GPR output.
  CreateIntToIntLocations(allocator_, invoke);
}
970
971void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
972 Mips64Assembler* assembler = GetAssembler();
973 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
974 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
975
976 __ Ld(out, adr, 0);
977}
978
Vladimir Markoca6fff82017-10-03 14:49:14 +0100979static void CreateIntIntToVoidLocations(ArenaAllocator* allocator, HInvoke* invoke) {
980 LocationSummary* locations =
981 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen70fb1f42015-09-04 10:15:27 -0700982 locations->SetInAt(0, Location::RequiresRegister());
983 locations->SetInAt(1, Location::RequiresRegister());
984}
985
// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  // Two GPR inputs (address, value), no output.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
990
991void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
992 Mips64Assembler* assembler = GetAssembler();
993 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
994 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
995
996 __ Sb(val, adr, 0);
997}
998
// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  // Two GPR inputs (address, value), no output.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
1003
1004void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
1005 Mips64Assembler* assembler = GetAssembler();
1006 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
1007 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
1008
1009 __ Sh(val, adr, 0);
1010}
1011
// void libcore.io.Memory.pokeInt(long address, int value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  // Two GPR inputs (address, value), no output.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
1016
1017void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
1018 Mips64Assembler* assembler = GetAssembler();
1019 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
1020 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
1021
1022 __ Sw(val, adr, 00);
1023}
1024
// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  // Two GPR inputs (address, value), no output.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
1029
1030void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
1031 Mips64Assembler* assembler = GetAssembler();
1032 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
1033 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
1034
1035 __ Sd(val, adr, 0);
1036}
1037
Chris Larsen49e55392015-09-04 16:04:03 -07001038// Thread java.lang.Thread.currentThread()
1039void IntrinsicLocationsBuilderMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001040 LocationSummary* locations =
1041 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen49e55392015-09-04 16:04:03 -07001042 locations->SetOut(Location::RequiresRegister());
1043}
1044
1045void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
1046 Mips64Assembler* assembler = GetAssembler();
1047 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
1048
1049 __ LoadFromOffset(kLoadUnsignedWord,
1050 out,
1051 TR,
1052 Thread::PeerOffset<kMips64PointerSize>().Int32Value());
1053}
1054
// Builds locations for the Unsafe.get* family:
// (unused receiver, object base, long offset) -> value.
// Only the reference-returning variants may need to call the
// read-barrier slow path.
static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
                                          HInvoke* invoke,
                                          DataType::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke,
                                      can_call
                                          ? LocationSummary::kCallOnSlowPath
                                          : LocationSummary::kNoCall,
                                      kIntrinsified);
  if (can_call && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());  // Object base.
  locations->SetInAt(2, Location::RequiresRegister());  // Long offset.
  // When a slow-path call is possible the output must not alias the
  // inputs, which stay live across the call.
  locations->SetOut(Location::RequiresRegister(),
                    (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in InstructionCodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}
1081
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits an Unsafe.get{Int,Long,Object}[Volatile] load from base+offset.
// Volatile loads are followed by SYNC 0 (acquire-style full barrier).
// Reference loads go through the configured read-barrier scheme.
static void GenUnsafeGet(HInvoke* invoke,
                         DataType::Type type,
                         bool is_volatile,
                         CodeGeneratorMIPS64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  // Only int, long and reference loads are supported here.
  DCHECK((type == DataType::Type::kInt32) ||
         (type == DataType::Type::kInt64) ||
         (type == DataType::Type::kReference)) << type;
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Target register.
  Location trg_loc = locations->Out();
  GpuRegister trg = trg_loc.AsRegister<GpuRegister>();
  // Object pointer.
  Location base_loc = locations->InAt(1);
  GpuRegister base = base_loc.AsRegister<GpuRegister>();
  // Long offset.
  Location offset_loc = locations->InAt(2);
  GpuRegister offset = offset_loc.AsRegister<GpuRegister>();

  // The Baker read-barrier path forms the address itself (base + index),
  // so TMP is only precomputed for every other configuration.
  if (!(kEmitCompilerReadBarrier && kUseBakerReadBarrier && (type == DataType::Type::kReference))) {
    __ Daddu(TMP, base, offset);
  }

  switch (type) {
    case DataType::Type::kInt64:
      __ Ld(trg, TMP, 0);
      if (is_volatile) {
        __ Sync(0);
      }
      break;

    case DataType::Type::kInt32:
      __ Lw(trg, TMP, 0);
      if (is_volatile) {
        __ Sync(0);
      }
      break;

    case DataType::Type::kReference:
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          // Fast-path Baker barrier: load and mark in one helper.
          Location temp = locations->GetTemp(0);
          codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke,
                                                             trg_loc,
                                                             base,
                                                             /* offset */ 0U,
                                                             /* index */ offset_loc,
                                                             TIMES_1,
                                                             temp,
                                                             /* needs_null_check */ false);
          if (is_volatile) {
            __ Sync(0);
          }
        } else {
          // Non-Baker barrier: plain load, then slow-path fixup.
          __ Lwu(trg, TMP, 0);
          if (is_volatile) {
            __ Sync(0);
          }
          codegen->GenerateReadBarrierSlow(invoke,
                                           trg_loc,
                                           trg_loc,
                                           base_loc,
                                           /* offset */ 0U,
                                           /* index */ offset_loc);
        }
      } else {
        // No read barriers configured: load and (if enabled) unpoison.
        __ Lwu(trg, TMP, 0);
        if (is_volatile) {
          __ Sync(0);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}
1163
// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}
1168
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  // Plain (non-volatile) 32-bit load.
  GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ false, codegen_);
}
1172
// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}
1177
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  // Volatile 32-bit load (load followed by SYNC 0 in GenUnsafeGet).
  GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ true, codegen_);
}
1181
// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}
1186
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  // Plain (non-volatile) 64-bit load.
  GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ false, codegen_);
}
1190
// long sun.misc.Unsafe.getLongVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}
1195
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  // Volatile 64-bit load (load followed by SYNC 0 in GenUnsafeGet).
  GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ true, codegen_);
}
1199
// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  // Reference type: may add read-barrier slow path and temp.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}
1204
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  // Plain (non-volatile) reference load, with read barrier if configured.
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ false, codegen_);
}
1208
// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  // Reference type: may add read-barrier slow path and temp.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}
1213
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  // Volatile reference load, with read barrier if configured.
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ true, codegen_);
}
1217
Vladimir Markoca6fff82017-10-03 14:49:14 +01001218static void CreateIntIntIntIntToVoid(ArenaAllocator* allocator, HInvoke* invoke) {
1219 LocationSummary* locations =
1220 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen1360ada2015-09-04 23:38:16 -07001221 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1222 locations->SetInAt(1, Location::RequiresRegister());
1223 locations->SetInAt(2, Location::RequiresRegister());
1224 locations->SetInAt(3, Location::RequiresRegister());
1225}
1226
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits an Unsafe.put{Int,Long,Object}[Volatile/Ordered] store to
// base+offset. Ordered and volatile stores are preceded by SYNC 0;
// volatile stores are additionally followed by SYNC 0. Reference stores
// poison the value if heap poisoning is on and mark the GC card.
static void GenUnsafePut(LocationSummary* locations,
                         DataType::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorMIPS64* codegen) {
  // Only int, long and reference stores are supported here.
  DCHECK((type == DataType::Type::kInt32) ||
         (type == DataType::Type::kInt64) ||
         (type == DataType::Type::kReference));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>();

  __ Daddu(TMP, base, offset);
  if (is_volatile || is_ordered) {
    // Release-style barrier before the store.
    __ Sync(0);
  }
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
      if (kPoisonHeapReferences && type == DataType::Type::kReference) {
        // Store the poisoned reference via AT; "value" must stay
        // unpoisoned for the card marking below.
        __ PoisonHeapReference(AT, value);
        __ Sw(AT, TMP, 0);
      } else {
        __ Sw(value, TMP, 0);
      }
      break;

    case DataType::Type::kInt64:
      __ Sd(value, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
  if (is_volatile) {
    // Full barrier after a volatile store.
    __ Sync(0);
  }

  if (type == DataType::Type::kReference) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}
1276
// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1281
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) {
  // Plain (non-volatile, non-ordered) 32-bit store.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
1289
1290// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  // Shares the location setup used by every Unsafe put variant.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1294
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  // Ordered (release-style) 32-bit store: GenUnsafePut emits a sync
  // before the store, but not after it.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile= */ false,
               /* is_ordered= */ true,
               codegen_);
}
1302
1303// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  // Shares the location setup used by every Unsafe put variant.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1307
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  // Volatile 32-bit store: GenUnsafePut emits a sync both before and after
  // the store.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile= */ true,
               /* is_ordered= */ false,
               codegen_);
}
1315
1316// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  // Shares the location setup used by every Unsafe put variant.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1320
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  // Plain reference store: GenUnsafePut additionally poisons the reference
  // (when heap poisoning is enabled) and marks the GC card for the object.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile= */ false,
               /* is_ordered= */ false,
               codegen_);
}
1328
1329// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  // Shares the location setup used by every Unsafe put variant.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1333
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  // Ordered (release-style) reference store: sync before the store only,
  // plus heap-reference poisoning (if enabled) and GC card marking.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile= */ false,
               /* is_ordered= */ true,
               codegen_);
}
1341
1342// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  // Shares the location setup used by every Unsafe put variant.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1346
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  // Volatile reference store: sync before and after the store, plus
  // heap-reference poisoning (if enabled) and GC card marking.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile= */ true,
               /* is_ordered= */ false,
               codegen_);
}
1354
1355// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  // Shares the location setup used by every Unsafe put variant.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1359
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  // Plain 64-bit store: no memory barrier on either side of the store.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile= */ false,
               /* is_ordered= */ false,
               codegen_);
}
1367
1368// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  // Shares the location setup used by every Unsafe put variant.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1372
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  // Ordered (release-style) 64-bit store: sync before the store only.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile= */ false,
               /* is_ordered= */ true,
               codegen_);
}
1380
1381// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  // Shares the location setup used by every Unsafe put variant.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1385
void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  // Volatile 64-bit store: sync before and after the store.
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile= */ true,
               /* is_ordered= */ false,
               codegen_);
}
1393
Vladimir Markoca6fff82017-10-03 14:49:14 +01001394static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* allocator, HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001395 bool can_call = kEmitCompilerReadBarrier &&
1396 kUseBakerReadBarrier &&
1397 (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
Vladimir Markoca6fff82017-10-03 14:49:14 +01001398 LocationSummary* locations =
1399 new (allocator) LocationSummary(invoke,
1400 can_call
1401 ? LocationSummary::kCallOnSlowPath
1402 : LocationSummary::kNoCall,
1403 kIntrinsified);
Chris Larsen36427492015-10-23 02:19:38 -07001404 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1405 locations->SetInAt(1, Location::RequiresRegister());
1406 locations->SetInAt(2, Location::RequiresRegister());
1407 locations->SetInAt(3, Location::RequiresRegister());
1408 locations->SetInAt(4, Location::RequiresRegister());
Chris Larsen36427492015-10-23 02:19:38 -07001409 locations->SetOut(Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08001410
1411 // Temporary register used in CAS by (Baker) read barrier.
1412 if (can_call) {
1413 locations->AddTemp(Location::RequiresRegister());
1414 }
Chris Larsen36427492015-10-23 02:19:38 -07001415}
1416
Alexey Frunze15958152017-02-09 19:08:30 -08001417// Note that the caller must supply a properly aligned memory address.
1418// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001419static void GenCas(HInvoke* invoke, DataType::Type type, CodeGeneratorMIPS64* codegen) {
Chris Larsen36427492015-10-23 02:19:38 -07001420 Mips64Assembler* assembler = codegen->GetAssembler();
Alexey Frunze15958152017-02-09 19:08:30 -08001421 LocationSummary* locations = invoke->GetLocations();
Chris Larsen36427492015-10-23 02:19:38 -07001422 GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
Alexey Frunze15958152017-02-09 19:08:30 -08001423 Location offset_loc = locations->InAt(2);
1424 GpuRegister offset = offset_loc.AsRegister<GpuRegister>();
Chris Larsen36427492015-10-23 02:19:38 -07001425 GpuRegister expected = locations->InAt(3).AsRegister<GpuRegister>();
1426 GpuRegister value = locations->InAt(4).AsRegister<GpuRegister>();
Alexey Frunze15958152017-02-09 19:08:30 -08001427 Location out_loc = locations->Out();
1428 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Chris Larsen36427492015-10-23 02:19:38 -07001429
1430 DCHECK_NE(base, out);
1431 DCHECK_NE(offset, out);
1432 DCHECK_NE(expected, out);
1433
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001434 if (type == DataType::Type::kReference) {
Alexey Frunze15958152017-02-09 19:08:30 -08001435 // The only read barrier implementation supporting the
1436 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1437 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
1438
1439 // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
1440 // object and scan the receiver at the next GC for nothing.
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001441 bool value_can_be_null = true; // TODO: Worth finding out this information?
1442 codegen->MarkGCCard(base, value, value_can_be_null);
Alexey Frunze15958152017-02-09 19:08:30 -08001443
1444 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1445 Location temp = locations->GetTemp(0);
1446 // Need to make sure the reference stored in the field is a to-space
1447 // one before attempting the CAS or the CAS could fail incorrectly.
1448 codegen->GenerateReferenceLoadWithBakerReadBarrier(
1449 invoke,
1450 out_loc, // Unused, used only as a "temporary" within the read barrier.
1451 base,
1452 /* offset */ 0u,
1453 /* index */ offset_loc,
1454 ScaleFactor::TIMES_1,
1455 temp,
1456 /* needs_null_check */ false,
1457 /* always_update_field */ true);
1458 }
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001459 }
1460
Alexey Frunzec061de12017-02-14 13:27:23 -08001461 Mips64Label loop_head, exit_loop;
1462 __ Daddu(TMP, base, offset);
1463
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001464 if (kPoisonHeapReferences && type == DataType::Type::kReference) {
Alexey Frunzec061de12017-02-14 13:27:23 -08001465 __ PoisonHeapReference(expected);
1466 // Do not poison `value`, if it is the same register as
1467 // `expected`, which has just been poisoned.
1468 if (value != expected) {
1469 __ PoisonHeapReference(value);
1470 }
1471 }
1472
Chris Larsen36427492015-10-23 02:19:38 -07001473 // do {
1474 // tmp_value = [tmp_ptr] - expected;
1475 // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
1476 // result = tmp_value != 0;
1477
Chris Larsen36427492015-10-23 02:19:38 -07001478 __ Sync(0);
1479 __ Bind(&loop_head);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001480 if (type == DataType::Type::kInt64) {
Chris Larsen36427492015-10-23 02:19:38 -07001481 __ Lld(out, TMP);
1482 } else {
Roland Levillain391b8662015-12-18 11:43:38 +00001483 // Note: We will need a read barrier here, when read barrier
1484 // support is added to the MIPS64 back end.
Chris Larsen36427492015-10-23 02:19:38 -07001485 __ Ll(out, TMP);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001486 if (type == DataType::Type::kReference) {
Alexey Frunzec061de12017-02-14 13:27:23 -08001487 // The LL instruction sign-extends the 32-bit value, but
1488 // 32-bit references must be zero-extended. Zero-extend `out`.
1489 __ Dext(out, out, 0, 32);
1490 }
Chris Larsen36427492015-10-23 02:19:38 -07001491 }
1492 __ Dsubu(out, out, expected); // If we didn't get the 'expected'
1493 __ Sltiu(out, out, 1); // value, set 'out' to false, and
1494 __ Beqzc(out, &exit_loop); // return.
1495 __ Move(out, value); // Use 'out' for the 'store conditional' instruction.
1496 // If we use 'value' directly, we would lose 'value'
1497 // in the case that the store fails. Whether the
1498 // store succeeds, or fails, it will load the
Roland Levillain5e8d5f02016-10-18 18:03:43 +01001499 // correct Boolean value into the 'out' register.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001500 if (type == DataType::Type::kInt64) {
Chris Larsen36427492015-10-23 02:19:38 -07001501 __ Scd(out, TMP);
1502 } else {
1503 __ Sc(out, TMP);
1504 }
1505 __ Beqzc(out, &loop_head); // If we couldn't do the read-modify-write
1506 // cycle atomically then retry.
1507 __ Bind(&exit_loop);
1508 __ Sync(0);
Alexey Frunzec061de12017-02-14 13:27:23 -08001509
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001510 if (kPoisonHeapReferences && type == DataType::Type::kReference) {
Alexey Frunzec061de12017-02-14 13:27:23 -08001511 __ UnpoisonHeapReference(expected);
1512 // Do not unpoison `value`, if it is the same register as
1513 // `expected`, which has just been unpoisoned.
1514 if (value != expected) {
1515 __ UnpoisonHeapReference(value);
1516 }
1517 }
Chris Larsen36427492015-10-23 02:19:38 -07001518}
1519
1520// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  // Shares the location setup used by every Unsafe CAS variant.
  CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
}
1524
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  // 32-bit CAS via the shared LL/SC loop.
  GenCas(invoke, DataType::Type::kInt32, codegen_);
}
1528
1529// boolean sun.misc.Unsafe.compareAndSwapLong(Object o, long offset, long expected, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  // Shares the location setup used by every Unsafe CAS variant.
  CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
}
1533
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  // 64-bit CAS via the shared LLD/SCD loop.
  GenCas(invoke, DataType::Type::kInt64, codegen_);
}
1537
1538// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
1539void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001540 // The only read barrier implementation supporting the
1541 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1542 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
1543 return;
1544 }
1545
Vladimir Markoca6fff82017-10-03 14:49:14 +01001546 CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
Chris Larsen36427492015-10-23 02:19:38 -07001547}
1548
1549void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001550 // The only read barrier implementation supporting the
1551 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1552 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
1553
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001554 GenCas(invoke, DataType::Type::kReference, codegen_);
Chris Larsen36427492015-10-23 02:19:38 -07001555}
1556
Chris Larsen9701c2e2015-09-04 17:22:47 -07001557// int java.lang.String.compareTo(String anotherString)
1558void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001559 LocationSummary* locations = new (allocator_) LocationSummary(
1560 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001561 InvokeRuntimeCallingConvention calling_convention;
1562 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1563 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001564 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001565 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1566}
1567
void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  // A null argument cannot be handled by the runtime stub's fast path;
  // divert to the slow path (presumably it ends up raising the NPE —
  // NOTE(review): confirm against IntrinsicSlowPathMIPS64).
  __ Beqzc(argument, slow_path->GetEntryLabel());

  codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
  __ Bind(slow_path->GetExitLabel());
}
1584
Chris Larsen972d6d72015-10-20 11:29:12 -07001585// boolean java.lang.String.equals(Object anObject)
1586void IntrinsicLocationsBuilderMIPS64::VisitStringEquals(HInvoke* invoke) {
Vladimir Markoda283052017-11-07 21:17:24 +00001587 if (kEmitCompilerReadBarrier &&
1588 !StringEqualsOptimizations(invoke).GetArgumentIsString() &&
1589 !StringEqualsOptimizations(invoke).GetNoReadBarrierForStringClass()) {
1590 // No support for this odd case (String class is moveable, not in the boot image).
1591 return;
1592 }
1593
Vladimir Markoca6fff82017-10-03 14:49:14 +01001594 LocationSummary* locations =
1595 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen972d6d72015-10-20 11:29:12 -07001596 locations->SetInAt(0, Location::RequiresRegister());
1597 locations->SetInAt(1, Location::RequiresRegister());
1598 locations->SetOut(Location::RequiresRegister());
1599
1600 // Temporary registers to store lengths of strings and for calculations.
1601 locations->AddTemp(Location::RequiresRegister());
1602 locations->AddTemp(Location::RequiresRegister());
1603 locations->AddTemp(Location::RequiresRegister());
1604}
1605
// boolean java.lang.String.equals(Object anObject)
//
// Fast paths: identical registers / identical references / mismatching class
// or count fields; otherwise compares the character data 8 bytes at a time.
void IntrinsicCodeGeneratorMIPS64::VisitStringEquals(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister str = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister arg = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister temp2 = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister temp3 = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label loop;
  Mips64Label end;
  Mips64Label return_true;
  Mips64Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this", and the register
  // containing the pointer to "anObject" are the same register then
  // "this", and "anObject" are the same object and we can
  // short-circuit the logic to a true result.
  if (str == arg) {
    __ LoadConst64(out, 1);
    return;
  }

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ Beqzc(arg, &return_false);
  }

  // Reference equality check, return true if same reference.
  __ Beqc(str, arg, &return_true);

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    __ Lw(temp1, str, class_offset);
    __ Lw(temp2, arg, class_offset);
    __ Bnec(temp1, temp2, &return_false);
  }

  // Load `count` fields of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if `count` fields are equal, return false if they're not.
  // Also compares the compression style, if differs return false.
  __ Bnec(temp1, temp2, &return_false);
  // Return true if both strings are empty. Even with string compression `count == 0` means empty.
  static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                "Expecting 0=compressed, 1=uncompressed");
  __ Beqzc(temp1, &return_true);

  // Don't overwrite input registers: iterate over copies in TMP and temp3.
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 8 bytes at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  if (mirror::kUseStringCompression) {
    // For string compression, calculate the number of bytes to compare (not chars).
    __ Dext(temp2, temp1, 0, 1);         // Extract compression flag.
    __ Srl(temp1, temp1, 1);             // Extract length.
    __ Sllv(temp1, temp1, temp2);        // Double the byte count if uncompressed.
  }

  // Loop to compare strings 8 bytes at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to kObjectAlignment.
  __ Bind(&loop);
  __ Ld(out, TMP, value_offset);
  __ Ld(temp2, temp3, value_offset);
  __ Bnec(out, temp2, &return_false);
  __ Daddiu(TMP, TMP, 8);
  __ Daddiu(temp3, temp3, 8);
  // With string compression, we have compared 8 bytes, otherwise 4 chars.
  __ Addiu(temp1, temp1, mirror::kUseStringCompression ? -8 : -4);
  __ Bgtzc(temp1, &loop);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst64(out, 1);
  __ Bc(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst64(out, 0);
  __ Bind(&end);
}
1708
Chris Larsen9701c2e2015-09-04 17:22:47 -07001709static void GenerateStringIndexOf(HInvoke* invoke,
1710 Mips64Assembler* assembler,
1711 CodeGeneratorMIPS64* codegen,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001712 bool start_at_zero) {
1713 LocationSummary* locations = invoke->GetLocations();
1714 GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;
1715
1716 // Note that the null check must have been done earlier.
1717 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1718
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001719 // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
1720 // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
Chris Larsen9701c2e2015-09-04 17:22:47 -07001721 SlowPathCodeMIPS64* slow_path = nullptr;
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001722 HInstruction* code_point = invoke->InputAt(1);
1723 if (code_point->IsIntConstant()) {
Vladimir Markoda051082016-05-17 16:10:20 +01001724 if (!IsUint<16>(code_point->AsIntConstant()->GetValue())) {
Chris Larsen9701c2e2015-09-04 17:22:47 -07001725 // Always needs the slow-path. We could directly dispatch to it,
1726 // but this case should be rare, so for simplicity just put the
1727 // full slow-path down and branch unconditionally.
Vladimir Marko174b2e22017-10-12 13:34:49 +01001728 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001729 codegen->AddSlowPath(slow_path);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001730 __ Bc(slow_path->GetEntryLabel());
Chris Larsen9701c2e2015-09-04 17:22:47 -07001731 __ Bind(slow_path->GetExitLabel());
1732 return;
1733 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001734 } else if (code_point->GetType() != DataType::Type::kUint16) {
Chris Larsen9701c2e2015-09-04 17:22:47 -07001735 GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
1736 __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
Vladimir Marko174b2e22017-10-12 13:34:49 +01001737 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001738 codegen->AddSlowPath(slow_path);
1739 __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel()); // UTF-16 required
1740 }
1741
1742 if (start_at_zero) {
1743 DCHECK_EQ(tmp_reg, A2);
1744 // Start-index = 0.
1745 __ Clear(tmp_reg);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001746 }
1747
Serban Constantinescufc734082016-07-19 17:18:07 +01001748 codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
Roland Levillain42ad2882016-02-29 18:26:54 +00001749 CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
Chris Larsen9701c2e2015-09-04 17:22:47 -07001750
1751 if (slow_path != nullptr) {
1752 __ Bind(slow_path->GetExitLabel());
1753 }
1754}
1755
1756// int java.lang.String.indexOf(int ch)
1757void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001758 LocationSummary* locations = new (allocator_) LocationSummary(
1759 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001760 // We have a hand-crafted assembly stub that follows the runtime
1761 // calling convention. So it's best to align the inputs accordingly.
1762 InvokeRuntimeCallingConvention calling_convention;
1763 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1764 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001765 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001766 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1767
1768 // Need a temp for slow-path codepoint compare, and need to send start-index=0.
1769 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1770}
1771
void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
  // indexOf(ch): search starts at index 0.
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero= */ true);
}
1775
1776// int java.lang.String.indexOf(int ch, int fromIndex)
1777void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001778 LocationSummary* locations = new (allocator_) LocationSummary(
1779 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001780 // We have a hand-crafted assembly stub that follows the runtime
1781 // calling convention. So it's best to align the inputs accordingly.
1782 InvokeRuntimeCallingConvention calling_convention;
1783 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1784 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1785 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001786 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001787 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1788}
1789
void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
  // indexOf(ch, fromIndex): the caller-provided start index is already in place.
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero= */ false);
}
1793
Roland Levillaincc3839c2016-02-29 16:23:48 +00001794// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001795void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001796 LocationSummary* locations = new (allocator_) LocationSummary(
1797 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001798 InvokeRuntimeCallingConvention calling_convention;
1799 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1800 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1801 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1802 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001803 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001804 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1805}
1806
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  // A null byte array is diverted to the slow path rather than the
  // allocation stub.
  __ Beqzc(byte_array, slow_path->GetEntryLabel());

  codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ Bind(slow_path->GetExitLabel());
}
1821
// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  // This intrinsic always calls the runtime (no inline fast path and no
  // slow path), so all inputs go directly into the runtime calling-
  // convention registers.
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  // The result (a reference) comes back in the runtime's return register.
  Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}
1833
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  // Hence no slow path is needed here, unlike newStringFromBytes/-String.
  codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
}
1844
// java.lang.StringFactory.newStringFromString(String toCopy)
void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  // Calls the runtime on the main path and also needs a slow path (for a
  // null `toCopy` argument, handled in the code generator).
  LocationSummary* locations = new (allocator_) LocationSummary(
      invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  // The result (a reference) comes back in the runtime's return register.
  Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
}
1854
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  // A null source string cannot be passed to the runtime allocation stub;
  // defer that case to the slow path.
  __ Beqzc(string_to_copy, slow_path->GetEntryLabel());

  // The source string is already in the first runtime argument register.
  codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ Bind(slow_path->GetExitLabel());
}
1869
// Common code for Float.isInfinite / Double.isInfinite: classify the FP
// input with the MIPS R6 class.fmt instruction and test the two infinity
// bits of the resulting class mask.
static void GenIsInfinite(LocationSummary* locations,
                          bool is64bit,
                          Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // class.d / class.s write a bit mask describing the operand's class
  // (zero, subnormal, normal, infinity, NaN; each signed/unsigned).
  if (is64bit) {
    __ ClassD(FTMP, in);
  } else {
    __ ClassS(FTMP, in);
  }
  __ Mfc1(out, FTMP);
  // Keep only the +inf/-inf bits of the class mask...
  __ Andi(out, out, kPositiveInfinity | kNegativeInfinity);
  // ...and normalize the result to the Java boolean 0/1 (out = out != 0).
  __ Sltu(out, ZERO, out);
}
1885
// boolean java.lang.Float.isInfinite(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// boolean java.lang.Double.isInfinite(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
1903
// void java.lang.String.getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin)
void IntrinsicLocationsBuilderMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  // Fully inlined copy: no runtime call and no slow path (bounds were
  // checked by the caller — this is the "NoCheck" variant).
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());  // String srcObj.
  locations->SetInAt(1, Location::RequiresRegister());  // int srcBegin.
  locations->SetInAt(2, Location::RequiresRegister());  // int srcEnd.
  locations->SetInAt(3, Location::RequiresRegister());  // char[] dstObj.
  locations->SetInAt(4, Location::RequiresRegister());  // int dstBegin.

  // Three temps: destination pointer, source pointer, character count.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}
1918
// Emits an inline copy of [srcBegin, srcEnd) from the string's character
// storage into dst[dstBegin...]. Handles both compressed (8-bit) and
// uncompressed (16-bit) string representations when string compression
// is enabled.
void IntrinsicCodeGeneratorMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);
  const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);

  GpuRegister srcObj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister srcBegin = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister srcEnd = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister dstObj = locations->InAt(3).AsRegister<GpuRegister>();
  GpuRegister dstBegin = locations->InAt(4).AsRegister<GpuRegister>();

  GpuRegister dstPtr = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister srcPtr = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister numChrs = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label done;
  Mips64Label loop;

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Get offset of value field within a string object.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  __ Beqc(srcEnd, srcBegin, &done);  // No characters to move.

  // Calculate number of characters to be copied.
  __ Dsubu(numChrs, srcEnd, srcBegin);

  // Calculate destination address: &dst[dstBegin] (dstPtr is shared by
  // both the compressed and uncompressed paths below).
  __ Daddiu(dstPtr, dstObj, data_offset);
  __ Dlsa(dstPtr, dstBegin, dstPtr, char_shift);

  if (mirror::kUseStringCompression) {
    Mips64Label uncompressed_copy, compressed_loop;
    const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
    // Load count field and extract compression flag (bit 0 of count).
    __ LoadFromOffset(kLoadWord, TMP, srcObj, count_offset);
    __ Dext(TMP, TMP, 0, 1);

    // If string is uncompressed, use uncompressed path.
    __ Bnezc(TMP, &uncompressed_copy);

    // Copy loop for compressed src, copying 1 character (8-bit) to (16-bit) at a time.
    // Compressed chars are byte-sized, so the source index needs no scaling.
    __ Daddu(srcPtr, srcObj, srcBegin);
    __ Bind(&compressed_loop);
    __ LoadFromOffset(kLoadUnsignedByte, TMP, srcPtr, value_offset);
    __ StoreToOffset(kStoreHalfword, TMP, dstPtr, 0);
    __ Daddiu(numChrs, numChrs, -1);
    __ Daddiu(srcPtr, srcPtr, 1);
    __ Daddiu(dstPtr, dstPtr, 2);
    __ Bnezc(numChrs, &compressed_loop);

    __ Bc(&done);
    __ Bind(&uncompressed_copy);
  }

  // Calculate source address: &src.value[srcBegin] (16-bit chars).
  __ Daddiu(srcPtr, srcObj, value_offset);
  __ Dlsa(srcPtr, srcBegin, srcPtr, char_shift);

  // Halfword-at-a-time copy loop; numChrs is known non-zero here.
  __ Bind(&loop);
  __ Lh(AT, srcPtr, 0);
  __ Daddiu(numChrs, numChrs, -1);
  __ Daddiu(srcPtr, srcPtr, char_size);
  __ Sh(AT, dstPtr, 0);
  __ Daddiu(dstPtr, dstPtr, char_size);
  __ Bnezc(numChrs, &loop);

  __ Bind(&done);
}
1994
// static void java.lang.System.arraycopy(Object src, int srcPos,
//                                        Object dest, int destPos,
//                                        int length)
// Locations builder for the char[] specialization. If a constant argument
// already proves the call must throw (negative position/length), we emit
// no locations at all, which makes the compiler fall back to the regular
// (non-intrinsic) call.
void IntrinsicLocationsBuilderMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // AsIntConstant() yields null when the input is not a compile-time
  // constant, hence the null checks below.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  // As long as we are checking, we might as well check to see if the src and dest
  // positions are >= 0.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyways.
    return;
  }

  // And since we are already checking, check the length too.
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0) {
      // Just call as normal.
      return;
    }
  }

  // Okay, it is safe to generate inline code.
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));

  // Three temps: dest base pointer, src base pointer, remaining count.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}
2034
// Utility routine to verify that "length(input) - pos >= length";
// branches to `slow_path` when the copy would not fit.
// `length_input_minus_pos` holds the already-computed value
// length(input) - pos; `length` is the requested copy length (register
// or constant).
static void EnoughItems(Mips64Assembler* assembler,
                        GpuRegister length_input_minus_pos,
                        Location length,
                        SlowPathCodeMIPS64* slow_path) {
  if (length.IsConstant()) {
    int32_t length_constant = length.GetConstant()->AsIntConstant()->GetValue();

    if (IsInt<16>(length_constant)) {
      // Fits in slti's 16-bit immediate: TMP = (avail < length).
      __ Slti(TMP, length_input_minus_pos, length_constant);
      __ Bnezc(TMP, slow_path->GetEntryLabel());
    } else {
      // Constant too wide for an immediate; materialize it first.
      __ LoadConst32(TMP, length_constant);
      __ Bltc(length_input_minus_pos, TMP, slow_path->GetEntryLabel());
    }
  } else {
    __ Bltc(length_input_minus_pos, length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
  }
}
2054
// Emits the range checks for one side (src or dest) of an arraycopy:
// verifies pos >= 0 and length(input) - pos >= length, branching to
// `slow_path` on failure. When `length_is_input_length` is true the
// caller asserts length == length(input), so the copy can only succeed
// with pos == 0.
static void CheckPosition(Mips64Assembler* assembler,
                          Location pos,
                          GpuRegister input,
                          Location length,
                          SlowPathCodeMIPS64* slow_path,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  // Calculate length(input) - pos.
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    // Negative constant positions were rejected in the LocationsBuilder,
    // so pos_const >= 0 here.
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, AT, input, length_offset);
        EnoughItems(assembler, AT, length, slow_path);
      }
    } else {
      // Check that (length(input) - pos) >= zero.
      __ LoadFromOffset(kLoadWord, AT, input, length_offset);
      DCHECK_GT(pos_const, 0);
      __ Addiu32(AT, AT, -pos_const);
      __ Bltzc(AT, slow_path->GetEntryLabel());

      // Verify that (length(input) - pos) >= length.
      EnoughItems(assembler, AT, length, slow_path);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bnezc(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Verify that pos >= 0.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bltzc(pos_reg, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= zero.
    __ LoadFromOffset(kLoadWord, AT, input, length_offset);
    __ Subu(AT, AT, pos_reg);
    __ Bltzc(AT, slow_path->GetEntryLabel());

    // Verify that (length(input) - pos) >= length.
    EnoughItems(assembler, AT, length, slow_path);
  }
}
2101
// Inline code for the char[] arraycopy: after null/overlap/range checks
// (all of which bail to the slow path, i.e. the regular call), copies
// `length` halfwords from src[srcPos] to dest[destPos].
void IntrinsicCodeGeneratorMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
  Location src_pos = locations->InAt(1);
  GpuRegister dest = locations->InAt(2).AsRegister<GpuRegister>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  Mips64Label loop;

  GpuRegister dest_base = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister src_base = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister count = locations->GetTemp(2).AsRegister<GpuRegister>();

  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ Beqc(src, dest, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ Beqzc(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Beqzc(dest, slow_path->GetEntryLabel());

  // Load length into register for count.
  if (length.IsConstant()) {
    __ LoadConst32(count, length.GetConstant()->AsIntConstant()->GetValue());
  } else {
    // If the length is negative, bail out.
    // We have already checked in the LocationsBuilder for the constant case.
    __ Bltzc(length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());

    __ Move(count, length.AsRegister<GpuRegister>());
  }

  // Validity checks: source.
  CheckPosition(assembler, src_pos, src, Location::RegisterLocation(count), slow_path);

  // Validity checks: dest.
  CheckPosition(assembler, dest_pos, dest, Location::RegisterLocation(count), slow_path);

  // If count is zero, we're done.
  __ Beqzc(count, slow_path->GetExitLabel());

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);

  const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Calculate source and destination addresses.
  if (src_pos.IsConstant()) {
    int32_t src_pos_const = src_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(src_base, src, data_offset + char_size * src_pos_const, TMP);
  } else {
    __ Daddiu64(src_base, src, data_offset, TMP);
    __ Dlsa(src_base, src_pos.AsRegister<GpuRegister>(), src_base, char_shift);
  }
  if (dest_pos.IsConstant()) {
    int32_t dest_pos_const = dest_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(dest_base, dest, data_offset + char_size * dest_pos_const, TMP);
  } else {
    __ Daddiu64(dest_base, dest, data_offset, TMP);
    __ Dlsa(dest_base, dest_pos.AsRegister<GpuRegister>(), dest_base, char_shift);
  }

  // Halfword-at-a-time copy loop; count is known non-zero here.
  __ Bind(&loop);
  __ Lh(TMP, src_base, 0);
  __ Daddiu(src_base, src_base, char_size);
  __ Daddiu(count, count, -1);
  __ Sh(TMP, dest_base, 0);
  __ Daddiu(dest_base, dest_base, char_size);
  __ Bnezc(count, &loop);

  __ Bind(slow_path->GetExitLabel());
}
2188
// Common code for Integer/Long.highestOneBit: count leading zeros, then
// shift the sign bit (MIN_VALUE) right by that amount to isolate the
// highest set bit of the input.
static void GenHighestOneBit(LocationSummary* locations,
                             DataType::Type type,
                             Mips64Assembler* assembler) {
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (type == DataType::Type::kInt64) {
    __ Dclz(TMP, in);
    __ LoadConst64(AT, INT64_C(0x8000000000000000));
    __ Dsrlv(AT, AT, TMP);
  } else {
    __ Clz(TMP, in);
    __ LoadConst32(AT, 0x80000000);
    __ Srlv(AT, AT, TMP);
  }
  // For either value of "type", when "in" is zero, "out" should also
  // be zero. Without this extra "and" operation, when "in" is zero,
  // "out" would be either Integer.MIN_VALUE, or Long.MIN_VALUE because
  // the MIPS logical shift operations "dsrlv", and "srlv" don't use
  // the shift amount (TMP) directly; they use either (TMP % 64) or
  // (TMP % 32), respectively.
  __ And(out, AT, in);
}

// int java.lang.Integer.highestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// long java.lang.Long.highestOneBit(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
2232
// Common code for Integer/Long.lowestOneBit, using the classic identity
// (in & -in), which isolates the lowest set bit (and yields 0 for 0).
static void GenLowestOneBit(LocationSummary* locations,
                            DataType::Type type,
                            Mips64Assembler* assembler) {
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // TMP = -in.
  if (type == DataType::Type::kInt64) {
    __ Dsubu(TMP, ZERO, in);
  } else {
    __ Subu(TMP, ZERO, in);
  }
  // out = in & -in.
  __ And(out, TMP, in);
}

// int java.lang.Integer.lowestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// long java.lang.Long.lowestOneBit(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
2266
// Locations for a unary FP math intrinsic implemented as a runtime call:
// input in the first FP argument register, result in the FP return
// register.
static void CreateFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
}

// Same as above for a binary FP math intrinsic (two FP arguments).
static void CreateFPFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
}

// Emits the runtime call for a unary FP math intrinsic. The DCHECKs pin
// the registers chosen by CreateFPToFPCallLocations (F12 in, F0 out).
static void GenFPToFPCall(HInvoke* invoke,
                          CodeGeneratorMIPS64* codegen,
                          QuickEntrypointEnum entry) {
  LocationSummary* locations = invoke->GetLocations();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  DCHECK_EQ(in, F12);
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
  DCHECK_EQ(out, F0);

  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
}

// Emits the runtime call for a binary FP math intrinsic (F12/F13 in,
// F0 out, matching CreateFPFPToFPCallLocations).
static void GenFPFPToFPCall(HInvoke* invoke,
                            CodeGeneratorMIPS64* codegen,
                            QuickEntrypointEnum entry) {
  LocationSummary* locations = invoke->GetLocations();
  FpuRegister in0 = locations->InAt(0).AsFpuRegister<FpuRegister>();
  DCHECK_EQ(in0, F12);
  FpuRegister in1 = locations->InAt(1).AsFpuRegister<FpuRegister>();
  DCHECK_EQ(in1, F13);
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
  DCHECK_EQ(out, F0);

  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
}
2311
// The java.lang.Math transcendental intrinsics below are all implemented
// as calls to the corresponding quick runtime entrypoints; only the
// locations setup and entrypoint enum differ per intrinsic.

// static double java.lang.Math.cos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}

// static double java.lang.Math.sin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}

// static double java.lang.Math.acos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}

// static double java.lang.Math.asin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}

// static double java.lang.Math.atan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}

// static double java.lang.Math.atan2(double y, double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickAtan2);
}

// static double java.lang.Math.pow(double a, double b)
void IntrinsicLocationsBuilderMIPS64::VisitMathPow(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathPow(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickPow);
}

// static double java.lang.Math.cbrt(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

// static double java.lang.Math.cosh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

// static double java.lang.Math.exp(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

// static double java.lang.Math.expm1(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

// static double java.lang.Math.hypot(double x, double y)
void IntrinsicLocationsBuilderMIPS64::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickHypot);
}

// static double java.lang.Math.log(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

// static double java.lang.Math.log10(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

// static double java.lang.Math.nextAfter(double start, double direction)
void IntrinsicLocationsBuilderMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

// static double java.lang.Math.sinh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

// static double java.lang.Math.tan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

// static double java.lang.Math.tanh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}
2473
// static java.lang.Integer java.lang.Integer.valueOf(int)
// (The previous comment said "long ... valueOf(long)", which does not
// match this intrinsic: the code below uses an int argument register and
// a reference return location.)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConvention calling_convention;
  // Shared helper decides between constant-folded boxed value, cache
  // lookup, and runtime allocation; the extra locations are the reference
  // return register and the first runtime argument register.
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      calling_convention.GetReturnLocation(DataType::Type::kReference),
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
2483
// static java.lang.Integer java.lang.Integer.valueOf(int)
//
// Fast paths: if the argument is a compile-time constant inside the boot-image
// Integer cache range, embed the cached j.l.Integer's address directly; for a
// non-constant argument, do a range check and load from the cache array.
// Slow path (either case): allocate a new j.l.Integer at runtime and store the
// value into its `value` field.
void IntrinsicCodeGeneratorMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo();
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  // Needed to emit memory barriers after initializing the final `value` field.
  InstructionCodeGeneratorMIPS64* icodegen =
      down_cast<InstructionCodeGeneratorMIPS64*>(codegen_->GetInstructionVisitor());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  InvokeRuntimeCallingConvention calling_convention;
  if (invoke->InputAt(0)->IsConstant()) {
    int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
    if (value >= info.low && value <= info.high) {
      // Just embed the j.l.Integer in the code.
      ScopedObjectAccess soa(Thread::Current());
      mirror::Object* boxed = info.cache->Get(value + (-info.low));
      DCHECK(boxed != nullptr && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boxed));
      // Boot-image addresses fit in 32 bits; dchecked cast enforces that.
      uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(boxed));
      __ LoadConst64(out, address);
    } else {
      // Allocate and initialize a new j.l.Integer.
      // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
      // JIT object table.
      uint32_t address =
          dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
      // Pass the j.l.Integer class as the allocation's class argument.
      __ LoadConst64(calling_convention.GetRegisterAt(0), address);
      // NOTE(review): the invoked entrypoint (kQuickAllocObjectInitialized) and the
      // one named in CheckEntrypointTypes (kQuickAllocObjectWithChecks) differ —
      // presumably the signatures match; confirm against the entrypoint list.
      codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
      CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
      __ StoreConstToOffset(kStoreWord, value, out, info.value_offset, TMP);
      // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
      // one.
      icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    }
  } else {
    GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
    Mips64Label allocate, done;
    int32_t count = static_cast<uint32_t>(info.high) - info.low + 1;

    // Is (info.low <= in) && (in <= info.high)?
    __ Addiu32(out, in, -info.low);
    // As unsigned quantities is out < (info.high - info.low + 1)?
    __ LoadConst32(AT, count);
    // Branch if out >= (info.high - info.low + 1).
    // This means that "in" is outside of the range [info.low, info.high].
    __ Bgeuc(out, AT, &allocate);

    // If the value is within the bounds, load the j.l.Integer directly from the array.
    uint32_t data_offset = mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
    uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.cache));
    __ LoadConst64(TMP, data_offset + address);
    // out = TMP + (out << 2): index the 4-byte heap-reference array by (in - info.low).
    __ Dlsa(out, out, TMP, TIMES_4);
    __ Lwu(out, out, 0);
    __ MaybeUnpoisonHeapReference(out);
    __ Bc(&done);

    __ Bind(&allocate);
    // Otherwise allocate and initialize a new j.l.Integer.
    address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
    __ LoadConst64(calling_convention.GetRegisterAt(0), address);
    codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
    __ StoreToOffset(kStoreWord, in, out, info.value_offset);
    // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
    // one.
    icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    __ Bind(&done);
  }
}
2551
Chris Larsenb065b032017-11-02 12:13:20 -07002552// static boolean java.lang.Thread.interrupted()
2553void IntrinsicLocationsBuilderMIPS64::VisitThreadInterrupted(HInvoke* invoke) {
2554 LocationSummary* locations =
2555 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
2556 locations->SetOut(Location::RequiresRegister());
2557}
2558
// static boolean java.lang.Thread.interrupted()
void IntrinsicCodeGeneratorMIPS64::VisitThreadInterrupted(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
  // Offset of the interrupted flag inside the Thread object, accessed through
  // TR (the register holding the current Thread*).
  int32_t offset = Thread::InterruptedOffset<kMips64PointerSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  Mips64Label done;
  // Flag clear: `out` already holds false; nothing to reset.
  __ Beqzc(out, &done);
  // Flag set: clear it, with SYNC barriers bracketing the store so the clear
  // is ordered with respect to other accesses to the flag.
  __ Sync(0);
  __ StoreToOffset(kStoreWord, ZERO, TR, offset);
  __ Sync(0);
  __ Bind(&done);
}
2571
Aart Bik2f9fcc92016-03-01 15:16:54 -08002572UNIMPLEMENTED_INTRINSIC(MIPS64, ReferenceGetReferent)
Aart Bik2f9fcc92016-03-01 15:16:54 -08002573UNIMPLEMENTED_INTRINSIC(MIPS64, SystemArrayCopy)
Aart Bik3f67e692016-01-15 14:35:12 -08002574
Aart Bikff7d89c2016-11-07 08:49:28 -08002575UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOf);
2576UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOfAfter);
Aart Bik71bf7b42016-11-16 10:17:46 -08002577UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferAppend);
2578UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferLength);
2579UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferToString);
2580UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppend);
2581UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderLength);
2582UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderToString);
Aart Bikff7d89c2016-11-07 08:49:28 -08002583
Aart Bik0e54c012016-03-04 12:08:31 -08002584// 1.8.
2585UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddInt)
2586UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddLong)
2587UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetInt)
2588UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetLong)
2589UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetObject)
Aart Bik0e54c012016-03-04 12:08:31 -08002590
Aart Bik2f9fcc92016-03-01 15:16:54 -08002591UNREACHABLE_INTRINSICS(MIPS64)
Chris Larsen3039e382015-08-26 07:54:08 -07002592
2593#undef __
2594
2595} // namespace mips64
2596} // namespace art