blob: 8d5be802021ff645cb9e167ba9693f02ba9e5662 [file] [log] [blame]
Chris Larsen3039e382015-08-26 07:54:08 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "intrinsics_mips64.h"
18
19#include "arch/mips64/instruction_set_features_mips64.h"
20#include "art_method.h"
21#include "code_generator_mips64.h"
22#include "entrypoints/quick/quick_entrypoints.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070023#include "heap_poisoning.h"
Chris Larsen3039e382015-08-26 07:54:08 -070024#include "intrinsics.h"
25#include "mirror/array-inl.h"
Andreas Gampe895f9222017-07-05 09:53:32 -070026#include "mirror/object_array-inl.h"
Chris Larsen3039e382015-08-26 07:54:08 -070027#include "mirror/string.h"
Andreas Gampe508fdf32017-06-05 16:42:13 -070028#include "scoped_thread_state_change-inl.h"
Chris Larsen3039e382015-08-26 07:54:08 -070029#include "thread.h"
30#include "utils/mips64/assembler_mips64.h"
31#include "utils/mips64/constants_mips64.h"
32
33namespace art {
34
35namespace mips64 {
36
// Caches the code generator and the graph's arena allocator; the allocator is
// used to create LocationSummary objects for intrinsified invokes.
IntrinsicLocationsBuilderMIPS64::IntrinsicLocationsBuilderMIPS64(CodeGeneratorMIPS64* codegen)
    : codegen_(codegen), allocator_(codegen->GetGraph()->GetAllocator()) {
}

// Returns the MIPS64 assembler owned by the code generator.
Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() {
  return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler());
}

// Returns the arena allocator of the graph currently being compiled.
ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
  return codegen_->GetGraph()->GetAllocator();
}
48
Chris Larsen9701c2e2015-09-04 17:22:47 -070049#define __ codegen->GetAssembler()->
50
51static void MoveFromReturnRegister(Location trg,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010052 DataType::Type type,
Chris Larsen9701c2e2015-09-04 17:22:47 -070053 CodeGeneratorMIPS64* codegen) {
54 if (!trg.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010055 DCHECK_EQ(type, DataType::Type::kVoid);
Chris Larsen9701c2e2015-09-04 17:22:47 -070056 return;
57 }
58
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010059 DCHECK_NE(type, DataType::Type::kVoid);
Chris Larsen9701c2e2015-09-04 17:22:47 -070060
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010061 if (DataType::IsIntegralType(type) || type == DataType::Type::kReference) {
Chris Larsen9701c2e2015-09-04 17:22:47 -070062 GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
63 if (trg_reg != V0) {
64 __ Move(V0, trg_reg);
65 }
66 } else {
67 FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
68 if (trg_reg != F0) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010069 if (type == DataType::Type::kFloat32) {
Chris Larsen9701c2e2015-09-04 17:22:47 -070070 __ MovS(F0, trg_reg);
71 } else {
72 __ MovD(F0, trg_reg);
73 }
74 }
75 }
76}
77
// Moves the invoke's actual arguments into the positions required by the
// standard MIPS64 calling convention (used before a slow-path managed call).
static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
  InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}
82
// Slow-path for fallback (calling the managed code to handle the
// intrinsic) in an intrinsified call. This will copy the arguments
// into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations
//       given by the invoke's location summary. If an intrinsic
//       modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit IntrinsicSlowPathMIPS64(HInvoke* invoke)
      : SlowPathCodeMIPS64(invoke), invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);

    __ Bind(GetEntryLabel());

    // Preserve all live registers before they are clobbered by the call.
    SaveLiveRegisters(codegen, invoke_->GetLocations());

    // Shuffle the operands into calling-convention positions.
    MoveArguments(invoke_, codegen);

    // Emit the actual call; A0 carries the method pointer.
    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(
          invoke_->AsInvokeStaticOrDirect(), Location::RegisterLocation(A0), this);
    } else {
      codegen->GenerateVirtualCall(
          invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0), this);
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    // Unconditional branch back to the fast path.
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
};
133
134#undef __
135
Chris Larsen3039e382015-08-26 07:54:08 -0700136bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
137 Dispatch(invoke);
138 LocationSummary* res = invoke->GetLocations();
139 return res != nullptr && res->Intrinsified();
140}
141
142#define __ assembler->
143
// Locations for intrinsics taking one FP value and producing a GPR value.
static void CreateFPToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}
150
151static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
152 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
153 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
154
155 if (is64bit) {
156 __ Dmfc1(out, in);
157 } else {
158 __ Mfc1(out, in);
159 }
160}
161
// long java.lang.Double.doubleToRawLongBits(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// int java.lang.Float.floatToRawIntBits(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  // A bitwise move: NaN payloads are preserved ("raw" semantics).
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
179
// Locations for intrinsics taking one GPR value and producing an FP value.
static void CreateIntToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}
186
187static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
188 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
189 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
190
191 if (is64bit) {
192 __ Dmtc1(in, out);
193 } else {
194 __ Mtc1(in, out);
195 }
196}
197
// double java.lang.Double.longBitsToDouble(long)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// float java.lang.Float.intBitsToFloat(int)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
215
// Locations for intrinsics taking one GPR value and producing a GPR value;
// the output may share a register with the input (no overlap required).
static void CreateIntToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
222
223static void GenReverseBytes(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100224 DataType::Type type,
Chris Larsen3039e382015-08-26 07:54:08 -0700225 Mips64Assembler* assembler) {
226 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
227 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
228
229 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100230 case DataType::Type::kInt16:
Chris Larsen3039e382015-08-26 07:54:08 -0700231 __ Dsbh(out, in);
232 __ Seh(out, out);
233 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100234 case DataType::Type::kInt32:
Chris Larsen3039e382015-08-26 07:54:08 -0700235 __ Rotr(out, in, 16);
236 __ Wsbh(out, out);
237 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100238 case DataType::Type::kInt64:
Chris Larsen3039e382015-08-26 07:54:08 -0700239 __ Dsbh(out, in);
240 __ Dshd(out, out);
241 break;
242 default:
243 LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
244 UNREACHABLE();
245 }
246}
247
// int java.lang.Integer.reverseBytes(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// long java.lang.Long.reverseBytes(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}

// short java.lang.Short.reverseBytes(short)
void IntrinsicLocationsBuilderMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler());
}
274
Chris Larsen81284372015-10-21 15:28:53 -0700275static void GenNumberOfLeadingZeroes(LocationSummary* locations,
276 bool is64bit,
277 Mips64Assembler* assembler) {
Chris Larsen3039e382015-08-26 07:54:08 -0700278 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
279 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
280
281 if (is64bit) {
282 __ Dclz(out, in);
283 } else {
284 __ Clz(out, in);
285 }
286}
287
// int java.lang.Integer.numberOfLeadingZeros(int i)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// int java.lang.Long.numberOfLeadingZeros(long i)
void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
305
Chris Larsen81284372015-10-21 15:28:53 -0700306static void GenNumberOfTrailingZeroes(LocationSummary* locations,
307 bool is64bit,
308 Mips64Assembler* assembler) {
Chris Larsen0646da72015-09-22 16:02:40 -0700309 Location in = locations->InAt(0);
310 Location out = locations->Out();
311
312 if (is64bit) {
313 __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
314 __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
315 __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
316 __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
317 } else {
318 __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
319 __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
320 __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
321 __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
322 }
323}
324
// int java.lang.Integer.numberOfTrailingZeros(int i)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// int java.lang.Long.numberOfTrailingZeros(long i)
void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
342
343static void GenReverse(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100344 DataType::Type type,
Chris Larsen3039e382015-08-26 07:54:08 -0700345 Mips64Assembler* assembler) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100346 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Chris Larsen3039e382015-08-26 07:54:08 -0700347
348 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
349 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
350
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100351 if (type == DataType::Type::kInt32) {
Chris Larsen3039e382015-08-26 07:54:08 -0700352 __ Rotr(out, in, 16);
353 __ Wsbh(out, out);
354 __ Bitswap(out, out);
355 } else {
356 __ Dsbh(out, in);
357 __ Dshd(out, out);
358 __ Dbitswap(out, out);
359 }
360}
361
// int java.lang.Integer.reverse(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// long java.lang.Long.reverse(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
379
// Locations for intrinsics taking one FP value and producing an FP value.
static void CreateFPToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}
386
// Emits a population count (number of set bits) of the input register using
// the parallel bit-counting ("SWAR") technique; clobbers AT and TMP.
static void GenBitCount(LocationSummary* locations,
                        const DataType::Type type,
                        Mips64Assembler* assembler) {
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();

  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  // https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel
  //
  // A generalization of the best bit counting method to integers of
  // bit-widths up to 128 (parameterized by type T) is this:
  //
  // v = v - ((v >> 1) & (T)~(T)0/3);                           // temp
  // v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3);      // temp
  // v = (v + (v >> 4)) & (T)~(T)0/255*15;                      // temp
  // c = (T)(v * ((T)~(T)0/255)) >> (sizeof(T) - 1) * BITS_PER_BYTE; // count
  //
  // For comparison, for 32-bit quantities, this algorithm can be executed
  // using 20 MIPS instructions (the calls to LoadConst32() generate two
  // machine instructions each for the values being used in this algorithm).
  // A(n unrolled) loop-based algorithm requires 25 instructions.
  //
  // For a 64-bit operand this can be performed in 24 instructions compared
  // to a(n unrolled) loop based algorithm which requires 38 instructions.
  //
  // There are algorithms which are faster in the cases where very few
  // bits are set but the algorithm here attempts to minimize the total
  // number of instructions executed even when a large number of bits
  // are set.

  if (type == DataType::Type::kInt32) {
    // v = v - ((v >> 1) & 0x55555555): count bits in 2-bit groups.
    __ Srl(TMP, in, 1);
    __ LoadConst32(AT, 0x55555555);
    __ And(TMP, TMP, AT);
    __ Subu(TMP, in, TMP);
    // Sum adjacent 2-bit counts into 4-bit counts (mask 0x33333333).
    __ LoadConst32(AT, 0x33333333);
    __ And(out, TMP, AT);
    __ Srl(TMP, TMP, 2);
    __ And(TMP, TMP, AT);
    __ Addu(TMP, out, TMP);
    // Sum adjacent 4-bit counts into byte counts (mask 0x0F0F0F0F).
    __ Srl(out, TMP, 4);
    __ Addu(out, out, TMP);
    __ LoadConst32(AT, 0x0F0F0F0F);
    __ And(out, out, AT);
    // Multiply by 0x01010101 to sum all bytes; the total lands in the top byte.
    __ LoadConst32(TMP, 0x01010101);
    __ MulR6(out, out, TMP);
    __ Srl(out, out, 24);
  } else if (type == DataType::Type::kInt64) {
    // Same SWAR steps with 64-bit masks and doubleword instructions.
    __ Dsrl(TMP, in, 1);
    __ LoadConst64(AT, 0x5555555555555555L);
    __ And(TMP, TMP, AT);
    __ Dsubu(TMP, in, TMP);
    __ LoadConst64(AT, 0x3333333333333333L);
    __ And(out, TMP, AT);
    __ Dsrl(TMP, TMP, 2);
    __ And(TMP, TMP, AT);
    __ Daddu(TMP, out, TMP);
    __ Dsrl(out, TMP, 4);
    __ Daddu(out, out, TMP);
    __ LoadConst64(AT, 0x0F0F0F0F0F0F0F0FL);
    __ And(out, out, AT);
    __ LoadConst64(TMP, 0x0101010101010101L);
    __ Dmul(out, out, TMP);
    // Dsrl32 shifts by 32 + 24 = 56: extract the top byte of the doubleword.
    __ Dsrl32(out, out, 24);
  }
}
454
// int java.lang.Integer.bitCount(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// int java.lang.Long.bitCount(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
472
Chris Larsen0b7ac982015-09-04 12:54:28 -0700473static void MathAbsFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
474 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
475 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
476
477 if (is64bit) {
478 __ AbsD(out, in);
479 } else {
480 __ AbsS(out, in);
481 }
482}
483
// double java.lang.Math.abs(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// float java.lang.Math.abs(float)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
501
// Locations for intrinsics taking one GPR value and producing a GPR value.
// NOTE(review): duplicates CreateIntToIntLocations above — presumably kept
// for historical reasons; confirm before consolidating.
static void CreateIntToInt(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
508
// Emits a branchless integer absolute value; clobbers AT.
// AT = in >> 31 (arithmetic) is all-ones for negative inputs, zero otherwise;
// (in ^ AT) - AT then yields |in| (two's-complement negate when negative).
static void GenAbsInteger(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (is64bit) {
    // Dsra32 with 31 shifts right by 32 + 31 = 63: replicate the sign bit.
    __ Dsra32(AT, in, 31);
    __ Xor(out, in, AT);
    __ Dsubu(out, out, AT);
  } else {
    __ Sra(AT, in, 31);
    __ Xor(out, in, AT);
    __ Subu(out, out, AT);
  }
}
523
// int java.lang.Math.abs(int)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// long java.lang.Math.abs(long)
void IntrinsicLocationsBuilderMIPS64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
541
// Emits floating-point min/max with Java NaN semantics; clobbers FTMP.
static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        DataType::Type type,
                        Mips64Assembler* assembler) {
  FpuRegister a = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister b = locations->InAt(1).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  Mips64Label noNaNs;
  Mips64Label done;
  // Scratch for the NaN-select: use |out| directly only when it does not
  // alias either input (otherwise the select would clobber an operand).
  FpuRegister ftmp = ((out != a) && (out != b)) ? out : FTMP;

  // When Java computes min/max it prefers a NaN to a number; the
  // behavior of MIPSR6 is to prefer numbers to NaNs, i.e., if one of
  // the inputs is a NaN and the other is a valid number, the MIPS
  // instruction will return the number; Java wants the NaN value
  // returned. This is why there is extra logic preceding the use of
  // the MIPS min.fmt/max.fmt instructions. If either a, or b holds a
  // NaN, return the NaN, otherwise return the min/max.
  if (type == DataType::Type::kFloat64) {
    __ CmpUnD(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqD(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelD(ftmp, a, b);

    if (ftmp != out) {
      __ MovD(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinD(out, a, b);
    } else {
      __ MaxD(out, a, b);
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat32);
    __ CmpUnS(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqS(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelS(ftmp, a, b);

    if (ftmp != out) {
      __ MovS(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinS(out, a, b);
    } else {
      __ MaxS(out, a, b);
    }
  }

  __ Bind(&done);
}
610
// Locations for intrinsics taking two FP values and producing an FP value.
static void CreateFPFPToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}
618
// double java.lang.Math.min(double, double)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, DataType::Type::kFloat64, GetAssembler());
}

// float java.lang.Math.min(float, float)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, DataType::Type::kFloat32, GetAssembler());
}

// double java.lang.Math.max(double, double)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, DataType::Type::kFloat64, GetAssembler());
}

// float java.lang.Math.max(float, float)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, DataType::Type::kFloat32, GetAssembler());
}
654
655static void GenMinMax(LocationSummary* locations,
656 bool is_min,
657 Mips64Assembler* assembler) {
658 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
659 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
660 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
661
Chris Larsenb74353a2015-11-20 09:07:09 -0800662 if (lhs == rhs) {
663 if (out != lhs) {
664 __ Move(out, lhs);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700665 }
666 } else {
Chris Larsenb74353a2015-11-20 09:07:09 -0800667 // Some architectures, such as ARM and MIPS (prior to r6), have a
668 // conditional move instruction which only changes the target
669 // (output) register if the condition is true (MIPS prior to r6 had
670 // MOVF, MOVT, and MOVZ). The SELEQZ and SELNEZ instructions always
671 // change the target (output) register. If the condition is true the
672 // output register gets the contents of the "rs" register; otherwise,
673 // the output register is set to zero. One consequence of this is
674 // that to implement something like "rd = c==0 ? rs : rt" MIPS64r6
675 // needs to use a pair of SELEQZ/SELNEZ instructions. After
676 // executing this pair of instructions one of the output registers
677 // from the pair will necessarily contain zero. Then the code ORs the
678 // output registers from the SELEQZ/SELNEZ instructions to get the
679 // final result.
680 //
681 // The initial test to see if the output register is same as the
682 // first input register is needed to make sure that value in the
683 // first input register isn't clobbered before we've finished
684 // computing the output value. The logic in the corresponding else
685 // clause performs the same task but makes sure the second input
686 // register isn't clobbered in the event that it's the same register
687 // as the output register; the else clause also handles the case
688 // where the output register is distinct from both the first, and the
689 // second input registers.
690 if (out == lhs) {
691 __ Slt(AT, rhs, lhs);
692 if (is_min) {
693 __ Seleqz(out, lhs, AT);
694 __ Selnez(AT, rhs, AT);
695 } else {
696 __ Selnez(out, lhs, AT);
697 __ Seleqz(AT, rhs, AT);
698 }
Chris Larsen0b7ac982015-09-04 12:54:28 -0700699 } else {
Chris Larsenb74353a2015-11-20 09:07:09 -0800700 __ Slt(AT, lhs, rhs);
701 if (is_min) {
702 __ Seleqz(out, rhs, AT);
703 __ Selnez(AT, lhs, AT);
704 } else {
705 __ Selnez(out, rhs, AT);
706 __ Seleqz(AT, lhs, AT);
707 }
Chris Larsen0b7ac982015-09-04 12:54:28 -0700708 }
Chris Larsenb74353a2015-11-20 09:07:09 -0800709 __ Or(out, out, AT);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700710 }
Chris Larsen0b7ac982015-09-04 12:54:28 -0700711}
712
Vladimir Markoca6fff82017-10-03 14:49:14 +0100713static void CreateIntIntToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
714 LocationSummary* locations =
715 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700716 locations->SetInAt(0, Location::RequiresRegister());
717 locations->SetInAt(1, Location::RequiresRegister());
718 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
719}
720
// int java.lang.Math.min(int, int)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
  // Two GPR inputs, one GPR output.
  CreateIntIntToIntLocations(allocator_, invoke);
}
725
void IntrinsicCodeGeneratorMIPS64::VisitMathMinIntInt(HInvoke* invoke) {
  // Integer min via the shared select-based sequence.
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}
729
// long java.lang.Math.min(long, long)
void IntrinsicLocationsBuilderMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
  // Two GPR inputs, one GPR output (64-bit values fit a single GPR on MIPS64).
  CreateIntIntToIntLocations(allocator_, invoke);
}
734
void IntrinsicCodeGeneratorMIPS64::VisitMathMinLongLong(HInvoke* invoke) {
  // Long min via the shared select-based sequence.
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}
738
// int java.lang.Math.max(int, int)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  // Two GPR inputs, one GPR output.
  CreateIntIntToIntLocations(allocator_, invoke);
}
743
void IntrinsicCodeGeneratorMIPS64::VisitMathMaxIntInt(HInvoke* invoke) {
  // Integer max via the shared select-based sequence.
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}
747
// long java.lang.Math.max(long, long)
void IntrinsicLocationsBuilderMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  // Two GPR inputs, one GPR output (64-bit values fit a single GPR on MIPS64).
  CreateIntIntToIntLocations(allocator_, invoke);
}
752
void IntrinsicCodeGeneratorMIPS64::VisitMathMaxLongLong(HInvoke* invoke) {
  // Long max via the shared select-based sequence.
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}
756
// double java.lang.Math.sqrt(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathSqrt(HInvoke* invoke) {
  // One FP input, one FP output (shared helper defined earlier in this file).
  CreateFPToFPLocations(allocator_, invoke);
}
761
762void IntrinsicCodeGeneratorMIPS64::VisitMathSqrt(HInvoke* invoke) {
763 LocationSummary* locations = invoke->GetLocations();
764 Mips64Assembler* assembler = GetAssembler();
765 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
766 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
767
768 __ SqrtD(out, in);
769}
770
Vladimir Markoca6fff82017-10-03 14:49:14 +0100771static void CreateFPToFP(ArenaAllocator* allocator,
Chris Larsen81284372015-10-21 15:28:53 -0700772 HInvoke* invoke,
773 Location::OutputOverlap overlaps = Location::kOutputOverlap) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100774 LocationSummary* locations =
775 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700776 locations->SetInAt(0, Location::RequiresFpuRegister());
Chris Larsen81284372015-10-21 15:28:53 -0700777 locations->SetOut(Location::RequiresFpuRegister(), overlaps);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700778}
779
// double java.lang.Math.rint(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathRint(HInvoke* invoke) {
  // Single-instruction implementation, so input and output may share a register.
  CreateFPToFP(allocator_, invoke, Location::kNoOutputOverlap);
}
784
785void IntrinsicCodeGeneratorMIPS64::VisitMathRint(HInvoke* invoke) {
786 LocationSummary* locations = invoke->GetLocations();
787 Mips64Assembler* assembler = GetAssembler();
788 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
789 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
790
791 __ RintD(out, in);
792}
793
// double java.lang.Math.floor(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathFloor(HInvoke* invoke) {
  // Output must not alias the input (the emitted sequence reads `in` after writing `out`).
  CreateFPToFP(allocator_, invoke);
}
798
Chris Larsen14500822015-10-01 11:35:18 -0700799const constexpr uint16_t kFPLeaveUnchanged = kPositiveZero |
800 kPositiveInfinity |
801 kNegativeZero |
802 kNegativeInfinity |
803 kQuietNaN |
804 kSignalingNaN;
Chris Larsen0b7ac982015-09-04 12:54:28 -0700805
// Selects which rounding instruction GenRoundingMode emits
// (FLOOR.L.D for kFloor, CEIL.L.D for kCeil).
enum FloatRoundingMode {
  kFloor,
  kCeil,
};
810
// Emits Math.floor/Math.ceil for doubles. Special operands (zeros,
// infinities, NaNs) and values whose floor/ceil does not fit a signed
// 64-bit integer are returned unchanged, matching Java semantics.
// Clobbers AT and TMP. Requires `in` != `out` (see DCHECK below).
static void GenRoundingMode(LocationSummary* locations,
                            FloatRoundingMode mode,
                            Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  // The sequence below reads `in` after writing `out`.
  DCHECK_NE(in, out);

  Mips64Label done;

  // double floor/ceil(double in) {
  //     if in.isNaN || in.isInfinite || in.isZero {
  //         return in;
  //     }
  __ ClassD(out, in);
  __ Dmfc1(AT, out);
  __ Andi(AT, AT, kFPLeaveUnchanged);  // +0.0 | +Inf | -0.0 | -Inf | qNaN | sNaN
  __ MovD(out, in);
  __ Bnezc(AT, &done);

  //     Long outLong = floor/ceil(in);
  //     if (outLong == Long.MAX_VALUE) || (outLong == Long.MIN_VALUE) {
  //         // floor()/ceil() has almost certainly returned a value
  //         // which can't be successfully represented as a signed
  //         // 64-bit number.  Java expects that the input value will
  //         // be returned in these cases.
  //         // There is also a small probability that floor(in)/ceil(in)
  //         // correctly truncates/rounds up the input value to
  //         // Long.MAX_VALUE or Long.MIN_VALUE. In these cases, this
  //         // exception handling code still does the correct thing.
  //         return in;
  //     }
  if (mode == kFloor) {
    __ FloorLD(out, in);
  } else if (mode == kCeil) {
    __ CeilLD(out, in);
  }
  __ Dmfc1(AT, out);
  __ MovD(out, in);
  __ Daddiu(TMP, AT, 1);
  __ Dati(TMP, 0x8000);  // TMP = AT + 0x8000 0000 0000 0001
                         // or AT - 0x7FFF FFFF FFFF FFFF.
                         // IOW, TMP = 1 if AT = Long.MIN_VALUE
                         // or TMP = 0 if AT = Long.MAX_VALUE.
  __ Dsrl(TMP, TMP, 1);  // TMP = 0 if AT = Long.MIN_VALUE
                         // or AT = Long.MAX_VALUE.
  __ Beqzc(TMP, &done);

  //     double out = outLong;
  //     return out;
  __ Dmtc1(AT, out);
  __ Cvtdl(out, out);
  __ Bind(&done);
  // }
}
866
void IntrinsicCodeGeneratorMIPS64::VisitMathFloor(HInvoke* invoke) {
  // Shared floor/ceil sequence, specialized for FLOOR.L.D.
  GenRoundingMode(invoke->GetLocations(), kFloor, GetAssembler());
}
870
// double java.lang.Math.ceil(double)
void IntrinsicLocationsBuilderMIPS64::VisitMathCeil(HInvoke* invoke) {
  // Output must not alias the input (the emitted sequence reads `in` after writing `out`).
  CreateFPToFP(allocator_, invoke);
}
875
void IntrinsicCodeGeneratorMIPS64::VisitMathCeil(HInvoke* invoke) {
  // Shared floor/ceil sequence, specialized for CEIL.L.D.
  GenRoundingMode(invoke->GetLocations(), kCeil, GetAssembler());
}
879
// Emits Math.round for float (-> int) or double (-> long): floor(in) plus
// one if the fractional part is >= 0.5. If floor() saturates to
// MIN_VALUE/MAX_VALUE the saturated value is returned as-is.
// Clobbers AT, TMP and FTMP; `half` is a caller-provided FP temp.
static void GenRound(LocationSummary* locations, Mips64Assembler* assembler, DataType::Type type) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister half = locations->GetTemp(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  DCHECK(type == DataType::Type::kFloat32 || type == DataType::Type::kFloat64);

  Mips64Label done;

  // out = floor(in);
  //
  // if (out != MAX_VALUE && out != MIN_VALUE) {
  //   TMP = ((in - out) >= 0.5) ? -1 : 0;  // all-ones mask from CMP.LE
  //   return out -= TMP;                   // i.e. out += 1 when mask set
  // }
  // return out;

  // out = floor(in);
  if (type == DataType::Type::kFloat64) {
    __ FloorLD(FTMP, in);
    __ Dmfc1(out, FTMP);
  } else {
    __ FloorWS(FTMP, in);
    __ Mfc1(out, FTMP);
  }

  // if (out != MAX_VALUE && out != MIN_VALUE)
  if (type == DataType::Type::kFloat64) {
    __ Daddiu(TMP, out, 1);
    __ Dati(TMP, 0x8000);  // TMP = out + 0x8000 0000 0000 0001
                           // or out - 0x7FFF FFFF FFFF FFFF.
                           // IOW, TMP = 1 if out = Long.MIN_VALUE
                           // or TMP = 0 if out = Long.MAX_VALUE.
    __ Dsrl(TMP, TMP, 1);  // TMP = 0 if out = Long.MIN_VALUE
                           // or out = Long.MAX_VALUE.
    __ Beqzc(TMP, &done);
  } else {
    __ Addiu(TMP, out, 1);
    __ Aui(TMP, TMP, 0x8000);  // TMP = out + 0x8000 0001
                               // or out - 0x7FFF FFFF.
                               // IOW, TMP = 1 if out = Int.MIN_VALUE
                               // or TMP = 0 if out = Int.MAX_VALUE.
    __ Srl(TMP, TMP, 1);       // TMP = 0 if out = Int.MIN_VALUE
                               // or out = Int.MAX_VALUE.
    __ Beqzc(TMP, &done);
  }

  // TMP = (0.5 <= (in - out)) ? -1 : 0;
  if (type == DataType::Type::kFloat64) {
    __ Cvtdl(FTMP, FTMP);  // Convert output of floor.l.d back to "double".
    __ LoadConst64(AT, bit_cast<int64_t, double>(0.5));
    __ SubD(FTMP, in, FTMP);
    __ Dmtc1(AT, half);
    __ CmpLeD(FTMP, half, FTMP);
    __ Dmfc1(TMP, FTMP);
  } else {
    __ Cvtsw(FTMP, FTMP);  // Convert output of floor.w.s back to "float".
    __ LoadConst32(AT, bit_cast<int32_t, float>(0.5f));
    __ SubS(FTMP, in, FTMP);
    __ Mtc1(AT, half);
    __ CmpLeS(FTMP, half, FTMP);
    __ Mfc1(TMP, FTMP);
  }

  // Return out -= TMP (TMP is 0 or -1, so this adds 0 or 1).
  if (type == DataType::Type::kFloat64) {
    __ Dsubu(out, out, TMP);
  } else {
    __ Subu(out, out, TMP);
  }

  __ Bind(&done);
}
953
954// int java.lang.Math.round(float)
955void IntrinsicLocationsBuilderMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100956 LocationSummary* locations =
957 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen7adaab02016-04-21 14:49:20 -0700958 locations->SetInAt(0, Location::RequiresFpuRegister());
959 locations->AddTemp(Location::RequiresFpuRegister());
960 locations->SetOut(Location::RequiresRegister());
961}
962
void IntrinsicCodeGeneratorMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
  // Shared rounding sequence, float -> int flavor.
  GenRound(invoke->GetLocations(), GetAssembler(), DataType::Type::kFloat32);
}
966
967// long java.lang.Math.round(double)
968void IntrinsicLocationsBuilderMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100969 LocationSummary* locations =
970 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen7adaab02016-04-21 14:49:20 -0700971 locations->SetInAt(0, Location::RequiresFpuRegister());
972 locations->AddTemp(Location::RequiresFpuRegister());
973 locations->SetOut(Location::RequiresRegister());
974}
975
void IntrinsicCodeGeneratorMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
  // Shared rounding sequence, double -> long flavor.
  GenRound(invoke->GetLocations(), GetAssembler(), DataType::Type::kFloat64);
}
979
Chris Larsen70fb1f42015-09-04 10:15:27 -0700980// byte libcore.io.Memory.peekByte(long address)
981void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100982 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen70fb1f42015-09-04 10:15:27 -0700983}
984
985void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
986 Mips64Assembler* assembler = GetAssembler();
987 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
988 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
989
990 __ Lb(out, adr, 0);
991}
992
// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  // One GPR input (raw address), one GPR output (shared helper defined earlier in this file).
  CreateIntToIntLocations(allocator_, invoke);
}
997
998void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
999 Mips64Assembler* assembler = GetAssembler();
1000 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
1001 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
1002
1003 __ Lh(out, adr, 0);
1004}
1005
// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  // One GPR input (raw address), one GPR output (shared helper defined earlier in this file).
  CreateIntToIntLocations(allocator_, invoke);
}
1010
1011void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
1012 Mips64Assembler* assembler = GetAssembler();
1013 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
1014 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
1015
1016 __ Lw(out, adr, 0);
1017}
1018
// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  // One GPR input (raw address), one GPR output (shared helper defined earlier in this file).
  CreateIntToIntLocations(allocator_, invoke);
}
1023
1024void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
1025 Mips64Assembler* assembler = GetAssembler();
1026 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
1027 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
1028
1029 __ Ld(out, adr, 0);
1030}
1031
Vladimir Markoca6fff82017-10-03 14:49:14 +01001032static void CreateIntIntToVoidLocations(ArenaAllocator* allocator, HInvoke* invoke) {
1033 LocationSummary* locations =
1034 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen70fb1f42015-09-04 10:15:27 -07001035 locations->SetInAt(0, Location::RequiresRegister());
1036 locations->SetInAt(1, Location::RequiresRegister());
1037}
1038
// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  // Two GPR inputs (address, value), no output.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
1043
1044void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
1045 Mips64Assembler* assembler = GetAssembler();
1046 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
1047 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
1048
1049 __ Sb(val, adr, 0);
1050}
1051
// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  // Two GPR inputs (address, value), no output.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
1056
1057void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
1058 Mips64Assembler* assembler = GetAssembler();
1059 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
1060 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
1061
1062 __ Sh(val, adr, 0);
1063}
1064
// void libcore.io.Memory.pokeInt(long address, int value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  // Two GPR inputs (address, value), no output.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
1069
1070void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
1071 Mips64Assembler* assembler = GetAssembler();
1072 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
1073 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
1074
1075 __ Sw(val, adr, 00);
1076}
1077
// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  // Two GPR inputs (address, value), no output.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
1082
1083void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
1084 Mips64Assembler* assembler = GetAssembler();
1085 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
1086 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
1087
1088 __ Sd(val, adr, 0);
1089}
1090
// Thread java.lang.Thread.currentThread()
void IntrinsicLocationsBuilderMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  // No inputs; the result (the thread's Java peer) goes in a GPR.
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}
1097
void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();

  // Load the Java-level Thread peer from the native Thread* held in the
  // dedicated thread register (TR). The reference is loaded as a
  // zero-extended 32-bit word.
  __ LoadFromOffset(kLoadUnsignedWord,
                    out,
                    TR,
                    Thread::PeerOffset<kMips64PointerSize>().Int32Value());
}
1107
// Location setup for the Unsafe.get* intrinsics: (receiver, base object,
// long offset) -> value. Reference-typed gets may need to call the read
// barrier slow path, which changes the call kind and register constraints.
static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
                                          HInvoke* invoke,
                                          DataType::Type type) {
  // Only the object-reference flavors can invoke the read barrier.
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke,
                                      can_call
                                          ? LocationSummary::kCallOnSlowPath
                                          : LocationSummary::kNoCall,
                                      kIntrinsified);
  if (can_call && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  // With a possible slow-path call the output must not alias the inputs.
  locations->SetOut(Location::RequiresRegister(),
                    (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in InstructionCodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}
1134
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits the load for Unsafe.getInt/getLong/getObject (and their volatile
// variants): reads `type`-sized data at base + offset. Volatile loads are
// followed by a SYNC memory barrier. Reference loads go through the
// configured read barrier (Baker fast path or the generic slow path).
static void GenUnsafeGet(HInvoke* invoke,
                         DataType::Type type,
                         bool is_volatile,
                         CodeGeneratorMIPS64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == DataType::Type::kInt32) ||
         (type == DataType::Type::kInt64) ||
         (type == DataType::Type::kReference)) << type;
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Target register.
  Location trg_loc = locations->Out();
  GpuRegister trg = trg_loc.AsRegister<GpuRegister>();
  // Object pointer.
  Location base_loc = locations->InAt(1);
  GpuRegister base = base_loc.AsRegister<GpuRegister>();
  // Long offset.
  Location offset_loc = locations->InAt(2);
  GpuRegister offset = offset_loc.AsRegister<GpuRegister>();

  // The Baker read barrier path computes its own address from base + index;
  // every other path loads through TMP = base + offset.
  if (!(kEmitCompilerReadBarrier && kUseBakerReadBarrier && (type == DataType::Type::kReference))) {
    __ Daddu(TMP, base, offset);
  }

  switch (type) {
    case DataType::Type::kInt64:
      __ Ld(trg, TMP, 0);
      if (is_volatile) {
        __ Sync(0);
      }
      break;

    case DataType::Type::kInt32:
      __ Lw(trg, TMP, 0);
      if (is_volatile) {
        __ Sync(0);
      }
      break;

    case DataType::Type::kReference:
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          Location temp = locations->GetTemp(0);
          codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke,
                                                             trg_loc,
                                                             base,
                                                             /* offset */ 0U,
                                                             /* index */ offset_loc,
                                                             TIMES_1,
                                                             temp,
                                                             /* needs_null_check */ false);
          if (is_volatile) {
            __ Sync(0);
          }
        } else {
          __ Lwu(trg, TMP, 0);
          if (is_volatile) {
            __ Sync(0);
          }
          codegen->GenerateReadBarrierSlow(invoke,
                                           trg_loc,
                                           trg_loc,
                                           base_loc,
                                           /* offset */ 0U,
                                           /* index */ offset_loc);
        }
      } else {
        // No read barrier: plain unsigned word load, then undo any heap
        // reference poisoning.
        __ Lwu(trg, TMP, 0);
        if (is_volatile) {
          __ Sync(0);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}
1216
// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  // Base object + long offset in registers; receiver unused.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}
1221
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  // Non-volatile 32-bit load.
  GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ false, codegen_);
}
1225
// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  // Base object + long offset in registers; receiver unused.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}
1230
1231void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001232 GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ true, codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001233}
1234
// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  // Base object + long offset in registers; receiver unused.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}
1239
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  // Non-volatile 64-bit load.
  GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ false, codegen_);
}
1243
// long sun.misc.Unsafe.getLongVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  // Base object + long offset in registers; receiver unused.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}
1248
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  // Volatile 64-bit load (followed by a SYNC barrier in GenUnsafeGet).
  GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ true, codegen_);
}
1252
// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  // Reference flavor: may add a temp / slow-path call for the read barrier.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}
1257
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  // Non-volatile reference load (goes through the read barrier if enabled).
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ false, codegen_);
}
1261
// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  // Reference flavor: may add a temp / slow-path call for the read barrier.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}
1266
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  // Volatile reference load (read barrier if enabled, then a SYNC barrier).
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ true, codegen_);
}
1270
Vladimir Markoca6fff82017-10-03 14:49:14 +01001271static void CreateIntIntIntIntToVoid(ArenaAllocator* allocator, HInvoke* invoke) {
1272 LocationSummary* locations =
1273 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen1360ada2015-09-04 23:38:16 -07001274 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1275 locations->SetInAt(1, Location::RequiresRegister());
1276 locations->SetInAt(2, Location::RequiresRegister());
1277 locations->SetInAt(3, Location::RequiresRegister());
1278}
1279
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits the store for the sun.misc.Unsafe.put* family: a 32-bit, 64-bit or
// reference store to (base + offset), with SYNC barriers for the
// volatile/ordered variants and a GC card mark for reference stores.
static void GenUnsafePut(LocationSummary* locations,
                         DataType::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorMIPS64* codegen) {
  DCHECK((type == DataType::Type::kInt32) ||
         (type == DataType::Type::kInt64) ||
         (type == DataType::Type::kReference));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>();

  // Effective address of the field.
  __ Daddu(TMP, base, offset);
  // SYNC barrier before the store for both the volatile and the ordered variants.
  if (is_volatile || is_ordered) {
    __ Sync(0);
  }
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
      if (kPoisonHeapReferences && type == DataType::Type::kReference) {
        // Poison into AT so `value` is left intact for the card mark below.
        __ PoisonHeapReference(AT, value);
        __ Sw(AT, TMP, 0);
      } else {
        __ Sw(value, TMP, 0);
      }
      break;

    case DataType::Type::kInt64:
      __ Sd(value, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
  // Trailing SYNC barrier for volatile stores only.
  if (is_volatile) {
    __ Sync(0);
  }

  // Reference stores must mark the GC card of the holding object.
  if (type == DataType::Type::kReference) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}
1329
// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
//
// Locations shared by all Unsafe.put* variants.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1334
1335void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001336 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001337 DataType::Type::kInt32,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001338 /* is_volatile */ false,
1339 /* is_ordered */ false,
1340 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001341}
1342
// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
//
// Locations shared by all Unsafe.put* variants.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1347
1348void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001349 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001350 DataType::Type::kInt32,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001351 /* is_volatile */ false,
1352 /* is_ordered */ true,
1353 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001354}
1355
// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
//
// Locations shared by all Unsafe.put* variants.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1360
1361void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001362 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001363 DataType::Type::kInt32,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001364 /* is_volatile */ true,
1365 /* is_ordered */ false,
1366 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001367}
1368
// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
//
// Locations shared by all Unsafe.put* variants.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1373
1374void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001375 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001376 DataType::Type::kReference,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001377 /* is_volatile */ false,
1378 /* is_ordered */ false,
1379 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001380}
1381
// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
//
// Locations shared by all Unsafe.put* variants.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1386
1387void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001388 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001389 DataType::Type::kReference,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001390 /* is_volatile */ false,
1391 /* is_ordered */ true,
1392 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001393}
1394
// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
//
// Locations shared by all Unsafe.put* variants.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1399
1400void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001401 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001402 DataType::Type::kReference,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001403 /* is_volatile */ true,
1404 /* is_ordered */ false,
1405 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001406}
1407
// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
//
// Locations shared by all Unsafe.put* variants.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1412
1413void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001414 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001415 DataType::Type::kInt64,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001416 /* is_volatile */ false,
1417 /* is_ordered */ false,
1418 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001419}
1420
// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
//
// Locations shared by all Unsafe.put* variants.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1425
1426void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001427 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001428 DataType::Type::kInt64,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001429 /* is_volatile */ false,
1430 /* is_ordered */ true,
1431 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001432}
1433
// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
//
// Locations shared by all Unsafe.put* variants.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1438
1439void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001440 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001441 DataType::Type::kInt64,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001442 /* is_volatile */ true,
1443 /* is_ordered */ false,
1444 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001445}
1446
Vladimir Markoca6fff82017-10-03 14:49:14 +01001447static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* allocator, HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001448 bool can_call = kEmitCompilerReadBarrier &&
1449 kUseBakerReadBarrier &&
1450 (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
Vladimir Markoca6fff82017-10-03 14:49:14 +01001451 LocationSummary* locations =
1452 new (allocator) LocationSummary(invoke,
1453 can_call
1454 ? LocationSummary::kCallOnSlowPath
1455 : LocationSummary::kNoCall,
1456 kIntrinsified);
Chris Larsen36427492015-10-23 02:19:38 -07001457 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1458 locations->SetInAt(1, Location::RequiresRegister());
1459 locations->SetInAt(2, Location::RequiresRegister());
1460 locations->SetInAt(3, Location::RequiresRegister());
1461 locations->SetInAt(4, Location::RequiresRegister());
Chris Larsen36427492015-10-23 02:19:38 -07001462 locations->SetOut(Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08001463
1464 // Temporary register used in CAS by (Baker) read barrier.
1465 if (can_call) {
1466 locations->AddTemp(Location::RequiresRegister());
1467 }
Chris Larsen36427492015-10-23 02:19:38 -07001468}
1469
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits an LL/SC (load-linked / store-conditional) compare-and-swap loop for
// the sun.misc.Unsafe.compareAndSwap{Int,Long,Object} intrinsics. On exit,
// `out` holds 1 if the swap happened and 0 otherwise.
static void GenCas(HInvoke* invoke, DataType::Type type, CodeGeneratorMIPS64* codegen) {
  Mips64Assembler* assembler = codegen->GetAssembler();
  LocationSummary* locations = invoke->GetLocations();
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  Location offset_loc = locations->InAt(2);
  GpuRegister offset = offset_loc.AsRegister<GpuRegister>();
  GpuRegister expected = locations->InAt(3).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(4).AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();

  // `out` is clobbered inside the loop; it must not alias the inputs that are
  // still needed after the first iteration.
  DCHECK_NE(base, out);
  DCHECK_NE(offset, out);
  DCHECK_NE(expected, out);

  if (type == DataType::Type::kReference) {
    // The only read barrier implementation supporting the
    // UnsafeCASObject intrinsic is the Baker-style read barriers.
    DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);

    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      Location temp = locations->GetTemp(0);
      // Need to make sure the reference stored in the field is a to-space
      // one before attempting the CAS or the CAS could fail incorrectly.
      codegen->GenerateReferenceLoadWithBakerReadBarrier(
          invoke,
          out_loc,  // Unused, used only as a "temporary" within the read barrier.
          base,
          /* offset */ 0u,
          /* index */ offset_loc,
          ScaleFactor::TIMES_1,
          temp,
          /* needs_null_check */ false,
          /* always_update_field */ true);
    }
  }

  Mips64Label loop_head, exit_loop;
  // Effective address of the field.
  __ Daddu(TMP, base, offset);

  if (kPoisonHeapReferences && type == DataType::Type::kReference) {
    // Poisoned references are what is actually stored in the heap, so both
    // comparison operands must be poisoned before the loop.
    __ PoisonHeapReference(expected);
    // Do not poison `value`, if it is the same register as
    // `expected`, which has just been poisoned.
    if (value != expected) {
      __ PoisonHeapReference(value);
    }
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  __ Sync(0);
  __ Bind(&loop_head);
  if (type == DataType::Type::kInt64) {
    __ Lld(out, TMP);
  } else {
    // Note: We will need a read barrier here, when read barrier
    // support is added to the MIPS64 back end.
    __ Ll(out, TMP);
    if (type == DataType::Type::kReference) {
      // The LL instruction sign-extends the 32-bit value, but
      // 32-bit references must be zero-extended. Zero-extend `out`.
      __ Dext(out, out, 0, 32);
    }
  }
  __ Dsubu(out, out, expected);  // If we didn't get the 'expected'
  __ Sltiu(out, out, 1);         // value, set 'out' to false, and
  __ Beqzc(out, &exit_loop);     // return.
  __ Move(out, value);  // Use 'out' for the 'store conditional' instruction.
                        // If we use 'value' directly, we would lose 'value'
                        // in the case that the store fails.  Whether the
                        // store succeeds, or fails, it will load the
                        // correct Boolean value into the 'out' register.
  if (type == DataType::Type::kInt64) {
    __ Scd(out, TMP);
  } else {
    __ Sc(out, TMP);
  }
  __ Beqzc(out, &loop_head);     // If we couldn't do the read-modify-write
                                 // cycle atomically then retry.
  __ Bind(&exit_loop);
  __ Sync(0);

  if (kPoisonHeapReferences && type == DataType::Type::kReference) {
    // Restore the caller-visible (unpoisoned) input registers.
    __ UnpoisonHeapReference(expected);
    // Do not unpoison `value`, if it is the same register as
    // `expected`, which has just been unpoisoned.
    if (value != expected) {
      __ UnpoisonHeapReference(value);
    }
  }
}
1572
// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
//
// Locations shared by all Unsafe.compareAndSwap* variants.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
}
1577
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  // 32-bit LL/SC compare-and-swap (see GenCas).
  GenCas(invoke, DataType::Type::kInt32, codegen_);
}
1581
// boolean sun.misc.Unsafe.compareAndSwapLong(Object o, long offset, long expected, long x)
//
// Locations shared by all Unsafe.compareAndSwap* variants.
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
}
1586
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  // 64-bit LLD/SCD compare-and-swap (see GenCas).
  GenCas(invoke, DataType::Type::kInt64, codegen_);
}
1590
1591// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
1592void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001593 // The only read barrier implementation supporting the
1594 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1595 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
1596 return;
1597 }
1598
Vladimir Markoca6fff82017-10-03 14:49:14 +01001599 CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
Chris Larsen36427492015-10-23 02:19:38 -07001600}
1601
void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barriers.
  DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

  // Reference LL/SC compare-and-swap, including card mark and, with Baker
  // read barriers, a to-space update of the field (see GenCas).
  GenCas(invoke, DataType::Type::kReference, codegen_);
}
1609
Chris Larsen9701c2e2015-09-04 17:22:47 -07001610// int java.lang.String.compareTo(String anotherString)
1611void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001612 LocationSummary* locations = new (allocator_) LocationSummary(
1613 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001614 InvokeRuntimeCallingConvention calling_convention;
1615 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1616 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001617 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001618 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1619}
1620
1621void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
1622 Mips64Assembler* assembler = GetAssembler();
1623 LocationSummary* locations = invoke->GetLocations();
1624
1625 // Note that the null check must have been done earlier.
1626 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1627
1628 GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
1629 SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
1630 codegen_->AddSlowPath(slow_path);
1631 __ Beqzc(argument, slow_path->GetEntryLabel());
1632
Serban Constantinescufc734082016-07-19 17:18:07 +01001633 codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001634 __ Bind(slow_path->GetExitLabel());
1635}
1636
Chris Larsen972d6d72015-10-20 11:29:12 -07001637// boolean java.lang.String.equals(Object anObject)
1638void IntrinsicLocationsBuilderMIPS64::VisitStringEquals(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001639 LocationSummary* locations =
1640 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen972d6d72015-10-20 11:29:12 -07001641 locations->SetInAt(0, Location::RequiresRegister());
1642 locations->SetInAt(1, Location::RequiresRegister());
1643 locations->SetOut(Location::RequiresRegister());
1644
1645 // Temporary registers to store lengths of strings and for calculations.
1646 locations->AddTemp(Location::RequiresRegister());
1647 locations->AddTemp(Location::RequiresRegister());
1648 locations->AddTemp(Location::RequiresRegister());
1649}
1650
// boolean java.lang.String.equals(Object anObject)
//
// Emits a fast inline equality check: identity / null / class / length
// short-circuits, then an 8-bytes-at-a-time content comparison loop.
void IntrinsicCodeGeneratorMIPS64::VisitStringEquals(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister str = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister arg = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister temp2 = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister temp3 = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label loop;
  Mips64Label end;
  Mips64Label return_true;
  Mips64Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this", and the register
  // containing the pointer to "anObject" are the same register then
  // "this", and "anObject" are the same object and we can
  // short-circuit the logic to a true result.
  if (str == arg) {
    __ LoadConst64(out, 1);
    return;
  }

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ Beqzc(arg, &return_false);
  }

  // Reference equality check, return true if same reference.
  __ Beqc(str, arg, &return_true);

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    __ Lw(temp1, str, class_offset);
    __ Lw(temp2, arg, class_offset);
    __ Bnec(temp1, temp2, &return_false);
  }

  // Load `count` fields of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if `count` fields are equal, return false if they're not.
  // Also compares the compression style, if differs return false.
  __ Bnec(temp1, temp2, &return_false);
  // Return true if both strings are empty. Even with string compression `count == 0` means empty.
  static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                "Expecting 0=compressed, 1=uncompressed");
  __ Beqzc(temp1, &return_true);

  // Don't overwrite input registers; iterate over copies instead.
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 8 bytes at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  if (mirror::kUseStringCompression) {
    // For string compression, calculate the number of bytes to compare (not chars).
    __ Dext(temp2, temp1, 0, 1);         // Extract compression flag.
    __ Srl(temp1, temp1, 1);             // Extract length.
    __ Sllv(temp1, temp1, temp2);        // Double the byte count if uncompressed.
  }

  // Loop to compare strings 8 bytes at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to kObjectAlignment.
  __ Bind(&loop);
  __ Ld(out, TMP, value_offset);
  __ Ld(temp2, temp3, value_offset);
  __ Bnec(out, temp2, &return_false);
  __ Daddiu(TMP, TMP, 8);
  __ Daddiu(temp3, temp3, 8);
  // With string compression, we have compared 8 bytes, otherwise 4 chars.
  __ Addiu(temp1, temp1, mirror::kUseStringCompression ? -8 : -4);
  __ Bgtzc(temp1, &loop);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst64(out, 1);
  __ Bc(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst64(out, 0);
  __ Bind(&end);
}
1753
// Shared implementation of String.indexOf(int) and String.indexOf(int, int):
// validates the code point (UTF-16 only), optionally forces start index 0,
// then calls the runtime's IndexOf entrypoint.
static void GenerateStringIndexOf(HInvoke* invoke,
                                  Mips64Assembler* assembler,
                                  CodeGeneratorMIPS64* codegen,
                                  ArenaAllocator* allocator,
                                  bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  // With start_at_zero the temp is an argument register we must clear;
  // otherwise TMP is free to serve as a scratch register.
  GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
  SlowPathCodeMIPS64* slow_path = nullptr;
  HInstruction* code_point = invoke->InputAt(1);
  if (code_point->IsIntConstant()) {
    if (!IsUint<16>(code_point->AsIntConstant()->GetValue())) {
      // Always needs the slow-path. We could directly dispatch to it,
      // but this case should be rare, so for simplicity just put the
      // full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
      codegen->AddSlowPath(slow_path);
      __ Bc(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else if (code_point->GetType() != DataType::Type::kUint16) {
    // Non-constant, possibly out-of-range code point: compare at runtime.
    GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
    __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
    slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
    codegen->AddSlowPath(slow_path);
    __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel());  // UTF-16 required
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, A2);
    // Start-index = 0.
    __ Clear(tmp_reg);
  }

  codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
1801
1802// int java.lang.String.indexOf(int ch)
1803void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001804 LocationSummary* locations = new (allocator_) LocationSummary(
1805 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001806 // We have a hand-crafted assembly stub that follows the runtime
1807 // calling convention. So it's best to align the inputs accordingly.
1808 InvokeRuntimeCallingConvention calling_convention;
1809 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1810 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001811 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001812 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1813
1814 // Need a temp for slow-path codepoint compare, and need to send start-index=0.
1815 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1816}
1817
void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
  // This overload searches from the start: GenerateStringIndexOf clears the
  // start-index temp to 0 before calling the runtime.
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}
1821
1822// int java.lang.String.indexOf(int ch, int fromIndex)
1823void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001824 LocationSummary* locations = new (allocator_) LocationSummary(
1825 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001826 // We have a hand-crafted assembly stub that follows the runtime
1827 // calling convention. So it's best to align the inputs accordingly.
1828 InvokeRuntimeCallingConvention calling_convention;
1829 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1830 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1831 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001832 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001833 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1834}
1835
void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
  // The fromIndex argument is already in its calling-convention register,
  // so no start-index setup is needed (start_at_zero == false).
  GenerateStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}
1840
Roland Levillaincc3839c2016-02-29 16:23:48 +00001841// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001842void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001843 LocationSummary* locations = new (allocator_) LocationSummary(
1844 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001845 InvokeRuntimeCallingConvention calling_convention;
1846 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1847 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1848 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1849 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001850 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001851 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1852}
1853
// Emits the runtime call implementing StringFactory.newStringFromBytes,
// plus an explicit null check on the byte array.
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  // A null `data` array is routed through the slow path
  // (IntrinsicSlowPathMIPS64), which performs the call out of line with
  // the required null handling.
  __ Beqzc(byte_array, slow_path->GetEntryLabel());

  codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ Bind(slow_path->GetExitLabel());
}
1867
Roland Levillaincc3839c2016-02-29 16:23:48 +00001868// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001869void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001870 LocationSummary* locations =
1871 new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001872 InvokeRuntimeCallingConvention calling_convention;
1873 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1874 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1875 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001876 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001877 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1878}
1879
// Emits the runtime call implementing StringFactory.newStringFromChars.
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
}
1890
Roland Levillainf969a202016-03-09 16:14:00 +00001891// java.lang.StringFactory.newStringFromString(String toCopy)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001892void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001893 LocationSummary* locations = new (allocator_) LocationSummary(
1894 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001895 InvokeRuntimeCallingConvention calling_convention;
1896 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001897 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001898 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1899}
1900
// Emits the runtime call implementing StringFactory.newStringFromString,
// plus an explicit null check on the source string.
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  // A null `toCopy` is routed through the slow path, which performs the
  // call out of line with the required null handling.
  __ Beqzc(string_to_copy, slow_path->GetEntryLabel());

  codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ Bind(slow_path->GetExitLabel());
}
1914
// Emits code setting `out` to 1 if the FP input is +/- infinity, and to 0
// otherwise, for Float.isInfinite / Double.isInfinite.
static void GenIsInfinite(LocationSummary* locations,
                          bool is64bit,
                          Mips64Assembler* assembler) {
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  // CLASS.fmt deposits a bit mask classifying the value into FTMP.
  if (is64bit) {
    __ ClassD(FTMP, in);
  } else {
    __ ClassS(FTMP, in);
  }
  // Keep only the +infinity/-infinity class bits, then normalize the
  // result to a 0/1 boolean with an unsigned "0 < out" comparison.
  __ Mfc1(out, FTMP);
  __ Andi(out, out, kPositiveInfinity | kNegativeInfinity);
  __ Sltu(out, ZERO, out);
}
1930
// boolean java.lang.Float.isInfinite(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  // FP input, integer (boolean) output; computed inline, no runtime call.
  CreateFPToIntLocations(allocator_, invoke);
}
1935
void IntrinsicCodeGeneratorMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  // Single-precision variant of the shared classifier.
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
1939
// boolean java.lang.Double.isInfinite(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  // FP input, integer (boolean) output; computed inline, no runtime call.
  CreateFPToIntLocations(allocator_, invoke);
}
1944
void IntrinsicCodeGeneratorMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  // Double-precision variant of the shared classifier.
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
1948
Chris Larsene3660592016-11-09 11:13:42 -08001949// void java.lang.String.getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin)
1950void IntrinsicLocationsBuilderMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001951 LocationSummary* locations =
1952 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsene3660592016-11-09 11:13:42 -08001953 locations->SetInAt(0, Location::RequiresRegister());
1954 locations->SetInAt(1, Location::RequiresRegister());
1955 locations->SetInAt(2, Location::RequiresRegister());
1956 locations->SetInAt(3, Location::RequiresRegister());
1957 locations->SetInAt(4, Location::RequiresRegister());
1958
Chris Larsen366d4332017-03-23 09:02:56 -07001959 locations->AddTemp(Location::RequiresRegister());
1960 locations->AddTemp(Location::RequiresRegister());
1961 locations->AddTemp(Location::RequiresRegister());
Chris Larsene3660592016-11-09 11:13:42 -08001962}
1963
// Inline implementation of String.getChars: copies characters
// [srcBegin, srcEnd) of string `srcObj` into char array `dstObj` starting
// at `dstBegin`. As the "NoCheck" name indicates, no bounds checks are
// emitted here.
void IntrinsicCodeGeneratorMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);
  const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);

  GpuRegister srcObj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister srcBegin = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister srcEnd = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister dstObj = locations->InAt(3).AsRegister<GpuRegister>();
  GpuRegister dstBegin = locations->InAt(4).AsRegister<GpuRegister>();

  GpuRegister dstPtr = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister srcPtr = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister numChrs = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label done;
  Mips64Label loop;

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Get offset of value field within a string object.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  __ Beqc(srcEnd, srcBegin, &done);  // No characters to move.

  // Calculate number of characters to be copied.
  __ Dsubu(numChrs, srcEnd, srcBegin);

  // Calculate destination address: dstObj + data_offset + dstBegin * 2.
  __ Daddiu(dstPtr, dstObj, data_offset);
  __ Dlsa(dstPtr, dstBegin, dstPtr, char_shift);

  if (mirror::kUseStringCompression) {
    Mips64Label uncompressed_copy, compressed_loop;
    const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
    // Load count field and extract compression flag (bit 0 of the count
    // word).
    __ LoadFromOffset(kLoadWord, TMP, srcObj, count_offset);
    __ Dext(TMP, TMP, 0, 1);

    // If string is uncompressed, use uncompressed path.
    __ Bnezc(TMP, &uncompressed_copy);

    // Copy loop for compressed src, copying 1 character (8-bit) to (16-bit) at a time.
    __ Daddu(srcPtr, srcObj, srcBegin);
    __ Bind(&compressed_loop);
    __ LoadFromOffset(kLoadUnsignedByte, TMP, srcPtr, value_offset);
    __ StoreToOffset(kStoreHalfword, TMP, dstPtr, 0);
    __ Daddiu(numChrs, numChrs, -1);
    __ Daddiu(srcPtr, srcPtr, 1);
    __ Daddiu(dstPtr, dstPtr, 2);
    __ Bnezc(numChrs, &compressed_loop);

    __ Bc(&done);
    __ Bind(&uncompressed_copy);
  }

  // Calculate source address: srcObj + value_offset + srcBegin * 2.
  __ Daddiu(srcPtr, srcObj, value_offset);
  __ Dlsa(srcPtr, srcBegin, srcPtr, char_shift);

  // Copy loop for the uncompressed case, one 16-bit character at a time.
  __ Bind(&loop);
  __ Lh(AT, srcPtr, 0);
  __ Daddiu(numChrs, numChrs, -1);
  __ Daddiu(srcPtr, srcPtr, char_size);
  __ Sh(AT, dstPtr, 0);
  __ Daddiu(dstPtr, dstPtr, char_size);
  __ Bnezc(numChrs, &loop);

  __ Bind(&done);
}
2039
Chris Larsen5863f852017-03-23 15:41:37 -07002040// static void java.lang.System.arraycopy(Object src, int srcPos,
2041// Object dest, int destPos,
2042// int length)
2043void IntrinsicLocationsBuilderMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
2044 HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
2045 HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
2046 HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
2047
2048 // As long as we are checking, we might as well check to see if the src and dest
2049 // positions are >= 0.
2050 if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
2051 (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
2052 // We will have to fail anyways.
2053 return;
2054 }
2055
2056 // And since we are already checking, check the length too.
2057 if (length != nullptr) {
2058 int32_t len = length->GetValue();
2059 if (len < 0) {
2060 // Just call as normal.
2061 return;
2062 }
2063 }
2064
2065 // Okay, it is safe to generate inline code.
2066 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002067 new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
Chris Larsen5863f852017-03-23 15:41:37 -07002068 // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
2069 locations->SetInAt(0, Location::RequiresRegister());
2070 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
2071 locations->SetInAt(2, Location::RequiresRegister());
2072 locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
2073 locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));
2074
2075 locations->AddTemp(Location::RequiresRegister());
2076 locations->AddTemp(Location::RequiresRegister());
2077 locations->AddTemp(Location::RequiresRegister());
2078}
2079
// Utility routine to verify that "length(input) - pos >= length".
// `length_input_minus_pos` holds the precomputed value length(input) - pos;
// `length` is the requested copy length (constant or register). Branches
// to `slow_path` when not enough items remain.
static void EnoughItems(Mips64Assembler* assembler,
                        GpuRegister length_input_minus_pos,
                        Location length,
                        SlowPathCodeMIPS64* slow_path) {
  if (length.IsConstant()) {
    int32_t length_constant = length.GetConstant()->AsIntConstant()->GetValue();

    if (IsInt<16>(length_constant)) {
      // The constant fits slti's 16-bit immediate: set TMP when fewer
      // items remain than requested, then branch on TMP.
      __ Slti(TMP, length_input_minus_pos, length_constant);
      __ Bnezc(TMP, slow_path->GetEntryLabel());
    } else {
      // Otherwise materialize the constant and use compare-and-branch.
      __ LoadConst32(TMP, length_constant);
      __ Bltc(length_input_minus_pos, TMP, slow_path->GetEntryLabel());
    }
  } else {
    __ Bltc(length_input_minus_pos, length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
  }
}
2099
// Emits run-time validation that `pos` is a valid index into `input` (an
// array) and that length(input) - pos >= length, branching to `slow_path`
// on any violation. When `length_is_input_length` is true, the caller
// asserts that `length` equals length(input), so the check reduces to
// pos == 0.
static void CheckPosition(Mips64Assembler* assembler,
                          Location pos,
                          GpuRegister input,
                          Location length,
                          SlowPathCodeMIPS64* slow_path,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  // Calculate length(input) - pos.
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, AT, input, length_offset);
        EnoughItems(assembler, AT, length, slow_path);
      }
    } else {
      // Check that (length(input) - pos) >= zero.
      __ LoadFromOffset(kLoadWord, AT, input, length_offset);
      DCHECK_GT(pos_const, 0);
      __ Addiu32(AT, AT, -pos_const);
      __ Bltzc(AT, slow_path->GetEntryLabel());

      // Verify that (length(input) - pos) >= length.
      EnoughItems(assembler, AT, length, slow_path);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bnezc(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Verify that pos >= 0.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bltzc(pos_reg, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= zero.
    __ LoadFromOffset(kLoadWord, AT, input, length_offset);
    __ Subu(AT, AT, pos_reg);
    __ Bltzc(AT, slow_path->GetEntryLabel());

    // Verify that (length(input) - pos) >= length.
    EnoughItems(assembler, AT, length, slow_path);
  }
}
2146
// Inline copy of `length` chars from src[src_pos] to dest[dest_pos].
// Every condition the inline code cannot handle (src == dest overlap,
// null arrays, negative/out-of-range positions or length) branches to the
// slow path, which performs the original call out of line.
void IntrinsicCodeGeneratorMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
  Location src_pos = locations->InAt(1);
  GpuRegister dest = locations->InAt(2).AsRegister<GpuRegister>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  Mips64Label loop;

  GpuRegister dest_base = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister src_base = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister count = locations->GetTemp(2).AsRegister<GpuRegister>();

  SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ Beqc(src, dest, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ Beqzc(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Beqzc(dest, slow_path->GetEntryLabel());

  // Load length into register for count.
  if (length.IsConstant()) {
    __ LoadConst32(count, length.GetConstant()->AsIntConstant()->GetValue());
  } else {
    // If the length is negative, bail out.
    // We have already checked in the LocationsBuilder for the constant case.
    __ Bltzc(length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());

    __ Move(count, length.AsRegister<GpuRegister>());
  }

  // Validity checks: source.
  CheckPosition(assembler, src_pos, src, Location::RegisterLocation(count), slow_path);

  // Validity checks: dest.
  CheckPosition(assembler, dest_pos, dest, Location::RegisterLocation(count), slow_path);

  // If count is zero, we're done; note this jumps to the slow path's EXIT
  // label (i.e. straight past the copy), not its entry.
  __ Beqzc(count, slow_path->GetExitLabel());

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);

  const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Calculate source and destination addresses.
  if (src_pos.IsConstant()) {
    int32_t src_pos_const = src_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(src_base, src, data_offset + char_size * src_pos_const, TMP);
  } else {
    __ Daddiu64(src_base, src, data_offset, TMP);
    __ Dlsa(src_base, src_pos.AsRegister<GpuRegister>(), src_base, char_shift);
  }
  if (dest_pos.IsConstant()) {
    int32_t dest_pos_const = dest_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(dest_base, dest, data_offset + char_size * dest_pos_const, TMP);
  } else {
    __ Daddiu64(dest_base, dest, data_offset, TMP);
    __ Dlsa(dest_base, dest_pos.AsRegister<GpuRegister>(), dest_base, char_shift);
  }

  // Copy loop, one 16-bit char per iteration.
  __ Bind(&loop);
  __ Lh(TMP, src_base, 0);
  __ Daddiu(src_base, src_base, char_size);
  __ Daddiu(count, count, -1);
  __ Sh(TMP, dest_base, 0);
  __ Daddiu(dest_base, dest_base, char_size);
  __ Bnezc(count, &loop);

  __ Bind(slow_path->GetExitLabel());
}
2232
// Emits code computing {Integer,Long}.highestOneBit(in) into `out`: the
// MSB-only mask is shifted right by the number of leading zeros of the
// input, leaving exactly the highest set bit.
static void GenHighestOneBit(LocationSummary* locations,
                             DataType::Type type,
                             Mips64Assembler* assembler) {
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (type == DataType::Type::kInt64) {
    __ Dclz(TMP, in);
    __ LoadConst64(AT, INT64_C(0x8000000000000000));
    __ Dsrlv(AT, AT, TMP);
  } else {
    __ Clz(TMP, in);
    __ LoadConst32(AT, 0x80000000);
    __ Srlv(AT, AT, TMP);
  }
  // For either value of "type", when "in" is zero, "out" should also
  // be zero. Without this extra "and" operation, when "in" is zero,
  // "out" would be either Integer.MIN_VALUE, or Long.MIN_VALUE because
  // the MIPS logical shift operations "dsrlv", and "srlv" don't use
  // the shift amount (TMP) directly; they use either (TMP % 64) or
  // (TMP % 32), respectively.
  __ And(out, AT, in);
}
2258
// int java.lang.Integer.highestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  // Integer in, integer out; computed inline, no runtime call.
  CreateIntToIntLocations(allocator_, invoke);
}
2263
void IntrinsicCodeGeneratorMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  // 32-bit variant of the shared highestOneBit generator.
  GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}
2267
// long java.lang.Long.highestOneBit(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  // Integer in, integer out; computed inline, no runtime call.
  CreateIntToIntLocations(allocator_, invoke);
}
2272
void IntrinsicCodeGeneratorMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  // 64-bit variant of the shared highestOneBit generator.
  GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
2276
// Emits code computing {Integer,Long}.lowestOneBit(in) into `out`, using
// the identity (-in) & in, which isolates the least significant set bit
// (and yields zero for a zero input).
static void GenLowestOneBit(LocationSummary* locations,
                            DataType::Type type,
                            Mips64Assembler* assembler) {
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (type == DataType::Type::kInt64) {
    __ Dsubu(TMP, ZERO, in);
  } else {
    __ Subu(TMP, ZERO, in);
  }
  __ And(out, TMP, in);
}
2292
// int java.lang.Integer.lowestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  // Integer in, integer out; computed inline, no runtime call.
  CreateIntToIntLocations(allocator_, invoke);
}
2297
void IntrinsicCodeGeneratorMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  // 32-bit variant of the shared lowestOneBit generator.
  GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}
2301
// long java.lang.Long.lowestOneBit(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  // Integer in, integer out; computed inline, no runtime call.
  CreateIntToIntLocations(allocator_, invoke);
}
2306
void IntrinsicCodeGeneratorMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  // 64-bit variant of the shared lowestOneBit generator.
  GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
2310
// Builds locations for an intrinsic implemented by a main-path runtime
// call taking one FP argument and returning an FP value: the argument is
// pinned to the first FP argument register of the runtime calling
// convention, the result to the FP return location.
static void CreateFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
}
2319
// As CreateFPToFPCallLocations, but for runtime-call intrinsics taking
// two FP arguments (pinned to the first two FP argument registers).
static void CreateFPFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
}
2329
// Performs a runtime call to an FP -> FP math entrypoint. The locations
// built by CreateFPToFPCallLocations already place the argument in F12
// and the result in F0 (asserted below), so no register moves are needed.
static void GenFPToFPCall(HInvoke* invoke,
                          CodeGeneratorMIPS64* codegen,
                          QuickEntrypointEnum entry) {
  LocationSummary* locations = invoke->GetLocations();
  FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
  DCHECK_EQ(in, F12);
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
  DCHECK_EQ(out, F0);

  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
}
2341
// Performs a runtime call to an (FP, FP) -> FP math entrypoint. The
// locations built by CreateFPFPToFPCallLocations already place the
// arguments in F12/F13 and the result in F0 (asserted below).
static void GenFPFPToFPCall(HInvoke* invoke,
                            CodeGeneratorMIPS64* codegen,
                            QuickEntrypointEnum entry) {
  LocationSummary* locations = invoke->GetLocations();
  FpuRegister in0 = locations->InAt(0).AsFpuRegister<FpuRegister>();
  DCHECK_EQ(in0, F12);
  FpuRegister in1 = locations->InAt(1).AsFpuRegister<FpuRegister>();
  DCHECK_EQ(in1, F13);
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
  DCHECK_EQ(out, F0);

  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
}
2355
// static double java.lang.Math.cos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCos(HInvoke* invoke) {
  // One FP argument, FP result: implemented as a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}
2360
void IntrinsicCodeGeneratorMIPS64::VisitMathCos(HInvoke* invoke) {
  // Dispatch to the quick entrypoint implementing Math.cos.
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}
2364
// static double java.lang.Math.sin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathSin(HInvoke* invoke) {
  // One FP argument, FP result: implemented as a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}
2369
void IntrinsicCodeGeneratorMIPS64::VisitMathSin(HInvoke* invoke) {
  // Dispatch to the quick entrypoint implementing Math.sin.
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}
2373
// static double java.lang.Math.acos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAcos(HInvoke* invoke) {
  // One FP argument, FP result: implemented as a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}
2378
void IntrinsicCodeGeneratorMIPS64::VisitMathAcos(HInvoke* invoke) {
  // Dispatch to the quick entrypoint implementing Math.acos.
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}
2382
// static double java.lang.Math.asin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAsin(HInvoke* invoke) {
  // One FP argument, FP result: implemented as a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}
2387
void IntrinsicCodeGeneratorMIPS64::VisitMathAsin(HInvoke* invoke) {
  // Dispatch to the quick entrypoint implementing Math.asin.
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}
2391
// static double java.lang.Math.atan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan(HInvoke* invoke) {
  // One FP argument, FP result: implemented as a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}
2396
void IntrinsicCodeGeneratorMIPS64::VisitMathAtan(HInvoke* invoke) {
  // Dispatch to the quick entrypoint implementing Math.atan.
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}
2400
// static double java.lang.Math.atan2(double y, double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan2(HInvoke* invoke) {
  // Two FP arguments, FP result: implemented as a runtime call.
  CreateFPFPToFPCallLocations(allocator_, invoke);
}
2405
void IntrinsicCodeGeneratorMIPS64::VisitMathAtan2(HInvoke* invoke) {
  // Dispatch to the quick entrypoint implementing Math.atan2.
  GenFPFPToFPCall(invoke, codegen_, kQuickAtan2);
}
2409
// static double java.lang.Math.cbrt(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCbrt(HInvoke* invoke) {
  // One FP argument, FP result: implemented as a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}
2414
void IntrinsicCodeGeneratorMIPS64::VisitMathCbrt(HInvoke* invoke) {
  // Dispatch to the quick entrypoint implementing Math.cbrt.
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}
2418
// static double java.lang.Math.cosh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathCosh(HInvoke* invoke) {
  // One FP argument, FP result: implemented as a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}
2423
void IntrinsicCodeGeneratorMIPS64::VisitMathCosh(HInvoke* invoke) {
  // Dispatch to the quick entrypoint implementing Math.cosh.
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}
2427
// static double java.lang.Math.exp(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathExp(HInvoke* invoke) {
  // One FP argument, FP result: implemented as a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}
2432
void IntrinsicCodeGeneratorMIPS64::VisitMathExp(HInvoke* invoke) {
  // Dispatch to the quick entrypoint implementing Math.exp.
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}
2436
// static double java.lang.Math.expm1(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathExpm1(HInvoke* invoke) {
  // One FP argument, FP result: implemented as a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}
2441
void IntrinsicCodeGeneratorMIPS64::VisitMathExpm1(HInvoke* invoke) {
  // Dispatch to the quick entrypoint implementing Math.expm1.
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}
2445
// static double java.lang.Math.hypot(double x, double y)
void IntrinsicLocationsBuilderMIPS64::VisitMathHypot(HInvoke* invoke) {
  // Two FP arguments, FP result: implemented as a runtime call.
  CreateFPFPToFPCallLocations(allocator_, invoke);
}
2450
void IntrinsicCodeGeneratorMIPS64::VisitMathHypot(HInvoke* invoke) {
  // Dispatch to the quick entrypoint implementing Math.hypot.
  GenFPFPToFPCall(invoke, codegen_, kQuickHypot);
}
2454
// static double java.lang.Math.log(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog(HInvoke* invoke) {
  // One FP argument, FP result: implemented as a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}
2459
void IntrinsicCodeGeneratorMIPS64::VisitMathLog(HInvoke* invoke) {
  // Dispatch to the quick entrypoint implementing Math.log.
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}
2463
// static double java.lang.Math.log10(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog10(HInvoke* invoke) {
  // One FP argument, FP result: implemented as a runtime call.
  CreateFPToFPCallLocations(allocator_, invoke);
}
2468
void IntrinsicCodeGeneratorMIPS64::VisitMathLog10(HInvoke* invoke) {
  // Dispatch to the quick entrypoint implementing Math.log10.
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}
2472
// static double java.lang.Math.nextAfter(double start, double direction)
void IntrinsicLocationsBuilderMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  // Two FP arguments, FP result: implemented as a runtime call.
  CreateFPFPToFPCallLocations(allocator_, invoke);
}
2477
// Emits a call to the kQuickNextAfter entrypoint to implement Math.nextAfter.
void IntrinsicCodeGeneratorMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickNextAfter);
}
2481
// static double java.lang.Math.sinh(double x)
// Sets up a location summary for a runtime call taking one FP argument and
// returning an FP result.
void IntrinsicLocationsBuilderMIPS64::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}
2486
// Emits a call to the kQuickSinh entrypoint to implement Math.sinh.
void IntrinsicCodeGeneratorMIPS64::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}
2490
// static double java.lang.Math.tan(double a)
// Sets up a location summary for a runtime call taking one FP argument and
// returning an FP result.
void IntrinsicLocationsBuilderMIPS64::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}
2495
// Emits a call to the kQuickTan entrypoint to implement Math.tan.
void IntrinsicCodeGeneratorMIPS64::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}
2499
// static double java.lang.Math.tanh(double x)
// Sets up a location summary for a runtime call taking one FP argument and
// returning an FP result.
void IntrinsicLocationsBuilderMIPS64::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}
2504
// Emits a call to the kQuickTanh entrypoint to implement Math.tanh.
void IntrinsicCodeGeneratorMIPS64::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}
2508
// static java.lang.Integer java.lang.Integer.valueOf(int i)
// Sets up locations for the Integer.valueOf(int) intrinsic: the result uses
// the runtime-call reference return location and the argument uses the first
// runtime-call argument register, since the out-of-cache path below falls
// back to a runtime allocation.
void IntrinsicLocationsBuilderMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConvention calling_convention;
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      calling_convention.GetReturnLocation(DataType::Type::kReference),
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
2518
// Generates code for Integer.valueOf(int).  For values inside the boxed-value
// cache range [info.low, info.high] the cached java.lang.Integer is used
// (embedded directly for constant inputs, loaded from the cache array
// otherwise); out-of-range values allocate and initialize a fresh instance
// via a runtime call.
void IntrinsicCodeGeneratorMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo();
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  InstructionCodeGeneratorMIPS64* icodegen =
      down_cast<InstructionCodeGeneratorMIPS64*>(codegen_->GetInstructionVisitor());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  InvokeRuntimeCallingConvention calling_convention;
  if (invoke->InputAt(0)->IsConstant()) {
    // Constant argument: decide at compile time whether it is cached.
    int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
    if (value >= info.low && value <= info.high) {
      // Just embed the j.l.Integer in the code.
      ScopedObjectAccess soa(Thread::Current());
      mirror::Object* boxed = info.cache->Get(value + (-info.low));
      // The DCHECK also confirms the object lives in the boot image, so its
      // address is stable and can be baked into the generated code.
      DCHECK(boxed != nullptr && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boxed));
      uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(boxed));
      __ LoadConst64(out, address);
    } else {
      // Allocate and initialize a new j.l.Integer.
      // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
      // JIT object table.
      uint32_t address =
          dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
      __ LoadConst64(calling_convention.GetRegisterAt(0), address);
      // NOTE(review): the call names kQuickAllocObjectInitialized but the type
      // check below names kQuickAllocObjectWithChecks — confirm the two
      // entrypoints share the same signature.
      codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
      CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
      __ StoreConstToOffset(kStoreWord, value, out, info.value_offset, TMP);
      // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
      // one.
      icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    }
  } else {
    // Non-constant argument: range check at run time.
    GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
    Mips64Label allocate, done;
    int32_t count = static_cast<uint32_t>(info.high) - info.low + 1;

    // Is (info.low <= in) && (in <= info.high)?
    __ Addiu32(out, in, -info.low);
    // As unsigned quantities is out < (info.high - info.low + 1)?
    __ LoadConst32(AT, count);
    // Branch if out >= (info.high - info.low + 1).
    // This means that "in" is outside of the range [info.low, info.high].
    __ Bgeuc(out, AT, &allocate);

    // If the value is within the bounds, load the j.l.Integer directly from the array.
    uint32_t data_offset = mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
    uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.cache));
    __ LoadConst64(TMP, data_offset + address);
    // out = TMP + (out << 2): index scaled by the 4-byte heap reference size.
    __ Dlsa(out, out, TMP, TIMES_4);
    __ Lwu(out, out, 0);
    __ MaybeUnpoisonHeapReference(out);
    __ Bc(&done);

    __ Bind(&allocate);
    // Otherwise allocate and initialize a new j.l.Integer.
    address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
    __ LoadConst64(calling_convention.GetRegisterAt(0), address);
    codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
    __ StoreToOffset(kStoreWord, in, out, info.value_offset);
    // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
    // one.
    icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    __ Bind(&done);
  }
}
2586
Aart Bik2f9fcc92016-03-01 15:16:54 -08002587UNIMPLEMENTED_INTRINSIC(MIPS64, ReferenceGetReferent)
Aart Bik2f9fcc92016-03-01 15:16:54 -08002588UNIMPLEMENTED_INTRINSIC(MIPS64, SystemArrayCopy)
Aart Bik3f67e692016-01-15 14:35:12 -08002589
Aart Bikff7d89c2016-11-07 08:49:28 -08002590UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOf);
2591UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOfAfter);
Aart Bik71bf7b42016-11-16 10:17:46 -08002592UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferAppend);
2593UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferLength);
2594UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferToString);
2595UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppend);
2596UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderLength);
2597UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderToString);
Aart Bikff7d89c2016-11-07 08:49:28 -08002598
Aart Bik0e54c012016-03-04 12:08:31 -08002599// 1.8.
2600UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddInt)
2601UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddLong)
2602UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetInt)
2603UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetLong)
2604UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetObject)
Aart Bik0e54c012016-03-04 12:08:31 -08002605
Nicolas Geoffray365719c2017-03-08 13:11:50 +00002606UNIMPLEMENTED_INTRINSIC(MIPS64, ThreadInterrupted)
2607
Aart Bik2f9fcc92016-03-01 15:16:54 -08002608UNREACHABLE_INTRINSICS(MIPS64)
Chris Larsen3039e382015-08-26 07:54:08 -07002609
2610#undef __
2611
2612} // namespace mips64
2613} // namespace art