blob: 35335e8757b087c11a4c271d54ec72a34fba998b [file] [log] [blame]
Chris Larsen3039e382015-08-26 07:54:08 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "intrinsics_mips64.h"
18
19#include "arch/mips64/instruction_set_features_mips64.h"
20#include "art_method.h"
21#include "code_generator_mips64.h"
22#include "entrypoints/quick/quick_entrypoints.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070023#include "heap_poisoning.h"
Chris Larsen3039e382015-08-26 07:54:08 -070024#include "intrinsics.h"
25#include "mirror/array-inl.h"
Andreas Gampe895f9222017-07-05 09:53:32 -070026#include "mirror/object_array-inl.h"
Chris Larsen3039e382015-08-26 07:54:08 -070027#include "mirror/string.h"
Andreas Gampe508fdf32017-06-05 16:42:13 -070028#include "scoped_thread_state_change-inl.h"
Chris Larsen3039e382015-08-26 07:54:08 -070029#include "thread.h"
30#include "utils/mips64/assembler_mips64.h"
31#include "utils/mips64/constants_mips64.h"
32
33namespace art {
34
35namespace mips64 {
36
37IntrinsicLocationsBuilderMIPS64::IntrinsicLocationsBuilderMIPS64(CodeGeneratorMIPS64* codegen)
Vladimir Markoca6fff82017-10-03 14:49:14 +010038 : codegen_(codegen), allocator_(codegen->GetGraph()->GetAllocator()) {
Chris Larsen3039e382015-08-26 07:54:08 -070039}
40
41Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() {
42 return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler());
43}
44
45ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
Vladimir Markoca6fff82017-10-03 14:49:14 +010046 return codegen_->GetGraph()->GetAllocator();
Chris Larsen3039e382015-08-26 07:54:08 -070047}
48
Chris Larsen9701c2e2015-09-04 17:22:47 -070049#define __ codegen->GetAssembler()->
50
51static void MoveFromReturnRegister(Location trg,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010052 DataType::Type type,
Chris Larsen9701c2e2015-09-04 17:22:47 -070053 CodeGeneratorMIPS64* codegen) {
54 if (!trg.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010055 DCHECK_EQ(type, DataType::Type::kVoid);
Chris Larsen9701c2e2015-09-04 17:22:47 -070056 return;
57 }
58
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010059 DCHECK_NE(type, DataType::Type::kVoid);
Chris Larsen9701c2e2015-09-04 17:22:47 -070060
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010061 if (DataType::IsIntegralType(type) || type == DataType::Type::kReference) {
Chris Larsen9701c2e2015-09-04 17:22:47 -070062 GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
63 if (trg_reg != V0) {
64 __ Move(V0, trg_reg);
65 }
66 } else {
67 FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
68 if (trg_reg != F0) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010069 if (type == DataType::Type::kFloat32) {
Chris Larsen9701c2e2015-09-04 17:22:47 -070070 __ MovS(F0, trg_reg);
71 } else {
72 __ MovD(F0, trg_reg);
73 }
74 }
75 }
76}
77
78static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
79 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
80 IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
81}
82
83// Slow-path for fallback (calling the managed code to handle the
84// intrinsic) in an intrinsified call. This will copy the arguments
85// into the positions for a regular call.
86//
87// Note: The actual parameters are required to be in the locations
88// given by the invoke's location summary. If an intrinsic
89// modifies those locations before a slowpath call, they must be
90// restored!
91class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
92 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000093 explicit IntrinsicSlowPathMIPS64(HInvoke* invoke)
94 : SlowPathCodeMIPS64(invoke), invoke_(invoke) { }
Chris Larsen9701c2e2015-09-04 17:22:47 -070095
96 void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
97 CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);
98
99 __ Bind(GetEntryLabel());
100
101 SaveLiveRegisters(codegen, invoke_->GetLocations());
102
103 MoveArguments(invoke_, codegen);
104
105 if (invoke_->IsInvokeStaticOrDirect()) {
Vladimir Markoe7197bf2017-06-02 17:00:23 +0100106 codegen->GenerateStaticOrDirectCall(
107 invoke_->AsInvokeStaticOrDirect(), Location::RegisterLocation(A0), this);
Chris Larsen9701c2e2015-09-04 17:22:47 -0700108 } else {
Vladimir Markoe7197bf2017-06-02 17:00:23 +0100109 codegen->GenerateVirtualCall(
110 invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0), this);
Chris Larsen9701c2e2015-09-04 17:22:47 -0700111 }
112
113 // Copy the result back to the expected output.
114 Location out = invoke_->GetLocations()->Out();
115 if (out.IsValid()) {
116 DCHECK(out.IsRegister()); // TODO: Replace this when we support output in memory.
117 DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
118 MoveFromReturnRegister(out, invoke_->GetType(), codegen);
119 }
120
121 RestoreLiveRegisters(codegen, invoke_->GetLocations());
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700122 __ Bc(GetExitLabel());
Chris Larsen9701c2e2015-09-04 17:22:47 -0700123 }
124
125 const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS64"; }
126
127 private:
128 // The instruction where this slow path is happening.
129 HInvoke* const invoke_;
130
131 DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
132};
133
134#undef __
135
Chris Larsen3039e382015-08-26 07:54:08 -0700136bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
137 Dispatch(invoke);
138 LocationSummary* res = invoke->GetLocations();
139 return res != nullptr && res->Intrinsified();
140}
141
142#define __ assembler->
143
Vladimir Markoca6fff82017-10-03 14:49:14 +0100144static void CreateFPToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
145 LocationSummary* locations =
146 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen3039e382015-08-26 07:54:08 -0700147 locations->SetInAt(0, Location::RequiresFpuRegister());
148 locations->SetOut(Location::RequiresRegister());
149}
150
151static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
152 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
153 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
154
155 if (is64bit) {
156 __ Dmfc1(out, in);
157 } else {
158 __ Mfc1(out, in);
159 }
160}
161
162// long java.lang.Double.doubleToRawLongBits(double)
163void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100164 CreateFPToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700165}
166
167void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000168 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700169}
170
171// int java.lang.Float.floatToRawIntBits(float)
172void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100173 CreateFPToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700174}
175
176void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000177 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700178}
179
Vladimir Markoca6fff82017-10-03 14:49:14 +0100180static void CreateIntToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
181 LocationSummary* locations =
182 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen3039e382015-08-26 07:54:08 -0700183 locations->SetInAt(0, Location::RequiresRegister());
184 locations->SetOut(Location::RequiresFpuRegister());
185}
186
187static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
188 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
189 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
190
191 if (is64bit) {
192 __ Dmtc1(in, out);
193 } else {
194 __ Mtc1(in, out);
195 }
196}
197
198// double java.lang.Double.longBitsToDouble(long)
199void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100200 CreateIntToFPLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700201}
202
203void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000204 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700205}
206
207// float java.lang.Float.intBitsToFloat(int)
208void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100209 CreateIntToFPLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700210}
211
212void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000213 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700214}
215
Vladimir Markoca6fff82017-10-03 14:49:14 +0100216static void CreateIntToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
217 LocationSummary* locations =
218 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen3039e382015-08-26 07:54:08 -0700219 locations->SetInAt(0, Location::RequiresRegister());
220 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
221}
222
223static void GenReverseBytes(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100224 DataType::Type type,
Chris Larsen3039e382015-08-26 07:54:08 -0700225 Mips64Assembler* assembler) {
226 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
227 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
228
229 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100230 case DataType::Type::kInt16:
Chris Larsen3039e382015-08-26 07:54:08 -0700231 __ Dsbh(out, in);
232 __ Seh(out, out);
233 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100234 case DataType::Type::kInt32:
Chris Larsen3039e382015-08-26 07:54:08 -0700235 __ Rotr(out, in, 16);
236 __ Wsbh(out, out);
237 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100238 case DataType::Type::kInt64:
Chris Larsen3039e382015-08-26 07:54:08 -0700239 __ Dsbh(out, in);
240 __ Dshd(out, out);
241 break;
242 default:
243 LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
244 UNREACHABLE();
245 }
246}
247
248// int java.lang.Integer.reverseBytes(int)
249void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100250 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700251}
252
253void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverseBytes(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100254 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700255}
256
257// long java.lang.Long.reverseBytes(long)
258void IntrinsicLocationsBuilderMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100259 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700260}
261
262void IntrinsicCodeGeneratorMIPS64::VisitLongReverseBytes(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100263 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700264}
265
266// short java.lang.Short.reverseBytes(short)
267void IntrinsicLocationsBuilderMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100268 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700269}
270
271void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100272 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700273}
274
Chris Larsen81284372015-10-21 15:28:53 -0700275static void GenNumberOfLeadingZeroes(LocationSummary* locations,
276 bool is64bit,
277 Mips64Assembler* assembler) {
Chris Larsen3039e382015-08-26 07:54:08 -0700278 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
279 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
280
281 if (is64bit) {
282 __ Dclz(out, in);
283 } else {
284 __ Clz(out, in);
285 }
286}
287
288// int java.lang.Integer.numberOfLeadingZeros(int i)
289void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100290 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700291}
292
293void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000294 GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700295}
296
297// int java.lang.Long.numberOfLeadingZeros(long i)
298void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100299 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700300}
301
302void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000303 GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen0646da72015-09-22 16:02:40 -0700304}
305
Chris Larsen81284372015-10-21 15:28:53 -0700306static void GenNumberOfTrailingZeroes(LocationSummary* locations,
307 bool is64bit,
308 Mips64Assembler* assembler) {
Chris Larsen0646da72015-09-22 16:02:40 -0700309 Location in = locations->InAt(0);
310 Location out = locations->Out();
311
312 if (is64bit) {
313 __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
314 __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
315 __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
316 __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
317 } else {
318 __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
319 __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
320 __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
321 __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
322 }
323}
324
325// int java.lang.Integer.numberOfTrailingZeros(int i)
326void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100327 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen0646da72015-09-22 16:02:40 -0700328}
329
330void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000331 GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Chris Larsen0646da72015-09-22 16:02:40 -0700332}
333
334// int java.lang.Long.numberOfTrailingZeros(long i)
335void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100336 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen0646da72015-09-22 16:02:40 -0700337}
338
339void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000340 GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700341}
342
343static void GenReverse(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100344 DataType::Type type,
Chris Larsen3039e382015-08-26 07:54:08 -0700345 Mips64Assembler* assembler) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100346 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Chris Larsen3039e382015-08-26 07:54:08 -0700347
348 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
349 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
350
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100351 if (type == DataType::Type::kInt32) {
Chris Larsen3039e382015-08-26 07:54:08 -0700352 __ Rotr(out, in, 16);
353 __ Wsbh(out, out);
354 __ Bitswap(out, out);
355 } else {
356 __ Dsbh(out, in);
357 __ Dshd(out, out);
358 __ Dbitswap(out, out);
359 }
360}
361
362// int java.lang.Integer.reverse(int)
363void IntrinsicLocationsBuilderMIPS64::VisitIntegerReverse(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100364 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700365}
366
367void IntrinsicCodeGeneratorMIPS64::VisitIntegerReverse(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100368 GenReverse(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700369}
370
371// long java.lang.Long.reverse(long)
372void IntrinsicLocationsBuilderMIPS64::VisitLongReverse(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100373 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen3039e382015-08-26 07:54:08 -0700374}
375
376void IntrinsicCodeGeneratorMIPS64::VisitLongReverse(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100377 GenReverse(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
Chris Larsen3039e382015-08-26 07:54:08 -0700378}
379
Vladimir Markoca6fff82017-10-03 14:49:14 +0100380static void CreateFPToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
381 LocationSummary* locations =
382 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700383 locations->SetInAt(0, Location::RequiresFpuRegister());
384 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
385}
386
Chris Larsen7fda7852016-04-21 16:00:36 -0700387static void GenBitCount(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100388 const DataType::Type type,
Chris Larsen7fda7852016-04-21 16:00:36 -0700389 Mips64Assembler* assembler) {
390 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
391 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
392
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100393 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Chris Larsen7fda7852016-04-21 16:00:36 -0700394
395 // https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel
396 //
397 // A generalization of the best bit counting method to integers of
398 // bit-widths up to 128 (parameterized by type T) is this:
399 //
400 // v = v - ((v >> 1) & (T)~(T)0/3); // temp
401 // v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3); // temp
402 // v = (v + (v >> 4)) & (T)~(T)0/255*15; // temp
403 // c = (T)(v * ((T)~(T)0/255)) >> (sizeof(T) - 1) * BITS_PER_BYTE; // count
404 //
405 // For comparison, for 32-bit quantities, this algorithm can be executed
406 // using 20 MIPS instructions (the calls to LoadConst32() generate two
407 // machine instructions each for the values being used in this algorithm).
408 // A(n unrolled) loop-based algorithm requires 25 instructions.
409 //
410 // For a 64-bit operand this can be performed in 24 instructions compared
411 // to a(n unrolled) loop based algorithm which requires 38 instructions.
412 //
413 // There are algorithms which are faster in the cases where very few
414 // bits are set but the algorithm here attempts to minimize the total
415 // number of instructions executed even when a large number of bits
416 // are set.
417
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100418 if (type == DataType::Type::kInt32) {
Chris Larsen7fda7852016-04-21 16:00:36 -0700419 __ Srl(TMP, in, 1);
420 __ LoadConst32(AT, 0x55555555);
421 __ And(TMP, TMP, AT);
422 __ Subu(TMP, in, TMP);
423 __ LoadConst32(AT, 0x33333333);
424 __ And(out, TMP, AT);
425 __ Srl(TMP, TMP, 2);
426 __ And(TMP, TMP, AT);
427 __ Addu(TMP, out, TMP);
428 __ Srl(out, TMP, 4);
429 __ Addu(out, out, TMP);
430 __ LoadConst32(AT, 0x0F0F0F0F);
431 __ And(out, out, AT);
432 __ LoadConst32(TMP, 0x01010101);
433 __ MulR6(out, out, TMP);
434 __ Srl(out, out, 24);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100435 } else if (type == DataType::Type::kInt64) {
Chris Larsen7fda7852016-04-21 16:00:36 -0700436 __ Dsrl(TMP, in, 1);
437 __ LoadConst64(AT, 0x5555555555555555L);
438 __ And(TMP, TMP, AT);
439 __ Dsubu(TMP, in, TMP);
440 __ LoadConst64(AT, 0x3333333333333333L);
441 __ And(out, TMP, AT);
442 __ Dsrl(TMP, TMP, 2);
443 __ And(TMP, TMP, AT);
444 __ Daddu(TMP, out, TMP);
445 __ Dsrl(out, TMP, 4);
446 __ Daddu(out, out, TMP);
447 __ LoadConst64(AT, 0x0F0F0F0F0F0F0F0FL);
448 __ And(out, out, AT);
449 __ LoadConst64(TMP, 0x0101010101010101L);
450 __ Dmul(out, out, TMP);
451 __ Dsrl32(out, out, 24);
452 }
453}
454
455// int java.lang.Integer.bitCount(int)
456void IntrinsicLocationsBuilderMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100457 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen7fda7852016-04-21 16:00:36 -0700458}
459
460void IntrinsicCodeGeneratorMIPS64::VisitIntegerBitCount(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100461 GenBitCount(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
Chris Larsen7fda7852016-04-21 16:00:36 -0700462}
463
464// int java.lang.Long.bitCount(long)
465void IntrinsicLocationsBuilderMIPS64::VisitLongBitCount(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100466 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen7fda7852016-04-21 16:00:36 -0700467}
468
469void IntrinsicCodeGeneratorMIPS64::VisitLongBitCount(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100470 GenBitCount(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
Chris Larsen7fda7852016-04-21 16:00:36 -0700471}
472
Chris Larsen0b7ac982015-09-04 12:54:28 -0700473// double java.lang.Math.sqrt(double)
474void IntrinsicLocationsBuilderMIPS64::VisitMathSqrt(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100475 CreateFPToFPLocations(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700476}
477
478void IntrinsicCodeGeneratorMIPS64::VisitMathSqrt(HInvoke* invoke) {
479 LocationSummary* locations = invoke->GetLocations();
480 Mips64Assembler* assembler = GetAssembler();
481 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
482 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
483
484 __ SqrtD(out, in);
485}
486
Vladimir Markoca6fff82017-10-03 14:49:14 +0100487static void CreateFPToFP(ArenaAllocator* allocator,
Chris Larsen81284372015-10-21 15:28:53 -0700488 HInvoke* invoke,
489 Location::OutputOverlap overlaps = Location::kOutputOverlap) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100490 LocationSummary* locations =
491 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700492 locations->SetInAt(0, Location::RequiresFpuRegister());
Chris Larsen81284372015-10-21 15:28:53 -0700493 locations->SetOut(Location::RequiresFpuRegister(), overlaps);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700494}
495
496// double java.lang.Math.rint(double)
497void IntrinsicLocationsBuilderMIPS64::VisitMathRint(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100498 CreateFPToFP(allocator_, invoke, Location::kNoOutputOverlap);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700499}
500
501void IntrinsicCodeGeneratorMIPS64::VisitMathRint(HInvoke* invoke) {
502 LocationSummary* locations = invoke->GetLocations();
503 Mips64Assembler* assembler = GetAssembler();
504 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
505 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
506
507 __ RintD(out, in);
508}
509
510// double java.lang.Math.floor(double)
511void IntrinsicLocationsBuilderMIPS64::VisitMathFloor(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100512 CreateFPToFP(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700513}
514
Chris Larsen14500822015-10-01 11:35:18 -0700515const constexpr uint16_t kFPLeaveUnchanged = kPositiveZero |
516 kPositiveInfinity |
517 kNegativeZero |
518 kNegativeInfinity |
519 kQuietNaN |
520 kSignalingNaN;
Chris Larsen0b7ac982015-09-04 12:54:28 -0700521
Chris Larsen81284372015-10-21 15:28:53 -0700522enum FloatRoundingMode {
523 kFloor,
524 kCeil,
525};
526
527static void GenRoundingMode(LocationSummary* locations,
528 FloatRoundingMode mode,
529 Mips64Assembler* assembler) {
Chris Larsen0b7ac982015-09-04 12:54:28 -0700530 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
531 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
532
Chris Larsen81284372015-10-21 15:28:53 -0700533 DCHECK_NE(in, out);
534
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700535 Mips64Label done;
Chris Larsen0b7ac982015-09-04 12:54:28 -0700536
Chris Larsen81284372015-10-21 15:28:53 -0700537 // double floor/ceil(double in) {
Chris Larsen0b7ac982015-09-04 12:54:28 -0700538 // if in.isNaN || in.isInfinite || in.isZero {
539 // return in;
540 // }
541 __ ClassD(out, in);
542 __ Dmfc1(AT, out);
Chris Larsen14500822015-10-01 11:35:18 -0700543 __ Andi(AT, AT, kFPLeaveUnchanged); // +0.0 | +Inf | -0.0 | -Inf | qNaN | sNaN
Chris Larsen0b7ac982015-09-04 12:54:28 -0700544 __ MovD(out, in);
545 __ Bnezc(AT, &done);
546
Chris Larsen81284372015-10-21 15:28:53 -0700547 // Long outLong = floor/ceil(in);
Goran Jakovljevic716d0732017-04-07 11:18:59 +0200548 // if (outLong == Long.MAX_VALUE) || (outLong == Long.MIN_VALUE) {
Chris Larsen81284372015-10-21 15:28:53 -0700549 // // floor()/ceil() has almost certainly returned a value
550 // // which can't be successfully represented as a signed
551 // // 64-bit number. Java expects that the input value will
552 // // be returned in these cases.
553 // // There is also a small probability that floor(in)/ceil(in)
554 // // correctly truncates/rounds up the input value to
Goran Jakovljevic716d0732017-04-07 11:18:59 +0200555 // // Long.MAX_VALUE or Long.MIN_VALUE. In these cases, this
556 // // exception handling code still does the correct thing.
Chris Larsen0b7ac982015-09-04 12:54:28 -0700557 // return in;
558 // }
Chris Larsen81284372015-10-21 15:28:53 -0700559 if (mode == kFloor) {
560 __ FloorLD(out, in);
561 } else if (mode == kCeil) {
562 __ CeilLD(out, in);
563 }
Chris Larsen0b7ac982015-09-04 12:54:28 -0700564 __ Dmfc1(AT, out);
565 __ MovD(out, in);
Goran Jakovljevic716d0732017-04-07 11:18:59 +0200566 __ Daddiu(TMP, AT, 1);
567 __ Dati(TMP, 0x8000); // TMP = AT + 0x8000 0000 0000 0001
568 // or AT - 0x7FFF FFFF FFFF FFFF.
569 // IOW, TMP = 1 if AT = Long.MIN_VALUE
570 // or TMP = 0 if AT = Long.MAX_VALUE.
571 __ Dsrl(TMP, TMP, 1); // TMP = 0 if AT = Long.MIN_VALUE
572 // or AT = Long.MAX_VALUE.
573 __ Beqzc(TMP, &done);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700574
575 // double out = outLong;
576 // return out;
577 __ Dmtc1(AT, out);
578 __ Cvtdl(out, out);
579 __ Bind(&done);
580 // }
581}
582
Chris Larsen81284372015-10-21 15:28:53 -0700583void IntrinsicCodeGeneratorMIPS64::VisitMathFloor(HInvoke* invoke) {
584 GenRoundingMode(invoke->GetLocations(), kFloor, GetAssembler());
585}
586
Chris Larsen0b7ac982015-09-04 12:54:28 -0700587// double java.lang.Math.ceil(double)
588void IntrinsicLocationsBuilderMIPS64::VisitMathCeil(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100589 CreateFPToFP(allocator_, invoke);
Chris Larsen0b7ac982015-09-04 12:54:28 -0700590}
591
592void IntrinsicCodeGeneratorMIPS64::VisitMathCeil(HInvoke* invoke) {
Chris Larsen81284372015-10-21 15:28:53 -0700593 GenRoundingMode(invoke->GetLocations(), kCeil, GetAssembler());
Chris Larsen0b7ac982015-09-04 12:54:28 -0700594}
595
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100596static void GenRound(LocationSummary* locations, Mips64Assembler* assembler, DataType::Type type) {
Chris Larsen7adaab02016-04-21 14:49:20 -0700597 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
598 FpuRegister half = locations->GetTemp(0).AsFpuRegister<FpuRegister>();
599 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
600
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100601 DCHECK(type == DataType::Type::kFloat32 || type == DataType::Type::kFloat64);
Chris Larsen7adaab02016-04-21 14:49:20 -0700602
603 Mips64Label done;
Chris Larsen7adaab02016-04-21 14:49:20 -0700604
Chris Larsen7adaab02016-04-21 14:49:20 -0700605 // out = floor(in);
606 //
Lena Djokicf4e23a82017-05-09 15:43:45 +0200607 // if (out != MAX_VALUE && out != MIN_VALUE) {
608 // TMP = ((in - out) >= 0.5) ? 1 : 0;
Chris Larsen7adaab02016-04-21 14:49:20 -0700609 // return out += TMP;
610 // }
Lena Djokicf4e23a82017-05-09 15:43:45 +0200611 // return out;
Chris Larsen7adaab02016-04-21 14:49:20 -0700612
613 // out = floor(in);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100614 if (type == DataType::Type::kFloat64) {
Chris Larsen7adaab02016-04-21 14:49:20 -0700615 __ FloorLD(FTMP, in);
616 __ Dmfc1(out, FTMP);
617 } else {
618 __ FloorWS(FTMP, in);
619 __ Mfc1(out, FTMP);
620 }
621
Lena Djokicf4e23a82017-05-09 15:43:45 +0200622 // if (out != MAX_VALUE && out != MIN_VALUE)
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100623 if (type == DataType::Type::kFloat64) {
Lena Djokicf4e23a82017-05-09 15:43:45 +0200624 __ Daddiu(TMP, out, 1);
625 __ Dati(TMP, 0x8000); // TMP = out + 0x8000 0000 0000 0001
626 // or out - 0x7FFF FFFF FFFF FFFF.
627 // IOW, TMP = 1 if out = Long.MIN_VALUE
628 // or TMP = 0 if out = Long.MAX_VALUE.
629 __ Dsrl(TMP, TMP, 1); // TMP = 0 if out = Long.MIN_VALUE
630 // or out = Long.MAX_VALUE.
631 __ Beqzc(TMP, &done);
Chris Larsen7adaab02016-04-21 14:49:20 -0700632 } else {
Lena Djokicf4e23a82017-05-09 15:43:45 +0200633 __ Addiu(TMP, out, 1);
634 __ Aui(TMP, TMP, 0x8000); // TMP = out + 0x8000 0001
635 // or out - 0x7FFF FFFF.
636 // IOW, TMP = 1 if out = Int.MIN_VALUE
637 // or TMP = 0 if out = Int.MAX_VALUE.
638 __ Srl(TMP, TMP, 1); // TMP = 0 if out = Int.MIN_VALUE
639 // or out = Int.MAX_VALUE.
640 __ Beqzc(TMP, &done);
Chris Larsen7adaab02016-04-21 14:49:20 -0700641 }
Chris Larsen7adaab02016-04-21 14:49:20 -0700642
643 // TMP = (0.5 <= (in - out)) ? -1 : 0;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100644 if (type == DataType::Type::kFloat64) {
Chris Larsen7adaab02016-04-21 14:49:20 -0700645 __ Cvtdl(FTMP, FTMP); // Convert output of floor.l.d back to "double".
646 __ LoadConst64(AT, bit_cast<int64_t, double>(0.5));
647 __ SubD(FTMP, in, FTMP);
648 __ Dmtc1(AT, half);
649 __ CmpLeD(FTMP, half, FTMP);
Lena Djokicf4e23a82017-05-09 15:43:45 +0200650 __ Dmfc1(TMP, FTMP);
Chris Larsen7adaab02016-04-21 14:49:20 -0700651 } else {
652 __ Cvtsw(FTMP, FTMP); // Convert output of floor.w.s back to "float".
653 __ LoadConst32(AT, bit_cast<int32_t, float>(0.5f));
654 __ SubS(FTMP, in, FTMP);
655 __ Mtc1(AT, half);
656 __ CmpLeS(FTMP, half, FTMP);
Lena Djokicf4e23a82017-05-09 15:43:45 +0200657 __ Mfc1(TMP, FTMP);
Chris Larsen7adaab02016-04-21 14:49:20 -0700658 }
659
Chris Larsen7adaab02016-04-21 14:49:20 -0700660 // Return out -= TMP.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100661 if (type == DataType::Type::kFloat64) {
Lena Djokicf4e23a82017-05-09 15:43:45 +0200662 __ Dsubu(out, out, TMP);
Chris Larsen7adaab02016-04-21 14:49:20 -0700663 } else {
Lena Djokicf4e23a82017-05-09 15:43:45 +0200664 __ Subu(out, out, TMP);
Chris Larsen7adaab02016-04-21 14:49:20 -0700665 }
666
667 __ Bind(&done);
668}
669
670// int java.lang.Math.round(float)
671void IntrinsicLocationsBuilderMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100672 LocationSummary* locations =
673 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen7adaab02016-04-21 14:49:20 -0700674 locations->SetInAt(0, Location::RequiresFpuRegister());
675 locations->AddTemp(Location::RequiresFpuRegister());
676 locations->SetOut(Location::RequiresRegister());
677}
678
679void IntrinsicCodeGeneratorMIPS64::VisitMathRoundFloat(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100680 GenRound(invoke->GetLocations(), GetAssembler(), DataType::Type::kFloat32);
Chris Larsen7adaab02016-04-21 14:49:20 -0700681}
682
683// long java.lang.Math.round(double)
684void IntrinsicLocationsBuilderMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100685 LocationSummary* locations =
686 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen7adaab02016-04-21 14:49:20 -0700687 locations->SetInAt(0, Location::RequiresFpuRegister());
688 locations->AddTemp(Location::RequiresFpuRegister());
689 locations->SetOut(Location::RequiresRegister());
690}
691
692void IntrinsicCodeGeneratorMIPS64::VisitMathRoundDouble(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100693 GenRound(invoke->GetLocations(), GetAssembler(), DataType::Type::kFloat64);
Chris Larsen7adaab02016-04-21 14:49:20 -0700694}
695
// byte libcore.io.Memory.peekByte(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
  // Location setup shared by all Memory.peek* intrinsics (helper defined earlier in this file).
  CreateIntToIntLocations(allocator_, invoke);
}
700
701void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekByte(HInvoke* invoke) {
702 Mips64Assembler* assembler = GetAssembler();
703 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
704 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
705
706 __ Lb(out, adr, 0);
707}
708
// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  // Location setup shared by all Memory.peek* intrinsics (helper defined earlier in this file).
  CreateIntToIntLocations(allocator_, invoke);
}
713
714void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekShortNative(HInvoke* invoke) {
715 Mips64Assembler* assembler = GetAssembler();
716 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
717 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
718
719 __ Lh(out, adr, 0);
720}
721
// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  // Location setup shared by all Memory.peek* intrinsics (helper defined earlier in this file).
  CreateIntToIntLocations(allocator_, invoke);
}
726
727void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekIntNative(HInvoke* invoke) {
728 Mips64Assembler* assembler = GetAssembler();
729 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
730 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
731
732 __ Lw(out, adr, 0);
733}
734
// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  // Location setup shared by all Memory.peek* intrinsics (helper defined earlier in this file).
  CreateIntToIntLocations(allocator_, invoke);
}
739
740void IntrinsicCodeGeneratorMIPS64::VisitMemoryPeekLongNative(HInvoke* invoke) {
741 Mips64Assembler* assembler = GetAssembler();
742 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
743 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
744
745 __ Ld(out, adr, 0);
746}
747
Vladimir Markoca6fff82017-10-03 14:49:14 +0100748static void CreateIntIntToVoidLocations(ArenaAllocator* allocator, HInvoke* invoke) {
749 LocationSummary* locations =
750 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen70fb1f42015-09-04 10:15:27 -0700751 locations->SetInAt(0, Location::RequiresRegister());
752 locations->SetInAt(1, Location::RequiresRegister());
753}
754
// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
  // Location setup shared by all Memory.poke* intrinsics.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
759
760void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeByte(HInvoke* invoke) {
761 Mips64Assembler* assembler = GetAssembler();
762 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
763 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
764
765 __ Sb(val, adr, 0);
766}
767
// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  // Location setup shared by all Memory.poke* intrinsics.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
772
773void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeShortNative(HInvoke* invoke) {
774 Mips64Assembler* assembler = GetAssembler();
775 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
776 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
777
778 __ Sh(val, adr, 0);
779}
780
// void libcore.io.Memory.pokeInt(long address, int value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  // Location setup shared by all Memory.poke* intrinsics.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
785
786void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeIntNative(HInvoke* invoke) {
787 Mips64Assembler* assembler = GetAssembler();
788 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
789 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
790
791 __ Sw(val, adr, 00);
792}
793
// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  // Location setup shared by all Memory.poke* intrinsics.
  CreateIntIntToVoidLocations(allocator_, invoke);
}
798
799void IntrinsicCodeGeneratorMIPS64::VisitMemoryPokeLongNative(HInvoke* invoke) {
800 Mips64Assembler* assembler = GetAssembler();
801 GpuRegister adr = invoke->GetLocations()->InAt(0).AsRegister<GpuRegister>();
802 GpuRegister val = invoke->GetLocations()->InAt(1).AsRegister<GpuRegister>();
803
804 __ Sd(val, adr, 0);
805}
806
Chris Larsen49e55392015-09-04 16:04:03 -0700807// Thread java.lang.Thread.currentThread()
808void IntrinsicLocationsBuilderMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +0100809 LocationSummary* locations =
810 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen49e55392015-09-04 16:04:03 -0700811 locations->SetOut(Location::RequiresRegister());
812}
813
814void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) {
815 Mips64Assembler* assembler = GetAssembler();
816 GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
817
818 __ LoadFromOffset(kLoadUnsignedWord,
819 out,
820 TR,
821 Thread::PeerOffset<kMips64PointerSize>().Int32Value());
822}
823
// Builds the locations shared by the Unsafe.get* intrinsics: unused receiver,
// object and long offset in core registers, result in a core register. The
// summary becomes a slow-path call when a read barrier may be needed for the
// reference-typed getters.
static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
                                          HInvoke* invoke,
                                          DataType::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke,
                                      can_call
                                          ? LocationSummary::kCallOnSlowPath
                                          : LocationSummary::kNoCall,
                                      kIntrinsified);
  if (can_call && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(),
                    (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in InstructionCodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}
850
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits the body of an Unsafe.get{Int,Long,Object}[Volatile] intrinsic: loads a
// `type`-sized value from (object base + long offset). The volatile variants
// are followed by a Sync(0); reference loads go through the read-barrier
// machinery (or heap-reference unpoisoning) as configured.
static void GenUnsafeGet(HInvoke* invoke,
                         DataType::Type type,
                         bool is_volatile,
                         CodeGeneratorMIPS64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == DataType::Type::kInt32) ||
         (type == DataType::Type::kInt64) ||
         (type == DataType::Type::kReference)) << type;
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Target register.
  Location trg_loc = locations->Out();
  GpuRegister trg = trg_loc.AsRegister<GpuRegister>();
  // Object pointer.
  Location base_loc = locations->InAt(1);
  GpuRegister base = base_loc.AsRegister<GpuRegister>();
  // Long offset.
  Location offset_loc = locations->InAt(2);
  GpuRegister offset = offset_loc.AsRegister<GpuRegister>();

  // The Baker read-barrier reference path is handed `base` and `offset_loc`
  // below and forms the address itself, so TMP is only needed otherwise.
  if (!(kEmitCompilerReadBarrier && kUseBakerReadBarrier && (type == DataType::Type::kReference))) {
    __ Daddu(TMP, base, offset);
  }

  switch (type) {
    case DataType::Type::kInt64:
      __ Ld(trg, TMP, 0);
      if (is_volatile) {
        __ Sync(0);
      }
      break;

    case DataType::Type::kInt32:
      __ Lw(trg, TMP, 0);
      if (is_volatile) {
        __ Sync(0);
      }
      break;

    case DataType::Type::kReference:
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          Location temp = locations->GetTemp(0);
          codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke,
                                                             trg_loc,
                                                             base,
                                                             /* offset */ 0U,
                                                             /* index */ offset_loc,
                                                             TIMES_1,
                                                             temp,
                                                             /* needs_null_check */ false);
          if (is_volatile) {
            __ Sync(0);
          }
        } else {
          // Non-Baker read barrier: plain load, then the slow read barrier.
          __ Lwu(trg, TMP, 0);
          if (is_volatile) {
            __ Sync(0);
          }
          codegen->GenerateReadBarrierSlow(invoke,
                                           trg_loc,
                                           trg_loc,
                                           base_loc,
                                           /* offset */ 0U,
                                           /* index */ offset_loc);
        }
      } else {
        // No read barriers: plain load plus (conditional) unpoisoning.
        __ Lwu(trg, TMP, 0);
        if (is_volatile) {
          __ Sync(0);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}
932
// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) {
  // (receiver, object, offset) -> int; see CreateIntIntIntToIntLocations.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}
937
938void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100939 GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ false, codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -0700940}
941
// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  // (receiver, object, offset) -> int; see CreateIntIntIntToIntLocations.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}
946
947void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100948 GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ true, codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -0700949}
950
// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
  // (receiver, object, offset) -> long; see CreateIntIntIntToIntLocations.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}
955
956void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100957 GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ false, codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -0700958}
959
// long sun.misc.Unsafe.getLongVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  // (receiver, object, offset) -> long; see CreateIntIntIntToIntLocations.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}
964
965void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100966 GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ true, codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -0700967}
968
// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
  // Reference-typed getter; may add a read-barrier temp/slow path.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}
973
974void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100975 GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ false, codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -0700976}
977
// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  // Reference-typed getter; may add a read-barrier temp/slow path.
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}
982
983void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100984 GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ true, codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -0700985}
986
Vladimir Markoca6fff82017-10-03 14:49:14 +0100987static void CreateIntIntIntIntToVoid(ArenaAllocator* allocator, HInvoke* invoke) {
988 LocationSummary* locations =
989 new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen1360ada2015-09-04 23:38:16 -0700990 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
991 locations->SetInAt(1, Location::RequiresRegister());
992 locations->SetInAt(2, Location::RequiresRegister());
993 locations->SetInAt(3, Location::RequiresRegister());
994}
995
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits the body of an Unsafe.put{Int,Long,Object}[Volatile|Ordered] intrinsic:
// stores the value to (object base + long offset). Both `is_volatile` and
// `is_ordered` emit a Sync(0) before the store; `is_volatile` adds another one
// after it. Reference stores are poisoned when heap poisoning is enabled, and
// always mark the GC card for the holding object.
static void GenUnsafePut(LocationSummary* locations,
                         DataType::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorMIPS64* codegen) {
  DCHECK((type == DataType::Type::kInt32) ||
         (type == DataType::Type::kInt64) ||
         (type == DataType::Type::kReference));
  Mips64Assembler* assembler = codegen->GetAssembler();
  // Object pointer.
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  // Long offset.
  GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>();

  __ Daddu(TMP, base, offset);
  if (is_volatile || is_ordered) {
    __ Sync(0);
  }
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
      if (kPoisonHeapReferences && type == DataType::Type::kReference) {
        // Poison into AT so `value` itself is left intact for MarkGCCard below.
        __ PoisonHeapReference(AT, value);
        __ Sw(AT, TMP, 0);
      } else {
        __ Sw(value, TMP, 0);
      }
      break;

    case DataType::Type::kInt64:
      __ Sd(value, TMP, 0);
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
  if (is_volatile) {
    __ Sync(0);
  }

  if (type == DataType::Type::kReference) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}
1045
// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) {
  // (receiver, object, offset, value) -> void; see CreateIntIntIntIntToVoid.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1050
1051void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001052 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001053 DataType::Type::kInt32,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001054 /* is_volatile */ false,
1055 /* is_ordered */ false,
1056 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001057}
1058
// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
  // (receiver, object, offset, value) -> void; see CreateIntIntIntIntToVoid.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1063
1064void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001065 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001066 DataType::Type::kInt32,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001067 /* is_volatile */ false,
1068 /* is_ordered */ true,
1069 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001070}
1071
// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
  // (receiver, object, offset, value) -> void; see CreateIntIntIntIntToVoid.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1076
1077void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001078 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001079 DataType::Type::kInt32,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001080 /* is_volatile */ true,
1081 /* is_ordered */ false,
1082 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001083}
1084
// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
  // (receiver, object, offset, value) -> void; see CreateIntIntIntIntToVoid.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1089
1090void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001091 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001092 DataType::Type::kReference,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001093 /* is_volatile */ false,
1094 /* is_ordered */ false,
1095 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001096}
1097
// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  // (receiver, object, offset, value) -> void; see CreateIntIntIntIntToVoid.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1102
1103void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001104 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001105 DataType::Type::kReference,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001106 /* is_volatile */ false,
1107 /* is_ordered */ true,
1108 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001109}
1110
// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  // (receiver, object, offset, value) -> void; see CreateIntIntIntIntToVoid.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1115
1116void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001117 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001118 DataType::Type::kReference,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001119 /* is_volatile */ true,
1120 /* is_ordered */ false,
1121 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001122}
1123
// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
  // (receiver, object, offset, value) -> void; see CreateIntIntIntIntToVoid.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1128
1129void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001130 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001131 DataType::Type::kInt64,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001132 /* is_volatile */ false,
1133 /* is_ordered */ false,
1134 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001135}
1136
// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  // (receiver, object, offset, value) -> void; see CreateIntIntIntIntToVoid.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1141
1142void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001143 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001144 DataType::Type::kInt64,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001145 /* is_volatile */ false,
1146 /* is_ordered */ true,
1147 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001148}
1149
// void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  // (receiver, object, offset, value) -> void; see CreateIntIntIntIntToVoid.
  CreateIntIntIntIntToVoid(allocator_, invoke);
}
1154
1155void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001156 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001157 DataType::Type::kInt64,
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001158 /* is_volatile */ true,
1159 /* is_ordered */ false,
1160 codegen_);
Chris Larsen1360ada2015-09-04 23:38:16 -07001161}
1162
// Builds locations for the Unsafe.compareAndSwap* intrinsics: unused receiver;
// object, offset, expected value, and new value in core registers; boolean
// result in a core register. The UnsafeCASObject variant may become a
// slow-path call and gets an extra temp for the Baker read barrier.
static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* allocator, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      kUseBakerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke,
                                      can_call
                                          ? LocationSummary::kCallOnSlowPath
                                          : LocationSummary::kNoCall,
                                      kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());

  // Temporary register used in CAS by (Baker) read barrier.
  if (can_call) {
    locations->AddTemp(Location::RequiresRegister());
  }
}
1185
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits an LL/SC compare-and-swap loop for Unsafe.compareAndSwap{Int,Long,Object}
// on (object base + long offset). On exit, `out` is 1 if the field held
// `expected` and was replaced by `value`, 0 otherwise. Reference CASes also
// mark the GC card and, under Baker read barriers, heal the field first.
static void GenCas(HInvoke* invoke, DataType::Type type, CodeGeneratorMIPS64* codegen) {
  Mips64Assembler* assembler = codegen->GetAssembler();
  LocationSummary* locations = invoke->GetLocations();
  GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>();
  Location offset_loc = locations->InAt(2);
  GpuRegister offset = offset_loc.AsRegister<GpuRegister>();
  GpuRegister expected = locations->InAt(3).AsRegister<GpuRegister>();
  GpuRegister value = locations->InAt(4).AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();

  // `out` is clobbered inside the loop, so it must not alias the inputs that
  // are still live there.
  DCHECK_NE(base, out);
  DCHECK_NE(offset, out);
  DCHECK_NE(expected, out);

  if (type == DataType::Type::kReference) {
    // The only read barrier implementation supporting the
    // UnsafeCASObject intrinsic is the Baker-style read barriers.
    DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);

    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      Location temp = locations->GetTemp(0);
      // Need to make sure the reference stored in the field is a to-space
      // one before attempting the CAS or the CAS could fail incorrectly.
      codegen->GenerateReferenceLoadWithBakerReadBarrier(
          invoke,
          out_loc,  // Unused, used only as a "temporary" within the read barrier.
          base,
          /* offset */ 0u,
          /* index */ offset_loc,
          ScaleFactor::TIMES_1,
          temp,
          /* needs_null_check */ false,
          /* always_update_field */ true);
    }
  }

  Mips64Label loop_head, exit_loop;
  __ Daddu(TMP, base, offset);

  if (kPoisonHeapReferences && type == DataType::Type::kReference) {
    __ PoisonHeapReference(expected);
    // Do not poison `value`, if it is the same register as
    // `expected`, which has just been poisoned.
    if (value != expected) {
      __ PoisonHeapReference(value);
    }
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  __ Sync(0);
  __ Bind(&loop_head);
  if (type == DataType::Type::kInt64) {
    __ Lld(out, TMP);
  } else {
    // Note: We will need a read barrier here, when read barrier
    // support is added to the MIPS64 back end.
    __ Ll(out, TMP);
    if (type == DataType::Type::kReference) {
      // The LL instruction sign-extends the 32-bit value, but
      // 32-bit references must be zero-extended. Zero-extend `out`.
      __ Dext(out, out, 0, 32);
    }
  }
  __ Dsubu(out, out, expected);  // If we didn't get the 'expected'
  __ Sltiu(out, out, 1);         // value, set 'out' to false, and
  __ Beqzc(out, &exit_loop);     // return.
  __ Move(out, value);  // Use 'out' for the 'store conditional' instruction.
                        // If we use 'value' directly, we would lose 'value'
                        // in the case that the store fails.  Whether the
                        // store succeeds, or fails, it will load the
                        // correct Boolean value into the 'out' register.
  if (type == DataType::Type::kInt64) {
    __ Scd(out, TMP);
  } else {
    __ Sc(out, TMP);
  }
  __ Beqzc(out, &loop_head);  // If we couldn't do the read-modify-write
                              // cycle atomically then retry.
  __ Bind(&exit_loop);
  __ Sync(0);

  if (kPoisonHeapReferences && type == DataType::Type::kReference) {
    __ UnpoisonHeapReference(expected);
    // Do not unpoison `value`, if it is the same register as
    // `expected`, which has just been unpoisoned.
    if (value != expected) {
      __ UnpoisonHeapReference(value);
    }
  }
}
1288
1289// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
  // Five register inputs (Unsafe receiver, object, offset, expected value,
  // new value), an int output, plus the temporaries needed by GenCas.
  CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
}
1293
1294void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASInt(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001295 GenCas(invoke, DataType::Type::kInt32, codegen_);
Chris Larsen36427492015-10-23 02:19:38 -07001296}
1297
1298// boolean sun.misc.Unsafe.compareAndSwapLong(Object o, long offset, long expected, long x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
  // Five register inputs (Unsafe receiver, object, offset, expected value,
  // new value), an int output, plus the temporaries needed by GenCas.
  CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
}
1302
1303void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASLong(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001304 GenCas(invoke, DataType::Type::kInt64, codegen_);
Chris Larsen36427492015-10-23 02:19:38 -07001305}
1306
1307// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
void IntrinsicLocationsBuilderMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barriers.
  if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
    // Returning without creating a LocationSummary rejects the intrinsic,
    // leaving the invoke to be handled as a regular call.
    return;
  }

  CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
}
1317
1318void IntrinsicCodeGeneratorMIPS64::VisitUnsafeCASObject(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001319 // The only read barrier implementation supporting the
1320 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1321 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
1322
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001323 GenCas(invoke, DataType::Type::kReference, codegen_);
Chris Larsen36427492015-10-23 02:19:38 -07001324}
1325
Chris Larsen9701c2e2015-09-04 17:22:47 -07001326// int java.lang.String.compareTo(String anotherString)
1327void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001328 LocationSummary* locations = new (allocator_) LocationSummary(
1329 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001330 InvokeRuntimeCallingConvention calling_convention;
1331 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1332 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001333 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001334 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1335}
1336
void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // A null `anotherString` argument is diverted to the slow path; the fast
  // path falls through to the runtime entrypoint.
  GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(argument, slow_path->GetEntryLabel());

  // The actual comparison is performed by the kQuickStringCompareTo stub.
  codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
  __ Bind(slow_path->GetExitLabel());
}
1353
Chris Larsen972d6d72015-10-20 11:29:12 -07001354// boolean java.lang.String.equals(Object anObject)
1355void IntrinsicLocationsBuilderMIPS64::VisitStringEquals(HInvoke* invoke) {
Vladimir Markoda283052017-11-07 21:17:24 +00001356 if (kEmitCompilerReadBarrier &&
1357 !StringEqualsOptimizations(invoke).GetArgumentIsString() &&
1358 !StringEqualsOptimizations(invoke).GetNoReadBarrierForStringClass()) {
1359 // No support for this odd case (String class is moveable, not in the boot image).
1360 return;
1361 }
1362
Vladimir Markoca6fff82017-10-03 14:49:14 +01001363 LocationSummary* locations =
1364 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen972d6d72015-10-20 11:29:12 -07001365 locations->SetInAt(0, Location::RequiresRegister());
1366 locations->SetInAt(1, Location::RequiresRegister());
1367 locations->SetOut(Location::RequiresRegister());
1368
1369 // Temporary registers to store lengths of strings and for calculations.
1370 locations->AddTemp(Location::RequiresRegister());
1371 locations->AddTemp(Location::RequiresRegister());
1372 locations->AddTemp(Location::RequiresRegister());
1373}
1374
// Inline implementation of String.equals: short-circuits on identity and
// null, rejects non-String arguments (unless statically known), compares the
// `count` fields (length + compression flag), then compares the character
// data 8 bytes at a time.
void IntrinsicCodeGeneratorMIPS64::VisitStringEquals(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister str = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister arg = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister temp2 = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister temp3 = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label loop;
  Mips64Label end;
  Mips64Label return_true;
  Mips64Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this", and the register
  // containing the pointer to "anObject" are the same register then
  // "this", and "anObject" are the same object and we can
  // short-circuit the logic to a true result.
  if (str == arg) {
    __ LoadConst64(out, 1);
    return;
  }

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ Beqzc(arg, &return_false);
  }

  // Reference equality check, return true if same reference.
  __ Beqc(str, arg, &return_true);

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    __ Lw(temp1, str, class_offset);
    __ Lw(temp2, arg, class_offset);
    __ Bnec(temp1, temp2, &return_false);
  }

  // Load `count` fields of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if `count` fields are equal, return false if they're not.
  // Also compares the compression style, if differs return false.
  __ Bnec(temp1, temp2, &return_false);
  // Return true if both strings are empty. Even with string compression `count == 0` means empty.
  static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                "Expecting 0=compressed, 1=uncompressed");
  __ Beqzc(temp1, &return_true);

  // Don't overwrite input registers; iterate using TMP and temp3 instead.
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 8 bytes at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  if (mirror::kUseStringCompression) {
    // For string compression, calculate the number of bytes to compare (not chars).
    __ Dext(temp2, temp1, 0, 1);   // Extract compression flag.
    __ Srl(temp1, temp1, 1);       // Extract length.
    __ Sllv(temp1, temp1, temp2);  // Double the byte count if uncompressed.
  }

  // Loop to compare strings 8 bytes at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to kObjectAlignment.
  __ Bind(&loop);
  __ Ld(out, TMP, value_offset);
  __ Ld(temp2, temp3, value_offset);
  __ Bnec(out, temp2, &return_false);
  __ Daddiu(TMP, TMP, 8);
  __ Daddiu(temp3, temp3, 8);
  // With string compression, we have compared 8 bytes, otherwise 4 chars.
  __ Addiu(temp1, temp1, mirror::kUseStringCompression ? -8 : -4);
  __ Bgtzc(temp1, &loop);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst64(out, 1);
  __ Bc(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst64(out, 0);
  __ Bind(&end);
}
1477
Chris Larsen9701c2e2015-09-04 17:22:47 -07001478static void GenerateStringIndexOf(HInvoke* invoke,
1479 Mips64Assembler* assembler,
1480 CodeGeneratorMIPS64* codegen,
Chris Larsen9701c2e2015-09-04 17:22:47 -07001481 bool start_at_zero) {
1482 LocationSummary* locations = invoke->GetLocations();
1483 GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP;
1484
1485 // Note that the null check must have been done earlier.
1486 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1487
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001488 // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
1489 // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
Chris Larsen9701c2e2015-09-04 17:22:47 -07001490 SlowPathCodeMIPS64* slow_path = nullptr;
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001491 HInstruction* code_point = invoke->InputAt(1);
1492 if (code_point->IsIntConstant()) {
Vladimir Markoda051082016-05-17 16:10:20 +01001493 if (!IsUint<16>(code_point->AsIntConstant()->GetValue())) {
Chris Larsen9701c2e2015-09-04 17:22:47 -07001494 // Always needs the slow-path. We could directly dispatch to it,
1495 // but this case should be rare, so for simplicity just put the
1496 // full slow-path down and branch unconditionally.
Vladimir Marko174b2e22017-10-12 13:34:49 +01001497 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001498 codegen->AddSlowPath(slow_path);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001499 __ Bc(slow_path->GetEntryLabel());
Chris Larsen9701c2e2015-09-04 17:22:47 -07001500 __ Bind(slow_path->GetExitLabel());
1501 return;
1502 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001503 } else if (code_point->GetType() != DataType::Type::kUint16) {
Chris Larsen9701c2e2015-09-04 17:22:47 -07001504 GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
1505 __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
Vladimir Marko174b2e22017-10-12 13:34:49 +01001506 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001507 codegen->AddSlowPath(slow_path);
1508 __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel()); // UTF-16 required
1509 }
1510
1511 if (start_at_zero) {
1512 DCHECK_EQ(tmp_reg, A2);
1513 // Start-index = 0.
1514 __ Clear(tmp_reg);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001515 }
1516
Serban Constantinescufc734082016-07-19 17:18:07 +01001517 codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
Roland Levillain42ad2882016-02-29 18:26:54 +00001518 CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
Chris Larsen9701c2e2015-09-04 17:22:47 -07001519
1520 if (slow_path != nullptr) {
1521 __ Bind(slow_path->GetExitLabel());
1522 }
1523}
1524
1525// int java.lang.String.indexOf(int ch)
1526void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001527 LocationSummary* locations = new (allocator_) LocationSummary(
1528 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001529 // We have a hand-crafted assembly stub that follows the runtime
1530 // calling convention. So it's best to align the inputs accordingly.
1531 InvokeRuntimeCallingConvention calling_convention;
1532 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1533 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001534 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001535 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1536
1537 // Need a temp for slow-path codepoint compare, and need to send start-index=0.
1538 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1539}
1540
1541void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001542 GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero */ true);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001543}
1544
1545// int java.lang.String.indexOf(int ch, int fromIndex)
1546void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001547 LocationSummary* locations = new (allocator_) LocationSummary(
1548 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001549 // We have a hand-crafted assembly stub that follows the runtime
1550 // calling convention. So it's best to align the inputs accordingly.
1551 InvokeRuntimeCallingConvention calling_convention;
1552 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1553 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1554 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001555 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001556 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1557}
1558
1559void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001560 GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero */ false);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001561}
1562
Roland Levillaincc3839c2016-02-29 16:23:48 +00001563// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001564void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001565 LocationSummary* locations = new (allocator_) LocationSummary(
1566 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001567 InvokeRuntimeCallingConvention calling_convention;
1568 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1569 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1570 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1571 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001572 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001573 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1574}
1575
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // A null `data` byte array is diverted to the slow path.
  GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(byte_array, slow_path->GetEntryLabel());

  // The string allocation itself is performed by the runtime entrypoint.
  codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ Bind(slow_path->GetExitLabel());
}
1590
Roland Levillaincc3839c2016-02-29 16:23:48 +00001591// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001592void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001593 LocationSummary* locations =
1594 new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001595 InvokeRuntimeCallingConvention calling_convention;
1596 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1597 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1598 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001599 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001600 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1601}
1602
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) {
  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  // The allocation itself is delegated entirely to the runtime entrypoint.
  codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
}
1613
Roland Levillainf969a202016-03-09 16:14:00 +00001614// java.lang.StringFactory.newStringFromString(String toCopy)
Chris Larsen9701c2e2015-09-04 17:22:47 -07001615void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001616 LocationSummary* locations = new (allocator_) LocationSummary(
1617 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001618 InvokeRuntimeCallingConvention calling_convention;
1619 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001620 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsen9701c2e2015-09-04 17:22:47 -07001621 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>()));
1622}
1623
void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // A null `toCopy` argument is diverted to the slow path.
  GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>();
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqzc(string_to_copy, slow_path->GetEntryLabel());

  // The string allocation itself is performed by the runtime entrypoint.
  codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc(), slow_path);
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ Bind(slow_path->GetExitLabel());
}
1638
Chris Larsenddec7f92016-02-16 12:35:04 -08001639static void GenIsInfinite(LocationSummary* locations,
1640 bool is64bit,
1641 Mips64Assembler* assembler) {
1642 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
1643 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1644
1645 if (is64bit) {
1646 __ ClassD(FTMP, in);
1647 } else {
1648 __ ClassS(FTMP, in);
1649 }
1650 __ Mfc1(out, FTMP);
1651 __ Andi(out, out, kPositiveInfinity | kNegativeInfinity);
1652 __ Sltu(out, ZERO, out);
1653}
1654
1655// boolean java.lang.Float.isInfinite(float)
void IntrinsicLocationsBuilderMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
  // Shared helper: FP input register in, integer (boolean) result out.
  CreateFPToIntLocations(allocator_, invoke);
}
1659
1660void IntrinsicCodeGeneratorMIPS64::VisitFloatIsInfinite(HInvoke* invoke) {
1661 GenIsInfinite(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
1662}
1663
1664// boolean java.lang.Double.isInfinite(double)
void IntrinsicLocationsBuilderMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
  // Shared helper: FP input register in, integer (boolean) result out.
  CreateFPToIntLocations(allocator_, invoke);
}
1668
1669void IntrinsicCodeGeneratorMIPS64::VisitDoubleIsInfinite(HInvoke* invoke) {
1670 GenIsInfinite(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
1671}
1672
Chris Larsene3660592016-11-09 11:13:42 -08001673// void java.lang.String.getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin)
1674void IntrinsicLocationsBuilderMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001675 LocationSummary* locations =
1676 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsene3660592016-11-09 11:13:42 -08001677 locations->SetInAt(0, Location::RequiresRegister());
1678 locations->SetInAt(1, Location::RequiresRegister());
1679 locations->SetInAt(2, Location::RequiresRegister());
1680 locations->SetInAt(3, Location::RequiresRegister());
1681 locations->SetInAt(4, Location::RequiresRegister());
1682
Chris Larsen366d4332017-03-23 09:02:56 -07001683 locations->AddTemp(Location::RequiresRegister());
1684 locations->AddTemp(Location::RequiresRegister());
1685 locations->AddTemp(Location::RequiresRegister());
Chris Larsene3660592016-11-09 11:13:42 -08001686}
1687
// Inline copy of the [srcBegin, srcEnd) characters of a string into the char
// array `dst` starting at `dstBegin`. Handles both compressed (8-bit) and
// uncompressed (16-bit) source strings when string compression is enabled.
void IntrinsicCodeGeneratorMIPS64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);
  const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);

  GpuRegister srcObj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister srcBegin = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister srcEnd = locations->InAt(2).AsRegister<GpuRegister>();
  GpuRegister dstObj = locations->InAt(3).AsRegister<GpuRegister>();
  GpuRegister dstBegin = locations->InAt(4).AsRegister<GpuRegister>();

  GpuRegister dstPtr = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister srcPtr = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister numChrs = locations->GetTemp(2).AsRegister<GpuRegister>();

  Mips64Label done;
  Mips64Label loop;

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Get offset of value field within a string object.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  __ Beqc(srcEnd, srcBegin, &done);  // No characters to move.

  // Calculate number of characters to be copied.
  __ Dsubu(numChrs, srcEnd, srcBegin);

  // Calculate destination address.
  __ Daddiu(dstPtr, dstObj, data_offset);
  __ Dlsa(dstPtr, dstBegin, dstPtr, char_shift);

  if (mirror::kUseStringCompression) {
    Mips64Label uncompressed_copy, compressed_loop;
    const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
    // Load count field and extract compression flag (bit 0).
    __ LoadFromOffset(kLoadWord, TMP, srcObj, count_offset);
    __ Dext(TMP, TMP, 0, 1);

    // If string is uncompressed, use uncompressed path.
    __ Bnezc(TMP, &uncompressed_copy);

    // Copy loop for compressed src, copying 1 character (8-bit) to (16-bit) at a time.
    // The source index is in bytes, so no scaling of srcBegin is needed.
    __ Daddu(srcPtr, srcObj, srcBegin);
    __ Bind(&compressed_loop);
    __ LoadFromOffset(kLoadUnsignedByte, TMP, srcPtr, value_offset);
    __ StoreToOffset(kStoreHalfword, TMP, dstPtr, 0);
    __ Daddiu(numChrs, numChrs, -1);
    __ Daddiu(srcPtr, srcPtr, 1);
    __ Daddiu(dstPtr, dstPtr, 2);
    __ Bnezc(numChrs, &compressed_loop);

    __ Bc(&done);
    __ Bind(&uncompressed_copy);
  }

  // Calculate source address (16-bit chars, so scale srcBegin by char_shift).
  __ Daddiu(srcPtr, srcObj, value_offset);
  __ Dlsa(srcPtr, srcBegin, srcPtr, char_shift);

  // Copy loop for the uncompressed case: one 16-bit char per iteration.
  __ Bind(&loop);
  __ Lh(AT, srcPtr, 0);
  __ Daddiu(numChrs, numChrs, -1);
  __ Daddiu(srcPtr, srcPtr, char_size);
  __ Sh(AT, dstPtr, 0);
  __ Daddiu(dstPtr, dstPtr, char_size);
  __ Bnezc(numChrs, &loop);

  __ Bind(&done);
}
1763
Chris Larsen5863f852017-03-23 15:41:37 -07001764// static void java.lang.System.arraycopy(Object src, int srcPos,
1765// Object dest, int destPos,
1766// int length)
1767void IntrinsicLocationsBuilderMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
1768 HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
1769 HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
1770 HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
1771
1772 // As long as we are checking, we might as well check to see if the src and dest
1773 // positions are >= 0.
1774 if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
1775 (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
1776 // We will have to fail anyways.
1777 return;
1778 }
1779
1780 // And since we are already checking, check the length too.
1781 if (length != nullptr) {
1782 int32_t len = length->GetValue();
1783 if (len < 0) {
1784 // Just call as normal.
1785 return;
1786 }
1787 }
1788
1789 // Okay, it is safe to generate inline code.
1790 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01001791 new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
Chris Larsen5863f852017-03-23 15:41:37 -07001792 // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
1793 locations->SetInAt(0, Location::RequiresRegister());
1794 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
1795 locations->SetInAt(2, Location::RequiresRegister());
1796 locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
1797 locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));
1798
1799 locations->AddTemp(Location::RequiresRegister());
1800 locations->AddTemp(Location::RequiresRegister());
1801 locations->AddTemp(Location::RequiresRegister());
1802}
1803
1804// Utility routine to verify that "length(input) - pos >= length"
// Branches to `slow_path` when length_input_minus_pos < length; falls through
// when enough items remain.
static void EnoughItems(Mips64Assembler* assembler,
                        GpuRegister length_input_minus_pos,
                        Location length,
                        SlowPathCodeMIPS64* slow_path) {
  if (length.IsConstant()) {
    int32_t length_constant = length.GetConstant()->AsIntConstant()->GetValue();

    if (IsInt<16>(length_constant)) {
      // Constant fits in Slti's 16-bit immediate: TMP = (lhs < constant).
      __ Slti(TMP, length_input_minus_pos, length_constant);
      __ Bnezc(TMP, slow_path->GetEntryLabel());
    } else {
      // Constant too large for an immediate; materialize it and compare.
      __ LoadConst32(TMP, length_constant);
      __ Bltc(length_input_minus_pos, TMP, slow_path->GetEntryLabel());
    }
  } else {
    // Non-constant length: direct register compare-and-branch.
    __ Bltc(length_input_minus_pos, length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
  }
}
1823
// Emits runtime checks that the window [pos, pos + length) lies inside the
// array `input`, branching to `slow_path` on failure.
// - `pos` may be a constant or a register; in the register case it is also
//   checked for being non-negative.
// - When `length_is_input_length` is true, the caller promises that `length`
//   equals length(input), so the copy can only succeed if pos == 0.
// Clobbers AT (and TMP, via EnoughItems).
static void CheckPosition(Mips64Assembler* assembler,
                          Location pos,
                          GpuRegister input,
                          Location length,
                          SlowPathCodeMIPS64* slow_path,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  // Calculate length(input) - pos.
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, AT, input, length_offset);
        EnoughItems(assembler, AT, length, slow_path);
      }
      // Otherwise pos == 0 and length == length(input): trivially in bounds.
    } else {
      // Check that (length(input) - pos) >= zero.
      __ LoadFromOffset(kLoadWord, AT, input, length_offset);
      DCHECK_GT(pos_const, 0);  // Negative constants were rejected by the LocationsBuilder.
      __ Addiu32(AT, AT, -pos_const);
      __ Bltzc(AT, slow_path->GetEntryLabel());

      // Verify that (length(input) - pos) >= length.
      EnoughItems(assembler, AT, length, slow_path);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bnezc(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Verify that pos >= 0.
    GpuRegister pos_reg = pos.AsRegister<GpuRegister>();
    __ Bltzc(pos_reg, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= zero.
    __ LoadFromOffset(kLoadWord, AT, input, length_offset);
    __ Subu(AT, AT, pos_reg);
    __ Bltzc(AT, slow_path->GetEntryLabel());

    // Verify that (length(input) - pos) >= length.
    EnoughItems(assembler, AT, length, slow_path);
  }
}
1870
// Inline fast path for System.arraycopy on char[] arrays. Anything unusual
// (overlap via src == dest, null arrays, negative/out-of-range arguments) is
// delegated to the slow path, which calls the generic implementation.
void IntrinsicCodeGeneratorMIPS64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
  Location src_pos = locations->InAt(1);
  GpuRegister dest = locations->InAt(2).AsRegister<GpuRegister>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  Mips64Label loop;

  // Temporaries reserved by the LocationsBuilder.
  GpuRegister dest_base = locations->GetTemp(0).AsRegister<GpuRegister>();
  GpuRegister src_base = locations->GetTemp(1).AsRegister<GpuRegister>();
  GpuRegister count = locations->GetTemp(2).AsRegister<GpuRegister>();

  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS64(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ Beqc(src, dest, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ Beqzc(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Beqzc(dest, slow_path->GetEntryLabel());

  // Load length into register for count.
  if (length.IsConstant()) {
    __ LoadConst32(count, length.GetConstant()->AsIntConstant()->GetValue());
  } else {
    // If the length is negative, bail out.
    // We have already checked in the LocationsBuilder for the constant case.
    __ Bltzc(length.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());

    __ Move(count, length.AsRegister<GpuRegister>());
  }

  // Validity checks: source.
  CheckPosition(assembler, src_pos, src, Location::RegisterLocation(count), slow_path);

  // Validity checks: dest.
  CheckPosition(assembler, dest_pos, dest, Location::RegisterLocation(count), slow_path);

  // If count is zero, we're done.
  __ Beqzc(count, slow_path->GetExitLabel());

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);

  const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Calculate source and destination addresses: base = array + data_offset + pos * 2.
  if (src_pos.IsConstant()) {
    int32_t src_pos_const = src_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(src_base, src, data_offset + char_size * src_pos_const, TMP);
  } else {
    __ Daddiu64(src_base, src, data_offset, TMP);
    __ Dlsa(src_base, src_pos.AsRegister<GpuRegister>(), src_base, char_shift);
  }
  if (dest_pos.IsConstant()) {
    int32_t dest_pos_const = dest_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Daddiu64(dest_base, dest, data_offset + char_size * dest_pos_const, TMP);
  } else {
    __ Daddiu64(dest_base, dest, data_offset, TMP);
    __ Dlsa(dest_base, dest_pos.AsRegister<GpuRegister>(), dest_base, char_shift);
  }

  // Copy one 16-bit char per iteration, counting `count` down to zero.
  __ Bind(&loop);
  __ Lh(TMP, src_base, 0);
  __ Daddiu(src_base, src_base, char_size);
  __ Daddiu(count, count, -1);
  __ Sh(TMP, dest_base, 0);
  __ Daddiu(dest_base, dest_base, char_size);
  __ Bnezc(count, &loop);

  __ Bind(slow_path->GetExitLabel());
}
1957
Chris Larsenab922502016-04-15 10:00:56 -07001958static void GenHighestOneBit(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001959 DataType::Type type,
Chris Larsenab922502016-04-15 10:00:56 -07001960 Mips64Assembler* assembler) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001961 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;
Chris Larsenab922502016-04-15 10:00:56 -07001962
1963 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
1964 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1965
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001966 if (type == DataType::Type::kInt64) {
Chris Larsenab922502016-04-15 10:00:56 -07001967 __ Dclz(TMP, in);
1968 __ LoadConst64(AT, INT64_C(0x8000000000000000));
Chris Larsen68db2a92016-09-14 15:41:29 -07001969 __ Dsrlv(AT, AT, TMP);
Chris Larsenab922502016-04-15 10:00:56 -07001970 } else {
1971 __ Clz(TMP, in);
1972 __ LoadConst32(AT, 0x80000000);
Chris Larsen68db2a92016-09-14 15:41:29 -07001973 __ Srlv(AT, AT, TMP);
Chris Larsenab922502016-04-15 10:00:56 -07001974 }
1975 // For either value of "type", when "in" is zero, "out" should also
1976 // be zero. Without this extra "and" operation, when "in" is zero,
1977 // "out" would be either Integer.MIN_VALUE, or Long.MIN_VALUE because
1978 // the MIPS logical shift operations "dsrlv", and "srlv" don't use
1979 // the shift amount (TMP) directly; they use either (TMP % 64) or
1980 // (TMP % 32), respectively.
Chris Larsen68db2a92016-09-14 15:41:29 -07001981 __ And(out, AT, in);
Chris Larsenab922502016-04-15 10:00:56 -07001982}
1983
// int java.lang.Integer.highestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerHighestOneBit(HInvoke* invoke) {
  // Shared helper GenHighestOneBit emits the actual code.
  GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// long java.lang.Long.highestOneBit(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
2001
2002static void GenLowestOneBit(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002003 DataType::Type type,
Chris Larsenab922502016-04-15 10:00:56 -07002004 Mips64Assembler* assembler) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002005 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;
Chris Larsenab922502016-04-15 10:00:56 -07002006
2007 GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
2008 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2009
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002010 if (type == DataType::Type::kInt64) {
Chris Larsenab922502016-04-15 10:00:56 -07002011 __ Dsubu(TMP, ZERO, in);
2012 } else {
2013 __ Subu(TMP, ZERO, in);
2014 }
2015 __ And(out, TMP, in);
2016}
2017
// int java.lang.Integer.lowestOneBit(int)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitIntegerLowestOneBit(HInvoke* invoke) {
  // Shared helper GenLowestOneBit emits the actual code.
  GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

// long java.lang.Long.lowestOneBit(long)
void IntrinsicLocationsBuilderMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitLongLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler());
}
2035
Vladimir Markoca6fff82017-10-03 14:49:14 +01002036static void CreateFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
2037 LocationSummary* locations =
2038 new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Chris Larsen74c20582017-03-28 22:17:35 -07002039 InvokeRuntimeCallingConvention calling_convention;
2040
2041 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002042 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
Chris Larsen74c20582017-03-28 22:17:35 -07002043}
2044
Vladimir Markoca6fff82017-10-03 14:49:14 +01002045static void CreateFPFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
2046 LocationSummary* locations =
2047 new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Chris Larsen74c20582017-03-28 22:17:35 -07002048 InvokeRuntimeCallingConvention calling_convention;
2049
2050 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2051 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002052 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
Chris Larsen74c20582017-03-28 22:17:35 -07002053}
2054
2055static void GenFPToFPCall(HInvoke* invoke,
2056 CodeGeneratorMIPS64* codegen,
2057 QuickEntrypointEnum entry) {
2058 LocationSummary* locations = invoke->GetLocations();
2059 FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
2060 DCHECK_EQ(in, F12);
2061 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
2062 DCHECK_EQ(out, F0);
2063
2064 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2065}
2066
2067static void GenFPFPToFPCall(HInvoke* invoke,
2068 CodeGeneratorMIPS64* codegen,
2069 QuickEntrypointEnum entry) {
2070 LocationSummary* locations = invoke->GetLocations();
2071 FpuRegister in0 = locations->InAt(0).AsFpuRegister<FpuRegister>();
2072 DCHECK_EQ(in0, F12);
2073 FpuRegister in1 = locations->InAt(1).AsFpuRegister<FpuRegister>();
2074 DCHECK_EQ(in1, F13);
2075 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
2076 DCHECK_EQ(out, F0);
2077
2078 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2079}
2080
// The math intrinsics below are implemented as calls to the corresponding
// quick runtime entrypoints (see GenFPToFPCall / GenFPFPToFPCall).

// static double java.lang.Math.cos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}

// static double java.lang.Math.sin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}

// static double java.lang.Math.acos(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}

// static double java.lang.Math.asin(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}

// static double java.lang.Math.atan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}

// static double java.lang.Math.atan2(double y, double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickAtan2);
}
2134
// static double java.lang.Math.pow(double a, double b)
// (Note: the previous comment said "pow(double y, double x)", which was
// copied from atan2; Math.pow takes base `a` and exponent `b`.)
void IntrinsicLocationsBuilderMIPS64::VisitMathPow(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathPow(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickPow);
}
2143
// static double java.lang.Math.cbrt(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

// static double java.lang.Math.cosh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

// static double java.lang.Math.exp(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

// static double java.lang.Math.expm1(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

// static double java.lang.Math.hypot(double x, double y)
void IntrinsicLocationsBuilderMIPS64::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickHypot);
}

// static double java.lang.Math.log(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

// static double java.lang.Math.log10(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

// static double java.lang.Math.nextAfter(double start, double direction)
void IntrinsicLocationsBuilderMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

// static double java.lang.Math.sinh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

// static double java.lang.Math.tan(double a)
void IntrinsicLocationsBuilderMIPS64::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

// static double java.lang.Math.tanh(double x)
void IntrinsicLocationsBuilderMIPS64::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS64::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}
2242
// static Integer java.lang.Integer.valueOf(int)
// (The previous comment said "long java.lang.Integer.valueOf(long)", which
// does not match this intrinsic: it boxes an int into a reference.)
void IntrinsicLocationsBuilderMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConvention calling_convention;
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      calling_convention.GetReturnLocation(DataType::Type::kReference),
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
2252
// Integer.valueOf(int): returns a cached boxed Integer for values inside the
// IntegerCache range, otherwise allocates and initializes a new j.l.Integer.
void IntrinsicCodeGeneratorMIPS64::VisitIntegerValueOf(HInvoke* invoke) {
  IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo();
  LocationSummary* locations = invoke->GetLocations();
  Mips64Assembler* assembler = GetAssembler();
  InstructionCodeGeneratorMIPS64* icodegen =
      down_cast<InstructionCodeGeneratorMIPS64*>(codegen_->GetInstructionVisitor());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  InvokeRuntimeCallingConvention calling_convention;
  if (invoke->InputAt(0)->IsConstant()) {
    int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
    if (value >= info.low && value <= info.high) {
      // Just embed the j.l.Integer in the code.
      ScopedObjectAccess soa(Thread::Current());
      mirror::Object* boxed = info.cache->Get(value + (-info.low));
      DCHECK(boxed != nullptr && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boxed));
      uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(boxed));
      __ LoadConst64(out, address);
    } else {
      // Allocate and initialize a new j.l.Integer.
      // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
      // JIT object table.
      uint32_t address =
          dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
      __ LoadConst64(calling_convention.GetRegisterAt(0), address);
      codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
      CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
      __ StoreConstToOffset(kStoreWord, value, out, info.value_offset, TMP);
      // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
      // one.
      icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    }
  } else {
    GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
    Mips64Label allocate, done;
    int32_t count = static_cast<uint32_t>(info.high) - info.low + 1;

    // Is (info.low <= in) && (in <= info.high)?
    __ Addiu32(out, in, -info.low);
    // As unsigned quantities is out < (info.high - info.low + 1)?
    // (A single unsigned compare covers both range bounds.)
    __ LoadConst32(AT, count);
    // Branch if out >= (info.high - info.low + 1).
    // This means that "in" is outside of the range [info.low, info.high].
    __ Bgeuc(out, AT, &allocate);

    // If the value is within the bounds, load the j.l.Integer directly from the array.
    uint32_t data_offset = mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
    uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.cache));
    __ LoadConst64(TMP, data_offset + address);
    __ Dlsa(out, out, TMP, TIMES_4);
    __ Lwu(out, out, 0);
    __ MaybeUnpoisonHeapReference(out);
    __ Bc(&done);

    __ Bind(&allocate);
    // Otherwise allocate and initialize a new j.l.Integer.
    address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
    __ LoadConst64(calling_convention.GetRegisterAt(0), address);
    codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
    __ StoreToOffset(kStoreWord, in, out, info.value_offset);
    // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
    // one.
    icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    __ Bind(&done);
  }
}
2320
// static boolean java.lang.Thread.interrupted()
void IntrinsicLocationsBuilderMIPS64::VisitThreadInterrupted(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS64::VisitThreadInterrupted(HInvoke* invoke) {
  Mips64Assembler* assembler = GetAssembler();
  GpuRegister out = invoke->GetLocations()->Out().AsRegister<GpuRegister>();
  int32_t offset = Thread::InterruptedOffset<kMips64PointerSize>().Int32Value();
  // Load the thread-local "interrupted" flag from the Thread object (TR).
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  Mips64Label done;
  // If the flag is clear, there is nothing to reset.
  __ Beqzc(out, &done);
  // The flag was set: clear it, bracketing the store with SYNC barriers.
  __ Sync(0);
  __ StoreToOffset(kStoreWord, ZERO, TR, offset);
  __ Sync(0);
  __ Bind(&done);
}
2340
void IntrinsicLocationsBuilderMIPS64::VisitReachabilityFence(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  // Any location will do: the intrinsic emits no code (see below), so only
  // the fact that the input is an input of this instruction matters.
  locations->SetInAt(0, Location::Any());
}

// Reference.reachabilityFence: intentionally generates no code.
void IntrinsicCodeGeneratorMIPS64::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }
2348
Aart Bik2f9fcc92016-03-01 15:16:54 -08002349UNIMPLEMENTED_INTRINSIC(MIPS64, ReferenceGetReferent)
Aart Bik2f9fcc92016-03-01 15:16:54 -08002350UNIMPLEMENTED_INTRINSIC(MIPS64, SystemArrayCopy)
Aart Bik3f67e692016-01-15 14:35:12 -08002351
Aart Bikff7d89c2016-11-07 08:49:28 -08002352UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOf);
2353UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOfAfter);
Aart Bik71bf7b42016-11-16 10:17:46 -08002354UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferAppend);
2355UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferLength);
2356UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferToString);
2357UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppend);
2358UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderLength);
2359UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderToString);
Aart Bikff7d89c2016-11-07 08:49:28 -08002360
Aart Bik0e54c012016-03-04 12:08:31 -08002361// 1.8.
2362UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddInt)
2363UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddLong)
2364UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetInt)
2365UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetLong)
2366UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndSetObject)
Aart Bik0e54c012016-03-04 12:08:31 -08002367
Aart Bik2f9fcc92016-03-01 15:16:54 -08002368UNREACHABLE_INTRINSICS(MIPS64)
Chris Larsen3039e382015-08-26 07:54:08 -07002369
2370#undef __
2371
2372} // namespace mips64
2373} // namespace art